mirror of
https://github.com/Significant-Gravitas/AutoGPT.git
synced 2026-01-28 00:18:25 -05:00
Compare commits
14 Commits
remove-cla
...
feat/text-
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b1259e0bdd | ||
|
|
5244bd94fc | ||
|
|
2134d777be | ||
|
|
962824c8af | ||
|
|
3e9d5d0d50 | ||
|
|
fac10c422b | ||
|
|
91c7896859 | ||
|
|
bab436231a | ||
|
|
859f3f8c06 | ||
|
|
d5c0f5b2df | ||
|
|
fbc2da36e6 | ||
|
|
75ecc4de92 | ||
|
|
f0c2503608 | ||
|
|
cfb7dc5aca |
@@ -178,5 +178,10 @@ AYRSHARE_JWT_KEY=
|
||||
SMARTLEAD_API_KEY=
|
||||
ZEROBOUNCE_API_KEY=
|
||||
|
||||
# PostHog Analytics
|
||||
# Get API key from https://posthog.com - Project Settings > Project API Key
|
||||
POSTHOG_API_KEY=
|
||||
POSTHOG_HOST=https://eu.i.posthog.com
|
||||
|
||||
# Other Services
|
||||
AUTOMOD_API_KEY=
|
||||
|
||||
@@ -86,6 +86,8 @@ async def execute_graph_block(
|
||||
obj = backend.data.block.get_block(block_id)
|
||||
if not obj:
|
||||
raise HTTPException(status_code=404, detail=f"Block #{block_id} not found.")
|
||||
if obj.disabled:
|
||||
raise HTTPException(status_code=403, detail=f"Block #{block_id} is disabled.")
|
||||
|
||||
output = defaultdict(list)
|
||||
async for name, data in obj.execute(data):
|
||||
|
||||
@@ -31,6 +31,7 @@ class ResponseType(str, Enum):
|
||||
# Other
|
||||
ERROR = "error"
|
||||
USAGE = "usage"
|
||||
HEARTBEAT = "heartbeat"
|
||||
|
||||
|
||||
class StreamBaseResponse(BaseModel):
|
||||
@@ -142,3 +143,20 @@ class StreamError(StreamBaseResponse):
|
||||
details: dict[str, Any] | None = Field(
|
||||
default=None, description="Additional error details"
|
||||
)
|
||||
|
||||
|
||||
class StreamHeartbeat(StreamBaseResponse):
|
||||
"""Heartbeat to keep SSE connection alive during long-running operations.
|
||||
|
||||
Uses SSE comment format (: comment) which is ignored by clients but keeps
|
||||
the connection alive through proxies and load balancers.
|
||||
"""
|
||||
|
||||
type: ResponseType = ResponseType.HEARTBEAT
|
||||
toolCallId: str | None = Field(
|
||||
default=None, description="Tool call ID if heartbeat is for a specific tool"
|
||||
)
|
||||
|
||||
def to_sse(self) -> str:
|
||||
"""Convert to SSE comment format to keep connection alive."""
|
||||
return ": heartbeat\n\n"
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,8 +1,10 @@
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from openai.types.chat import ChatCompletionToolParam
|
||||
|
||||
from backend.api.features.chat.model import ChatSession
|
||||
from backend.api.features.chat.tracking import track_tool_called
|
||||
|
||||
from .add_understanding import AddUnderstandingTool
|
||||
from .agent_output import AgentOutputTool
|
||||
@@ -20,6 +22,8 @@ from .search_docs import SearchDocsTool
|
||||
if TYPE_CHECKING:
|
||||
from backend.api.features.chat.response_model import StreamToolOutputAvailable
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Single source of truth for all tools
|
||||
TOOL_REGISTRY: dict[str, BaseTool] = {
|
||||
"add_understanding": AddUnderstandingTool(),
|
||||
@@ -56,4 +60,17 @@ async def execute_tool(
|
||||
tool = TOOL_REGISTRY.get(tool_name)
|
||||
if not tool:
|
||||
raise ValueError(f"Tool {tool_name} not found")
|
||||
|
||||
# Track tool call in PostHog
|
||||
logger.info(
|
||||
f"Tracking tool call: tool={tool_name}, user={user_id}, "
|
||||
f"session={session.session_id}, call_id={tool_call_id}"
|
||||
)
|
||||
track_tool_called(
|
||||
user_id=user_id,
|
||||
session_id=session.session_id,
|
||||
tool_name=tool_name,
|
||||
tool_call_id=tool_call_id,
|
||||
)
|
||||
|
||||
return await tool.execute(user_id, session, tool_call_id, **parameters)
|
||||
|
||||
@@ -3,8 +3,6 @@
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from langfuse import observe
|
||||
|
||||
from backend.api.features.chat.model import ChatSession
|
||||
from backend.data.understanding import (
|
||||
BusinessUnderstandingInput,
|
||||
@@ -61,7 +59,6 @@ and automations for the user's specific needs."""
|
||||
"""Requires authentication to store user-specific data."""
|
||||
return True
|
||||
|
||||
@observe(as_type="tool", name="add_understanding")
|
||||
async def _execute(
|
||||
self,
|
||||
user_id: str | None,
|
||||
|
||||
@@ -5,7 +5,6 @@ import re
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Any
|
||||
|
||||
from langfuse import observe
|
||||
from pydantic import BaseModel, field_validator
|
||||
|
||||
from backend.api.features.chat.model import ChatSession
|
||||
@@ -329,7 +328,6 @@ class AgentOutputTool(BaseTool):
|
||||
total_executions=len(available_executions) if available_executions else 1,
|
||||
)
|
||||
|
||||
@observe(as_type="tool", name="view_agent_output")
|
||||
async def _execute(
|
||||
self,
|
||||
user_id: str | None,
|
||||
|
||||
@@ -3,8 +3,6 @@
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from langfuse import observe
|
||||
|
||||
from backend.api.features.chat.model import ChatSession
|
||||
|
||||
from .agent_generator import (
|
||||
@@ -75,7 +73,6 @@ class CreateAgentTool(BaseTool):
|
||||
"required": ["description"],
|
||||
}
|
||||
|
||||
@observe(as_type="tool", name="create_agent")
|
||||
async def _execute(
|
||||
self,
|
||||
user_id: str | None,
|
||||
@@ -116,8 +113,11 @@ class CreateAgentTool(BaseTool):
|
||||
|
||||
if decomposition_result is None:
|
||||
return ErrorResponse(
|
||||
message="Failed to analyze the goal. Please try rephrasing.",
|
||||
error="Decomposition failed",
|
||||
message="Failed to analyze the goal. The agent generation service may be unavailable or timed out. Please try again.",
|
||||
error="decomposition_failed",
|
||||
details={
|
||||
"description": description[:100]
|
||||
}, # Include context for debugging
|
||||
session_id=session_id,
|
||||
)
|
||||
|
||||
@@ -182,8 +182,11 @@ class CreateAgentTool(BaseTool):
|
||||
|
||||
if agent_json is None:
|
||||
return ErrorResponse(
|
||||
message="Failed to generate the agent. Please try again.",
|
||||
error="Generation failed",
|
||||
message="Failed to generate the agent. The agent generation service may be unavailable or timed out. Please try again.",
|
||||
error="generation_failed",
|
||||
details={
|
||||
"description": description[:100]
|
||||
}, # Include context for debugging
|
||||
session_id=session_id,
|
||||
)
|
||||
|
||||
|
||||
@@ -3,8 +3,6 @@
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from langfuse import observe
|
||||
|
||||
from backend.api.features.chat.model import ChatSession
|
||||
|
||||
from .agent_generator import (
|
||||
@@ -81,7 +79,6 @@ class EditAgentTool(BaseTool):
|
||||
"required": ["agent_id", "changes"],
|
||||
}
|
||||
|
||||
@observe(as_type="tool", name="edit_agent")
|
||||
async def _execute(
|
||||
self,
|
||||
user_id: str | None,
|
||||
@@ -145,8 +142,9 @@ class EditAgentTool(BaseTool):
|
||||
|
||||
if result is None:
|
||||
return ErrorResponse(
|
||||
message="Failed to generate changes. Please try rephrasing.",
|
||||
error="Update generation failed",
|
||||
message="Failed to generate changes. The agent generation service may be unavailable or timed out. Please try again.",
|
||||
error="update_generation_failed",
|
||||
details={"agent_id": agent_id, "changes": changes[:100]},
|
||||
session_id=session_id,
|
||||
)
|
||||
|
||||
|
||||
@@ -2,8 +2,6 @@
|
||||
|
||||
from typing import Any
|
||||
|
||||
from langfuse import observe
|
||||
|
||||
from backend.api.features.chat.model import ChatSession
|
||||
|
||||
from .agent_search import search_agents
|
||||
@@ -37,7 +35,6 @@ class FindAgentTool(BaseTool):
|
||||
"required": ["query"],
|
||||
}
|
||||
|
||||
@observe(as_type="tool", name="find_agent")
|
||||
async def _execute(
|
||||
self, user_id: str | None, session: ChatSession, **kwargs
|
||||
) -> ToolResponseBase:
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from langfuse import observe
|
||||
from prisma.enums import ContentType
|
||||
|
||||
from backend.api.features.chat.model import ChatSession
|
||||
@@ -56,7 +55,6 @@ class FindBlockTool(BaseTool):
|
||||
def requires_auth(self) -> bool:
|
||||
return True
|
||||
|
||||
@observe(as_type="tool", name="find_block")
|
||||
async def _execute(
|
||||
self,
|
||||
user_id: str | None,
|
||||
@@ -109,7 +107,8 @@ class FindBlockTool(BaseTool):
|
||||
block_id = result["content_id"]
|
||||
block = get_block(block_id)
|
||||
|
||||
if block:
|
||||
# Skip disabled blocks
|
||||
if block and not block.disabled:
|
||||
# Get input/output schemas
|
||||
input_schema = {}
|
||||
output_schema = {}
|
||||
|
||||
@@ -2,8 +2,6 @@
|
||||
|
||||
from typing import Any
|
||||
|
||||
from langfuse import observe
|
||||
|
||||
from backend.api.features.chat.model import ChatSession
|
||||
|
||||
from .agent_search import search_agents
|
||||
@@ -43,7 +41,6 @@ class FindLibraryAgentTool(BaseTool):
|
||||
def requires_auth(self) -> bool:
|
||||
return True
|
||||
|
||||
@observe(as_type="tool", name="find_library_agent")
|
||||
async def _execute(
|
||||
self, user_id: str | None, session: ChatSession, **kwargs
|
||||
) -> ToolResponseBase:
|
||||
|
||||
@@ -4,8 +4,6 @@ import logging
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
from langfuse import observe
|
||||
|
||||
from backend.api.features.chat.model import ChatSession
|
||||
from backend.api.features.chat.tools.base import BaseTool
|
||||
from backend.api.features.chat.tools.models import (
|
||||
@@ -73,7 +71,6 @@ class GetDocPageTool(BaseTool):
|
||||
url_path = path.rsplit(".", 1)[0] if "." in path else path
|
||||
return f"{DOCS_BASE_URL}/{url_path}"
|
||||
|
||||
@observe(as_type="tool", name="get_doc_page")
|
||||
async def _execute(
|
||||
self,
|
||||
user_id: str | None,
|
||||
|
||||
@@ -3,11 +3,14 @@
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from langfuse import observe
|
||||
from pydantic import BaseModel, Field, field_validator
|
||||
|
||||
from backend.api.features.chat.config import ChatConfig
|
||||
from backend.api.features.chat.model import ChatSession
|
||||
from backend.api.features.chat.tracking import (
|
||||
track_agent_run_success,
|
||||
track_agent_scheduled,
|
||||
)
|
||||
from backend.api.features.library import db as library_db
|
||||
from backend.data.graph import GraphModel
|
||||
from backend.data.model import CredentialsMetaInput
|
||||
@@ -155,7 +158,6 @@ class RunAgentTool(BaseTool):
|
||||
"""All operations require authentication."""
|
||||
return True
|
||||
|
||||
@observe(as_type="tool", name="run_agent")
|
||||
async def _execute(
|
||||
self,
|
||||
user_id: str | None,
|
||||
@@ -453,6 +455,16 @@ class RunAgentTool(BaseTool):
|
||||
session.successful_agent_runs.get(library_agent.graph_id, 0) + 1
|
||||
)
|
||||
|
||||
# Track in PostHog
|
||||
track_agent_run_success(
|
||||
user_id=user_id,
|
||||
session_id=session_id,
|
||||
graph_id=library_agent.graph_id,
|
||||
graph_name=library_agent.name,
|
||||
execution_id=execution.id,
|
||||
library_agent_id=library_agent.id,
|
||||
)
|
||||
|
||||
library_agent_link = f"/library/agents/{library_agent.id}"
|
||||
return ExecutionStartedResponse(
|
||||
message=(
|
||||
@@ -534,6 +546,18 @@ class RunAgentTool(BaseTool):
|
||||
session.successful_agent_schedules.get(library_agent.graph_id, 0) + 1
|
||||
)
|
||||
|
||||
# Track in PostHog
|
||||
track_agent_scheduled(
|
||||
user_id=user_id,
|
||||
session_id=session_id,
|
||||
graph_id=library_agent.graph_id,
|
||||
graph_name=library_agent.name,
|
||||
schedule_id=result.id,
|
||||
schedule_name=schedule_name,
|
||||
cron=cron,
|
||||
library_agent_id=library_agent.id,
|
||||
)
|
||||
|
||||
library_agent_link = f"/library/agents/{library_agent.id}"
|
||||
return ExecutionStartedResponse(
|
||||
message=(
|
||||
|
||||
@@ -4,8 +4,6 @@ import logging
|
||||
from collections import defaultdict
|
||||
from typing import Any
|
||||
|
||||
from langfuse import observe
|
||||
|
||||
from backend.api.features.chat.model import ChatSession
|
||||
from backend.data.block import get_block
|
||||
from backend.data.execution import ExecutionContext
|
||||
@@ -130,7 +128,6 @@ class RunBlockTool(BaseTool):
|
||||
|
||||
return matched_credentials, missing_credentials
|
||||
|
||||
@observe(as_type="tool", name="run_block")
|
||||
async def _execute(
|
||||
self,
|
||||
user_id: str | None,
|
||||
@@ -179,6 +176,11 @@ class RunBlockTool(BaseTool):
|
||||
message=f"Block '{block_id}' not found",
|
||||
session_id=session_id,
|
||||
)
|
||||
if block.disabled:
|
||||
return ErrorResponse(
|
||||
message=f"Block '{block_id}' is disabled",
|
||||
session_id=session_id,
|
||||
)
|
||||
|
||||
logger.info(f"Executing block {block.name} ({block_id}) for user {user_id}")
|
||||
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from langfuse import observe
|
||||
from prisma.enums import ContentType
|
||||
|
||||
from backend.api.features.chat.model import ChatSession
|
||||
@@ -88,7 +87,6 @@ class SearchDocsTool(BaseTool):
|
||||
url_path = path.rsplit(".", 1)[0] if "." in path else path
|
||||
return f"{DOCS_BASE_URL}/{url_path}"
|
||||
|
||||
@observe(as_type="tool", name="search_docs")
|
||||
async def _execute(
|
||||
self,
|
||||
user_id: str | None,
|
||||
|
||||
250
autogpt_platform/backend/backend/api/features/chat/tracking.py
Normal file
250
autogpt_platform/backend/backend/api/features/chat/tracking.py
Normal file
@@ -0,0 +1,250 @@
|
||||
"""PostHog analytics tracking for the chat system."""
|
||||
|
||||
import atexit
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from posthog import Posthog
|
||||
|
||||
from backend.util.settings import Settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
settings = Settings()
|
||||
|
||||
# PostHog client instance (lazily initialized)
|
||||
_posthog_client: Posthog | None = None
|
||||
|
||||
|
||||
def _shutdown_posthog() -> None:
|
||||
"""Flush and shutdown PostHog client on process exit."""
|
||||
if _posthog_client is not None:
|
||||
_posthog_client.flush()
|
||||
_posthog_client.shutdown()
|
||||
|
||||
|
||||
atexit.register(_shutdown_posthog)
|
||||
|
||||
|
||||
def _get_posthog_client() -> Posthog | None:
|
||||
"""Get or create the PostHog client instance."""
|
||||
global _posthog_client
|
||||
if _posthog_client is not None:
|
||||
return _posthog_client
|
||||
|
||||
if not settings.secrets.posthog_api_key:
|
||||
logger.debug("PostHog API key not configured, analytics disabled")
|
||||
return None
|
||||
|
||||
_posthog_client = Posthog(
|
||||
settings.secrets.posthog_api_key,
|
||||
host=settings.secrets.posthog_host,
|
||||
)
|
||||
logger.info(
|
||||
f"PostHog client initialized with host: {settings.secrets.posthog_host}"
|
||||
)
|
||||
return _posthog_client
|
||||
|
||||
|
||||
def _get_base_properties() -> dict[str, Any]:
|
||||
"""Get base properties included in all events."""
|
||||
return {
|
||||
"environment": settings.config.app_env.value,
|
||||
"source": "chat_copilot",
|
||||
}
|
||||
|
||||
|
||||
def track_user_message(
|
||||
user_id: str | None,
|
||||
session_id: str,
|
||||
message_length: int,
|
||||
) -> None:
|
||||
"""Track when a user sends a message in chat.
|
||||
|
||||
Args:
|
||||
user_id: The user's ID (or None for anonymous)
|
||||
session_id: The chat session ID
|
||||
message_length: Length of the user's message
|
||||
"""
|
||||
client = _get_posthog_client()
|
||||
if not client:
|
||||
return
|
||||
|
||||
try:
|
||||
properties = {
|
||||
**_get_base_properties(),
|
||||
"session_id": session_id,
|
||||
"message_length": message_length,
|
||||
}
|
||||
client.capture(
|
||||
distinct_id=user_id or f"anonymous_{session_id}",
|
||||
event="copilot_message_sent",
|
||||
properties=properties,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to track user message: {e}")
|
||||
|
||||
|
||||
def track_tool_called(
|
||||
user_id: str | None,
|
||||
session_id: str,
|
||||
tool_name: str,
|
||||
tool_call_id: str,
|
||||
) -> None:
|
||||
"""Track when a tool is called in chat.
|
||||
|
||||
Args:
|
||||
user_id: The user's ID (or None for anonymous)
|
||||
session_id: The chat session ID
|
||||
tool_name: Name of the tool being called
|
||||
tool_call_id: Unique ID of the tool call
|
||||
"""
|
||||
client = _get_posthog_client()
|
||||
if not client:
|
||||
logger.info("PostHog client not available for tool tracking")
|
||||
return
|
||||
|
||||
try:
|
||||
properties = {
|
||||
**_get_base_properties(),
|
||||
"session_id": session_id,
|
||||
"tool_name": tool_name,
|
||||
"tool_call_id": tool_call_id,
|
||||
}
|
||||
distinct_id = user_id or f"anonymous_{session_id}"
|
||||
logger.info(
|
||||
f"Sending copilot_tool_called event to PostHog: distinct_id={distinct_id}, "
|
||||
f"tool_name={tool_name}"
|
||||
)
|
||||
client.capture(
|
||||
distinct_id=distinct_id,
|
||||
event="copilot_tool_called",
|
||||
properties=properties,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to track tool call: {e}")
|
||||
|
||||
|
||||
def track_agent_run_success(
|
||||
user_id: str,
|
||||
session_id: str,
|
||||
graph_id: str,
|
||||
graph_name: str,
|
||||
execution_id: str,
|
||||
library_agent_id: str,
|
||||
) -> None:
|
||||
"""Track when an agent is successfully run.
|
||||
|
||||
Args:
|
||||
user_id: The user's ID
|
||||
session_id: The chat session ID
|
||||
graph_id: ID of the agent graph
|
||||
graph_name: Name of the agent
|
||||
execution_id: ID of the execution
|
||||
library_agent_id: ID of the library agent
|
||||
"""
|
||||
client = _get_posthog_client()
|
||||
if not client:
|
||||
return
|
||||
|
||||
try:
|
||||
properties = {
|
||||
**_get_base_properties(),
|
||||
"session_id": session_id,
|
||||
"graph_id": graph_id,
|
||||
"graph_name": graph_name,
|
||||
"execution_id": execution_id,
|
||||
"library_agent_id": library_agent_id,
|
||||
}
|
||||
client.capture(
|
||||
distinct_id=user_id,
|
||||
event="copilot_agent_run_success",
|
||||
properties=properties,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to track agent run: {e}")
|
||||
|
||||
|
||||
def track_agent_scheduled(
|
||||
user_id: str,
|
||||
session_id: str,
|
||||
graph_id: str,
|
||||
graph_name: str,
|
||||
schedule_id: str,
|
||||
schedule_name: str,
|
||||
cron: str,
|
||||
library_agent_id: str,
|
||||
) -> None:
|
||||
"""Track when an agent is successfully scheduled.
|
||||
|
||||
Args:
|
||||
user_id: The user's ID
|
||||
session_id: The chat session ID
|
||||
graph_id: ID of the agent graph
|
||||
graph_name: Name of the agent
|
||||
schedule_id: ID of the schedule
|
||||
schedule_name: Name of the schedule
|
||||
cron: Cron expression for the schedule
|
||||
library_agent_id: ID of the library agent
|
||||
"""
|
||||
client = _get_posthog_client()
|
||||
if not client:
|
||||
return
|
||||
|
||||
try:
|
||||
properties = {
|
||||
**_get_base_properties(),
|
||||
"session_id": session_id,
|
||||
"graph_id": graph_id,
|
||||
"graph_name": graph_name,
|
||||
"schedule_id": schedule_id,
|
||||
"schedule_name": schedule_name,
|
||||
"cron": cron,
|
||||
"library_agent_id": library_agent_id,
|
||||
}
|
||||
client.capture(
|
||||
distinct_id=user_id,
|
||||
event="copilot_agent_scheduled",
|
||||
properties=properties,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to track agent schedule: {e}")
|
||||
|
||||
|
||||
def track_trigger_setup(
|
||||
user_id: str,
|
||||
session_id: str,
|
||||
graph_id: str,
|
||||
graph_name: str,
|
||||
trigger_type: str,
|
||||
library_agent_id: str,
|
||||
) -> None:
|
||||
"""Track when a trigger is set up for an agent.
|
||||
|
||||
Args:
|
||||
user_id: The user's ID
|
||||
session_id: The chat session ID
|
||||
graph_id: ID of the agent graph
|
||||
graph_name: Name of the agent
|
||||
trigger_type: Type of trigger (e.g., 'webhook')
|
||||
library_agent_id: ID of the library agent
|
||||
"""
|
||||
client = _get_posthog_client()
|
||||
if not client:
|
||||
return
|
||||
|
||||
try:
|
||||
properties = {
|
||||
**_get_base_properties(),
|
||||
"session_id": session_id,
|
||||
"graph_id": graph_id,
|
||||
"graph_name": graph_name,
|
||||
"trigger_type": trigger_type,
|
||||
"library_agent_id": library_agent_id,
|
||||
}
|
||||
client.capture(
|
||||
distinct_id=user_id,
|
||||
event="copilot_trigger_setup",
|
||||
properties=properties,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to track trigger setup: {e}")
|
||||
@@ -164,9 +164,9 @@ async def test_process_review_action_approve_success(
|
||||
"""Test successful review approval"""
|
||||
# Mock the route functions
|
||||
|
||||
# Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
|
||||
# Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
|
||||
mock_get_reviews_for_user = mocker.patch(
|
||||
"backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
|
||||
"backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
|
||||
)
|
||||
mock_get_reviews_for_user.return_value = {"test_node_123": sample_pending_review}
|
||||
|
||||
@@ -244,9 +244,9 @@ async def test_process_review_action_reject_success(
|
||||
"""Test successful review rejection"""
|
||||
# Mock the route functions
|
||||
|
||||
# Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
|
||||
# Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
|
||||
mock_get_reviews_for_user = mocker.patch(
|
||||
"backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
|
||||
"backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
|
||||
)
|
||||
mock_get_reviews_for_user.return_value = {"test_node_123": sample_pending_review}
|
||||
|
||||
@@ -339,9 +339,9 @@ async def test_process_review_action_mixed_success(
|
||||
|
||||
# Mock the route functions
|
||||
|
||||
# Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
|
||||
# Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
|
||||
mock_get_reviews_for_user = mocker.patch(
|
||||
"backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
|
||||
"backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
|
||||
)
|
||||
mock_get_reviews_for_user.return_value = {
|
||||
"test_node_123": sample_pending_review,
|
||||
@@ -463,9 +463,9 @@ async def test_process_review_action_review_not_found(
|
||||
test_user_id: str,
|
||||
) -> None:
|
||||
"""Test error when review is not found"""
|
||||
# Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
|
||||
# Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
|
||||
mock_get_reviews_for_user = mocker.patch(
|
||||
"backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
|
||||
"backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
|
||||
)
|
||||
# Return empty dict to simulate review not found
|
||||
mock_get_reviews_for_user.return_value = {}
|
||||
@@ -506,7 +506,7 @@ async def test_process_review_action_review_not_found(
|
||||
response = await client.post("/api/review/action", json=request_data)
|
||||
|
||||
assert response.status_code == 404
|
||||
assert "No pending review found" in response.json()["detail"]
|
||||
assert "Review(s) not found" in response.json()["detail"]
|
||||
|
||||
|
||||
@pytest.mark.asyncio(loop_scope="session")
|
||||
@@ -517,9 +517,9 @@ async def test_process_review_action_partial_failure(
|
||||
test_user_id: str,
|
||||
) -> None:
|
||||
"""Test handling of partial failures in review processing"""
|
||||
# Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
|
||||
# Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
|
||||
mock_get_reviews_for_user = mocker.patch(
|
||||
"backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
|
||||
"backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
|
||||
)
|
||||
mock_get_reviews_for_user.return_value = {"test_node_123": sample_pending_review}
|
||||
|
||||
@@ -567,9 +567,9 @@ async def test_process_review_action_invalid_node_exec_id(
|
||||
test_user_id: str,
|
||||
) -> None:
|
||||
"""Test failure when trying to process review with invalid node execution ID"""
|
||||
# Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
|
||||
# Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
|
||||
mock_get_reviews_for_user = mocker.patch(
|
||||
"backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
|
||||
"backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
|
||||
)
|
||||
# Return empty dict to simulate review not found
|
||||
mock_get_reviews_for_user.return_value = {}
|
||||
@@ -596,7 +596,7 @@ async def test_process_review_action_invalid_node_exec_id(
|
||||
|
||||
# Returns 404 when review is not found
|
||||
assert response.status_code == 404
|
||||
assert "No pending review found" in response.json()["detail"]
|
||||
assert "Review(s) not found" in response.json()["detail"]
|
||||
|
||||
|
||||
@pytest.mark.asyncio(loop_scope="session")
|
||||
@@ -607,9 +607,9 @@ async def test_process_review_action_auto_approve_creates_auto_approval_records(
|
||||
test_user_id: str,
|
||||
) -> None:
|
||||
"""Test that auto_approve_future_actions flag creates auto-approval records"""
|
||||
# Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
|
||||
# Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
|
||||
mock_get_reviews_for_user = mocker.patch(
|
||||
"backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
|
||||
"backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
|
||||
)
|
||||
mock_get_reviews_for_user.return_value = {"test_node_123": sample_pending_review}
|
||||
|
||||
@@ -737,9 +737,9 @@ async def test_process_review_action_without_auto_approve_still_loads_settings(
|
||||
test_user_id: str,
|
||||
) -> None:
|
||||
"""Test that execution context is created with settings even without auto-approve"""
|
||||
# Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
|
||||
# Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
|
||||
mock_get_reviews_for_user = mocker.patch(
|
||||
"backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
|
||||
"backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
|
||||
)
|
||||
mock_get_reviews_for_user.return_value = {"test_node_123": sample_pending_review}
|
||||
|
||||
@@ -885,9 +885,9 @@ async def test_process_review_action_auto_approve_only_applies_to_approved_revie
|
||||
reviewed_at=FIXED_NOW,
|
||||
)
|
||||
|
||||
# Mock get_pending_reviews_by_node_exec_ids (called to find the graph_exec_id)
|
||||
# Mock get_reviews_by_node_exec_ids (called to find the graph_exec_id)
|
||||
mock_get_reviews_for_user = mocker.patch(
|
||||
"backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
|
||||
"backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
|
||||
)
|
||||
# Need to return both reviews in WAITING state (before processing)
|
||||
approved_review_waiting = PendingHumanReviewModel(
|
||||
@@ -1031,9 +1031,9 @@ async def test_process_review_action_per_review_auto_approve_granularity(
|
||||
test_user_id: str,
|
||||
) -> None:
|
||||
"""Test that auto-approval can be set per-review (granular control)"""
|
||||
# Mock get_pending_reviews_by_node_exec_ids - return different reviews based on node_exec_id
|
||||
# Mock get_reviews_by_node_exec_ids - return different reviews based on node_exec_id
|
||||
mock_get_reviews_for_user = mocker.patch(
|
||||
"backend.api.features.executions.review.routes.get_pending_reviews_by_node_exec_ids"
|
||||
"backend.api.features.executions.review.routes.get_reviews_by_node_exec_ids"
|
||||
)
|
||||
|
||||
# Create a mapping of node_exec_id to review
|
||||
|
||||
@@ -14,9 +14,9 @@ from backend.data.execution import (
|
||||
from backend.data.graph import get_graph_settings
|
||||
from backend.data.human_review import (
|
||||
create_auto_approval_record,
|
||||
get_pending_reviews_by_node_exec_ids,
|
||||
get_pending_reviews_for_execution,
|
||||
get_pending_reviews_for_user,
|
||||
get_reviews_by_node_exec_ids,
|
||||
has_pending_reviews_for_graph_exec,
|
||||
process_all_reviews_for_execution,
|
||||
)
|
||||
@@ -137,17 +137,17 @@ async def process_review_action(
|
||||
detail="At least one review must be provided",
|
||||
)
|
||||
|
||||
# Batch fetch all requested reviews
|
||||
reviews_map = await get_pending_reviews_by_node_exec_ids(
|
||||
# Batch fetch all requested reviews (regardless of status for idempotent handling)
|
||||
reviews_map = await get_reviews_by_node_exec_ids(
|
||||
list(all_request_node_ids), user_id
|
||||
)
|
||||
|
||||
# Validate all reviews were found
|
||||
# Validate all reviews were found (must exist, any status is OK for now)
|
||||
missing_ids = all_request_node_ids - set(reviews_map.keys())
|
||||
if missing_ids:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f"No pending review found for node execution(s): {', '.join(missing_ids)}",
|
||||
detail=f"Review(s) not found: {', '.join(missing_ids)}",
|
||||
)
|
||||
|
||||
# Validate all reviews belong to the same execution
|
||||
|
||||
@@ -188,6 +188,10 @@ class BlockHandler(ContentHandler):
|
||||
try:
|
||||
block_instance = block_cls()
|
||||
|
||||
# Skip disabled blocks - they shouldn't be indexed
|
||||
if block_instance.disabled:
|
||||
continue
|
||||
|
||||
# Build searchable text from block metadata
|
||||
parts = []
|
||||
if hasattr(block_instance, "name") and block_instance.name:
|
||||
@@ -248,12 +252,19 @@ class BlockHandler(ContentHandler):
|
||||
from backend.data.block import get_blocks
|
||||
|
||||
all_blocks = get_blocks()
|
||||
total_blocks = len(all_blocks)
|
||||
|
||||
# Filter out disabled blocks - they're not indexed
|
||||
enabled_block_ids = [
|
||||
block_id
|
||||
for block_id, block_cls in all_blocks.items()
|
||||
if not block_cls().disabled
|
||||
]
|
||||
total_blocks = len(enabled_block_ids)
|
||||
|
||||
if total_blocks == 0:
|
||||
return {"total": 0, "with_embeddings": 0, "without_embeddings": 0}
|
||||
|
||||
block_ids = list(all_blocks.keys())
|
||||
block_ids = enabled_block_ids
|
||||
placeholders = ",".join([f"${i+1}" for i in range(len(block_ids))])
|
||||
|
||||
embedded_result = await query_raw_with_schema(
|
||||
|
||||
@@ -81,6 +81,7 @@ async def test_block_handler_get_missing_items(mocker):
|
||||
mock_block_instance.name = "Calculator Block"
|
||||
mock_block_instance.description = "Performs calculations"
|
||||
mock_block_instance.categories = [MagicMock(value="MATH")]
|
||||
mock_block_instance.disabled = False
|
||||
mock_block_instance.input_schema.model_json_schema.return_value = {
|
||||
"properties": {"expression": {"description": "Math expression to evaluate"}}
|
||||
}
|
||||
@@ -116,11 +117,18 @@ async def test_block_handler_get_stats(mocker):
|
||||
"""Test BlockHandler returns correct stats."""
|
||||
handler = BlockHandler()
|
||||
|
||||
# Mock get_blocks
|
||||
# Mock get_blocks - each block class returns an instance with disabled=False
|
||||
def make_mock_block_class():
|
||||
mock_class = MagicMock()
|
||||
mock_instance = MagicMock()
|
||||
mock_instance.disabled = False
|
||||
mock_class.return_value = mock_instance
|
||||
return mock_class
|
||||
|
||||
mock_blocks = {
|
||||
"block-1": MagicMock(),
|
||||
"block-2": MagicMock(),
|
||||
"block-3": MagicMock(),
|
||||
"block-1": make_mock_block_class(),
|
||||
"block-2": make_mock_block_class(),
|
||||
"block-3": make_mock_block_class(),
|
||||
}
|
||||
|
||||
# Mock embedded count query (2 blocks have embeddings)
|
||||
@@ -309,6 +317,7 @@ async def test_block_handler_handles_missing_attributes():
|
||||
mock_block_class = MagicMock()
|
||||
mock_block_instance = MagicMock()
|
||||
mock_block_instance.name = "Minimal Block"
|
||||
mock_block_instance.disabled = False
|
||||
# No description, categories, or schema
|
||||
del mock_block_instance.description
|
||||
del mock_block_instance.categories
|
||||
@@ -342,6 +351,7 @@ async def test_block_handler_skips_failed_blocks():
|
||||
good_instance.name = "Good Block"
|
||||
good_instance.description = "Works fine"
|
||||
good_instance.categories = []
|
||||
good_instance.disabled = False
|
||||
good_block.return_value = good_instance
|
||||
|
||||
bad_block = MagicMock()
|
||||
|
||||
@@ -364,6 +364,8 @@ async def execute_graph_block(
|
||||
obj = get_block(block_id)
|
||||
if not obj:
|
||||
raise HTTPException(status_code=404, detail=f"Block #{block_id} not found.")
|
||||
if obj.disabled:
|
||||
raise HTTPException(status_code=403, detail=f"Block #{block_id} is disabled.")
|
||||
|
||||
user = await get_user_by_id(user_id)
|
||||
if not user:
|
||||
|
||||
@@ -138,6 +138,7 @@ def test_execute_graph_block(
|
||||
"""Test execute block endpoint"""
|
||||
# Mock block
|
||||
mock_block = Mock()
|
||||
mock_block.disabled = False
|
||||
|
||||
async def mock_execute(*args, **kwargs):
|
||||
yield "output1", {"data": "result1"}
|
||||
|
||||
73
autogpt_platform/backend/backend/blocks/encoder_block.py
Normal file
73
autogpt_platform/backend/backend/blocks/encoder_block.py
Normal file
@@ -0,0 +1,73 @@
|
||||
"""Text encoding block for converting special characters to escape sequences."""
|
||||
|
||||
import codecs
|
||||
|
||||
from backend.data.block import (
|
||||
Block,
|
||||
BlockCategory,
|
||||
BlockOutput,
|
||||
BlockSchemaInput,
|
||||
BlockSchemaOutput,
|
||||
)
|
||||
from backend.data.model import SchemaField
|
||||
|
||||
|
||||
class TextEncoderBlock(Block):
|
||||
"""
|
||||
Encodes a string by converting special characters into escape sequences.
|
||||
|
||||
This block is the inverse of TextDecoderBlock. It takes text containing
|
||||
special characters (like newlines, tabs, etc.) and converts them into
|
||||
their escape sequence representations (e.g., newline becomes \\n).
|
||||
"""
|
||||
class Input(BlockSchemaInput):
|
||||
"""Input schema for TextEncoderBlock."""
|
||||
|
||||
text: str = SchemaField(
|
||||
description="A string containing special characters to be encoded",
|
||||
placeholder="Your text with newlines and quotes to encode",
|
||||
)
|
||||
|
||||
class Output(BlockSchemaOutput):
|
||||
"""Output schema for TextEncoderBlock."""
|
||||
|
||||
encoded_text: str = SchemaField(
|
||||
description="The encoded text with special characters converted to escape sequences"
|
||||
)
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="5185f32e-4b65-4ecf-8fbb-873f003f09d6",
|
||||
description="Encodes a string by converting special characters into escape sequences",
|
||||
categories={BlockCategory.TEXT},
|
||||
input_schema=TextEncoderBlock.Input,
|
||||
output_schema=TextEncoderBlock.Output,
|
||||
test_input={"text": """Hello
|
||||
World!
|
||||
This is a "quoted" string."""},
|
||||
test_output=[
|
||||
(
|
||||
"encoded_text",
|
||||
"""Hello\\nWorld!\\nThis is a "quoted" string.""",
|
||||
)
|
||||
],
|
||||
)
|
||||
|
||||
async def run(self, input_data: Input, **kwargs) -> BlockOutput:
|
||||
"""
|
||||
Encode the input text by converting special characters to escape sequences.
|
||||
|
||||
Args:
|
||||
input_data: The input containing the text to encode.
|
||||
**kwargs: Additional keyword arguments (unused).
|
||||
|
||||
Yields:
|
||||
The encoded text with escape sequences, or an error message on failure.
|
||||
"""
|
||||
try:
|
||||
encoded_text = codecs.encode(input_data.text, "unicode_escape").decode(
|
||||
"utf-8"
|
||||
)
|
||||
yield "encoded_text", encoded_text
|
||||
except Exception as e:
|
||||
yield "error", f"Failed to encode text: {e}"
|
||||
@@ -115,6 +115,7 @@ class LlmModel(str, Enum, metaclass=LlmModelMeta):
|
||||
CLAUDE_4_5_OPUS = "claude-opus-4-5-20251101"
|
||||
CLAUDE_4_5_SONNET = "claude-sonnet-4-5-20250929"
|
||||
CLAUDE_4_5_HAIKU = "claude-haiku-4-5-20251001"
|
||||
CLAUDE_3_7_SONNET = "claude-3-7-sonnet-20250219"
|
||||
CLAUDE_3_HAIKU = "claude-3-haiku-20240307"
|
||||
# AI/ML API models
|
||||
AIML_API_QWEN2_5_72B = "Qwen/Qwen2.5-72B-Instruct-Turbo"
|
||||
@@ -279,6 +280,9 @@ MODEL_METADATA = {
|
||||
LlmModel.CLAUDE_4_5_HAIKU: ModelMetadata(
|
||||
"anthropic", 200000, 64000, "Claude Haiku 4.5", "Anthropic", "Anthropic", 2
|
||||
), # claude-haiku-4-5-20251001
|
||||
LlmModel.CLAUDE_3_7_SONNET: ModelMetadata(
|
||||
"anthropic", 200000, 64000, "Claude 3.7 Sonnet", "Anthropic", "Anthropic", 2
|
||||
), # claude-3-7-sonnet-20250219
|
||||
LlmModel.CLAUDE_3_HAIKU: ModelMetadata(
|
||||
"anthropic", 200000, 4096, "Claude 3 Haiku", "Anthropic", "Anthropic", 1
|
||||
), # claude-3-haiku-20240307
|
||||
|
||||
@@ -83,7 +83,7 @@ class StagehandRecommendedLlmModel(str, Enum):
|
||||
GPT41_MINI = "gpt-4.1-mini-2025-04-14"
|
||||
|
||||
# Anthropic
|
||||
CLAUDE_4_5_SONNET = "claude-sonnet-4-5-20250929"
|
||||
CLAUDE_3_7_SONNET = "claude-3-7-sonnet-20250219"
|
||||
|
||||
@property
|
||||
def provider_name(self) -> str:
|
||||
@@ -137,7 +137,7 @@ class StagehandObserveBlock(Block):
|
||||
model: StagehandRecommendedLlmModel = SchemaField(
|
||||
title="LLM Model",
|
||||
description="LLM to use for Stagehand (provider is inferred)",
|
||||
default=StagehandRecommendedLlmModel.CLAUDE_4_5_SONNET,
|
||||
default=StagehandRecommendedLlmModel.CLAUDE_3_7_SONNET,
|
||||
advanced=False,
|
||||
)
|
||||
model_credentials: AICredentials = AICredentialsField()
|
||||
@@ -230,7 +230,7 @@ class StagehandActBlock(Block):
|
||||
model: StagehandRecommendedLlmModel = SchemaField(
|
||||
title="LLM Model",
|
||||
description="LLM to use for Stagehand (provider is inferred)",
|
||||
default=StagehandRecommendedLlmModel.CLAUDE_4_5_SONNET,
|
||||
default=StagehandRecommendedLlmModel.CLAUDE_3_7_SONNET,
|
||||
advanced=False,
|
||||
)
|
||||
model_credentials: AICredentials = AICredentialsField()
|
||||
@@ -330,7 +330,7 @@ class StagehandExtractBlock(Block):
|
||||
model: StagehandRecommendedLlmModel = SchemaField(
|
||||
title="LLM Model",
|
||||
description="LLM to use for Stagehand (provider is inferred)",
|
||||
default=StagehandRecommendedLlmModel.CLAUDE_4_5_SONNET,
|
||||
default=StagehandRecommendedLlmModel.CLAUDE_3_7_SONNET,
|
||||
advanced=False,
|
||||
)
|
||||
model_credentials: AICredentials = AICredentialsField()
|
||||
|
||||
@@ -81,6 +81,7 @@ MODEL_COST: dict[LlmModel, int] = {
|
||||
LlmModel.CLAUDE_4_5_HAIKU: 4,
|
||||
LlmModel.CLAUDE_4_5_OPUS: 14,
|
||||
LlmModel.CLAUDE_4_5_SONNET: 9,
|
||||
LlmModel.CLAUDE_3_7_SONNET: 5,
|
||||
LlmModel.CLAUDE_3_HAIKU: 1,
|
||||
LlmModel.AIML_API_QWEN2_5_72B: 1,
|
||||
LlmModel.AIML_API_LLAMA3_1_70B: 1,
|
||||
|
||||
@@ -263,11 +263,14 @@ async def get_pending_review_by_node_exec_id(
|
||||
return PendingHumanReviewModel.from_db(review, node_id=node_id)
|
||||
|
||||
|
||||
async def get_pending_reviews_by_node_exec_ids(
|
||||
async def get_reviews_by_node_exec_ids(
|
||||
node_exec_ids: list[str], user_id: str
|
||||
) -> dict[str, "PendingHumanReviewModel"]:
|
||||
"""
|
||||
Get multiple pending reviews by their node execution IDs in a single batch query.
|
||||
Get multiple reviews by their node execution IDs regardless of status.
|
||||
|
||||
Unlike get_pending_reviews_by_node_exec_ids, this returns reviews in any status
|
||||
(WAITING, APPROVED, REJECTED). Used for validation in idempotent operations.
|
||||
|
||||
Args:
|
||||
node_exec_ids: List of node execution IDs to look up
|
||||
@@ -283,7 +286,6 @@ async def get_pending_reviews_by_node_exec_ids(
|
||||
where={
|
||||
"nodeExecId": {"in": node_exec_ids},
|
||||
"userId": user_id,
|
||||
"status": ReviewStatus.WAITING,
|
||||
}
|
||||
)
|
||||
|
||||
@@ -407,38 +409,68 @@ async def process_all_reviews_for_execution(
|
||||
) -> dict[str, PendingHumanReviewModel]:
|
||||
"""Process all pending reviews for an execution with approve/reject decisions.
|
||||
|
||||
Handles race conditions gracefully: if a review was already processed with the
|
||||
same decision by a concurrent request, it's treated as success rather than error.
|
||||
|
||||
Args:
|
||||
user_id: User ID for ownership validation
|
||||
review_decisions: Map of node_exec_id -> (status, reviewed_data, message)
|
||||
|
||||
Returns:
|
||||
Dict of node_exec_id -> updated review model
|
||||
Dict of node_exec_id -> updated review model (includes already-processed reviews)
|
||||
"""
|
||||
if not review_decisions:
|
||||
return {}
|
||||
|
||||
node_exec_ids = list(review_decisions.keys())
|
||||
|
||||
# Get all reviews for validation
|
||||
reviews = await PendingHumanReview.prisma().find_many(
|
||||
# Get all reviews (both WAITING and already processed) for the user
|
||||
all_reviews = await PendingHumanReview.prisma().find_many(
|
||||
where={
|
||||
"nodeExecId": {"in": node_exec_ids},
|
||||
"userId": user_id,
|
||||
"status": ReviewStatus.WAITING,
|
||||
},
|
||||
)
|
||||
|
||||
# Validate all reviews can be processed
|
||||
if len(reviews) != len(node_exec_ids):
|
||||
missing_ids = set(node_exec_ids) - {review.nodeExecId for review in reviews}
|
||||
# Separate into pending and already-processed reviews
|
||||
reviews_to_process = []
|
||||
already_processed = []
|
||||
for review in all_reviews:
|
||||
if review.status == ReviewStatus.WAITING:
|
||||
reviews_to_process.append(review)
|
||||
else:
|
||||
already_processed.append(review)
|
||||
|
||||
# Check for truly missing reviews (not found at all)
|
||||
found_ids = {review.nodeExecId for review in all_reviews}
|
||||
missing_ids = set(node_exec_ids) - found_ids
|
||||
if missing_ids:
|
||||
raise ValueError(
|
||||
f"Reviews not found, access denied, or not in WAITING status: {', '.join(missing_ids)}"
|
||||
f"Reviews not found or access denied: {', '.join(missing_ids)}"
|
||||
)
|
||||
|
||||
# Create parallel update tasks
|
||||
# Validate already-processed reviews have compatible status (same decision)
|
||||
# This handles race conditions where another request processed the same reviews
|
||||
for review in already_processed:
|
||||
requested_status = review_decisions[review.nodeExecId][0]
|
||||
if review.status != requested_status:
|
||||
raise ValueError(
|
||||
f"Review {review.nodeExecId} was already processed with status "
|
||||
f"{review.status}, cannot change to {requested_status}"
|
||||
)
|
||||
|
||||
# Log if we're handling a race condition (some reviews already processed)
|
||||
if already_processed:
|
||||
already_processed_ids = [r.nodeExecId for r in already_processed]
|
||||
logger.info(
|
||||
f"Race condition handled: {len(already_processed)} review(s) already "
|
||||
f"processed by concurrent request: {already_processed_ids}"
|
||||
)
|
||||
|
||||
# Create parallel update tasks for reviews that still need processing
|
||||
update_tasks = []
|
||||
|
||||
for review in reviews:
|
||||
for review in reviews_to_process:
|
||||
new_status, reviewed_data, message = review_decisions[review.nodeExecId]
|
||||
has_data_changes = reviewed_data is not None and reviewed_data != review.payload
|
||||
|
||||
@@ -463,7 +495,7 @@ async def process_all_reviews_for_execution(
|
||||
update_tasks.append(task)
|
||||
|
||||
# Execute all updates in parallel and get updated reviews
|
||||
updated_reviews = await asyncio.gather(*update_tasks)
|
||||
updated_reviews = await asyncio.gather(*update_tasks) if update_tasks else []
|
||||
|
||||
# Note: Execution resumption is now handled at the API layer after ALL reviews
|
||||
# for an execution are processed (both approved and rejected)
|
||||
@@ -472,8 +504,11 @@ async def process_all_reviews_for_execution(
|
||||
# Local import to avoid event loop conflicts in tests
|
||||
from backend.data.execution import get_node_execution
|
||||
|
||||
# Combine updated reviews with already-processed ones (for idempotent response)
|
||||
all_result_reviews = list(updated_reviews) + already_processed
|
||||
|
||||
result = {}
|
||||
for review in updated_reviews:
|
||||
for review in all_result_reviews:
|
||||
node_exec = await get_node_execution(review.nodeExecId)
|
||||
node_id = node_exec.node_id if node_exec else review.nodeExecId
|
||||
result[review.nodeExecId] = PendingHumanReviewModel.from_db(
|
||||
|
||||
@@ -666,16 +666,10 @@ class CredentialsFieldInfo(BaseModel, Generic[CP, CT]):
|
||||
if not (self.discriminator and self.discriminator_mapping):
|
||||
return self
|
||||
|
||||
try:
|
||||
provider = self.discriminator_mapping[discriminator_value]
|
||||
except KeyError:
|
||||
raise ValueError(
|
||||
f"Model '{discriminator_value}' is not supported. "
|
||||
"It may have been deprecated. Please update your agent configuration."
|
||||
)
|
||||
|
||||
return CredentialsFieldInfo(
|
||||
credentials_provider=frozenset([provider]),
|
||||
credentials_provider=frozenset(
|
||||
[self.discriminator_mapping[discriminator_value]]
|
||||
),
|
||||
credentials_types=self.supported_types,
|
||||
credentials_scopes=self.required_scopes,
|
||||
discriminator=self.discriminator,
|
||||
|
||||
@@ -679,6 +679,12 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
|
||||
default="https://cloud.langfuse.com", description="Langfuse host URL"
|
||||
)
|
||||
|
||||
# PostHog analytics
|
||||
posthog_api_key: str = Field(default="", description="PostHog API key")
|
||||
posthog_host: str = Field(
|
||||
default="https://eu.i.posthog.com", description="PostHog host URL"
|
||||
)
|
||||
|
||||
# Add more secret fields as needed
|
||||
model_config = SettingsConfigDict(
|
||||
env_file=".env",
|
||||
|
||||
@@ -1,22 +0,0 @@
|
||||
-- Migrate Claude 3.7 Sonnet to Claude 4.5 Sonnet
|
||||
-- This updates all AgentNode blocks that use the deprecated Claude 3.7 Sonnet model
|
||||
-- Anthropic is retiring claude-3-7-sonnet-20250219 on February 19, 2026
|
||||
|
||||
-- Update AgentNode constant inputs
|
||||
UPDATE "AgentNode"
|
||||
SET "constantInput" = JSONB_SET(
|
||||
"constantInput"::jsonb,
|
||||
'{model}',
|
||||
'"claude-sonnet-4-5-20250929"'::jsonb
|
||||
)
|
||||
WHERE "constantInput"::jsonb->>'model' = 'claude-3-7-sonnet-20250219';
|
||||
|
||||
-- Update AgentPreset input overrides (stored in AgentNodeExecutionInputOutput)
|
||||
UPDATE "AgentNodeExecutionInputOutput"
|
||||
SET "data" = JSONB_SET(
|
||||
"data"::jsonb,
|
||||
'{model}',
|
||||
'"claude-sonnet-4-5-20250929"'::jsonb
|
||||
)
|
||||
WHERE "agentPresetId" IS NOT NULL
|
||||
AND "data"::jsonb->>'model' = 'claude-3-7-sonnet-20250219';
|
||||
12
autogpt_platform/backend/poetry.lock
generated
12
autogpt_platform/backend/poetry.lock
generated
@@ -4204,14 +4204,14 @@ strenum = {version = ">=0.4.9,<0.5.0", markers = "python_version < \"3.11\""}
|
||||
|
||||
[[package]]
|
||||
name = "posthog"
|
||||
version = "6.1.1"
|
||||
version = "7.6.0"
|
||||
description = "Integrate PostHog into any python application."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
python-versions = ">=3.10"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "posthog-6.1.1-py3-none-any.whl", hash = "sha256:329fd3d06b4d54cec925f47235bd8e327c91403c2f9ec38f1deb849535934dba"},
|
||||
{file = "posthog-6.1.1.tar.gz", hash = "sha256:b453f54c4a2589da859fd575dd3bf86fcb40580727ec399535f268b1b9f318b8"},
|
||||
{file = "posthog-7.6.0-py3-none-any.whl", hash = "sha256:c4dd78cf77c4fecceb965f86066e5ac37886ef867d68ffe75a1db5d681d7d9ad"},
|
||||
{file = "posthog-7.6.0.tar.gz", hash = "sha256:941dfd278ee427c9b14640f09b35b5bb52a71bdf028d7dbb7307e1838fd3002e"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -4225,7 +4225,7 @@ typing-extensions = ">=4.2.0"
|
||||
[package.extras]
|
||||
dev = ["django-stubs", "lxml", "mypy", "mypy-baseline", "packaging", "pre-commit", "pydantic", "ruff", "setuptools", "tomli", "tomli_w", "twine", "types-mock", "types-python-dateutil", "types-requests", "types-setuptools", "types-six", "wheel"]
|
||||
langchain = ["langchain (>=0.2.0)"]
|
||||
test = ["anthropic", "coverage", "django", "freezegun (==1.5.1)", "google-genai", "langchain-anthropic (>=0.3.15)", "langchain-community (>=0.3.25)", "langchain-core (>=0.3.65)", "langchain-openai (>=0.3.22)", "langgraph (>=0.4.8)", "mock (>=2.0.0)", "openai", "parameterized (>=0.8.1)", "pydantic", "pytest", "pytest-asyncio", "pytest-timeout"]
|
||||
test = ["anthropic (>=0.72)", "coverage", "django", "freezegun (==1.5.1)", "google-genai", "langchain-anthropic (>=1.0)", "langchain-community (>=0.4)", "langchain-core (>=1.0)", "langchain-openai (>=1.0)", "langgraph (>=1.0)", "mock (>=2.0.0)", "openai (>=2.0)", "parameterized (>=0.8.1)", "pydantic", "pytest", "pytest-asyncio", "pytest-timeout"]
|
||||
|
||||
[[package]]
|
||||
name = "postmarker"
|
||||
@@ -7512,4 +7512,4 @@ cffi = ["cffi (>=1.11)"]
|
||||
[metadata]
|
||||
lock-version = "2.1"
|
||||
python-versions = ">=3.10,<3.14"
|
||||
content-hash = "18b92e09596298c82432e4d0a85cb6d80a40b4229bee0a0c15f0529fd6cb21a4"
|
||||
content-hash = "ee5742dc1a9df50dfc06d4b26a1682cbb2b25cab6b79ce5625ec272f93e4f4bf"
|
||||
|
||||
@@ -85,6 +85,7 @@ exa-py = "^1.14.20"
|
||||
croniter = "^6.0.0"
|
||||
stagehand = "^0.5.1"
|
||||
gravitas-md2gdocs = "^0.1.0"
|
||||
posthog = "^7.6.0"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
aiohappyeyeballs = "^2.6.1"
|
||||
|
||||
@@ -30,3 +30,7 @@ NEXT_PUBLIC_TURNSTILE=disabled
|
||||
|
||||
# PR previews
|
||||
NEXT_PUBLIC_PREVIEW_STEALING_DEV=
|
||||
|
||||
# PostHog Analytics
|
||||
NEXT_PUBLIC_POSTHOG_KEY=
|
||||
NEXT_PUBLIC_POSTHOG_HOST=https://eu.i.posthog.com
|
||||
|
||||
@@ -34,6 +34,7 @@
|
||||
"@hookform/resolvers": "5.2.2",
|
||||
"@next/third-parties": "15.4.6",
|
||||
"@phosphor-icons/react": "2.1.10",
|
||||
"@posthog/react": "1.7.0",
|
||||
"@radix-ui/react-accordion": "1.2.12",
|
||||
"@radix-ui/react-alert-dialog": "1.1.15",
|
||||
"@radix-ui/react-avatar": "1.1.10",
|
||||
@@ -91,6 +92,7 @@
|
||||
"next-themes": "0.4.6",
|
||||
"nuqs": "2.7.2",
|
||||
"party-js": "2.2.0",
|
||||
"posthog-js": "1.334.1",
|
||||
"react": "18.3.1",
|
||||
"react-currency-input-field": "4.0.3",
|
||||
"react-day-picker": "9.11.1",
|
||||
@@ -120,7 +122,6 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@chromatic-com/storybook": "4.1.2",
|
||||
"happy-dom": "20.3.4",
|
||||
"@opentelemetry/instrumentation": "0.209.0",
|
||||
"@playwright/test": "1.56.1",
|
||||
"@storybook/addon-a11y": "9.1.5",
|
||||
@@ -148,6 +149,7 @@
|
||||
"eslint": "8.57.1",
|
||||
"eslint-config-next": "15.5.7",
|
||||
"eslint-plugin-storybook": "9.1.5",
|
||||
"happy-dom": "20.3.4",
|
||||
"import-in-the-middle": "2.0.2",
|
||||
"msw": "2.11.6",
|
||||
"msw-storybook-addon": "2.0.6",
|
||||
|
||||
246
autogpt_platform/frontend/pnpm-lock.yaml
generated
246
autogpt_platform/frontend/pnpm-lock.yaml
generated
@@ -23,6 +23,9 @@ importers:
|
||||
'@phosphor-icons/react':
|
||||
specifier: 2.1.10
|
||||
version: 2.1.10(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
|
||||
'@posthog/react':
|
||||
specifier: 1.7.0
|
||||
version: 1.7.0(@types/react@18.3.17)(posthog-js@1.334.1)(react@18.3.1)
|
||||
'@radix-ui/react-accordion':
|
||||
specifier: 1.2.12
|
||||
version: 1.2.12(@types/react-dom@18.3.5(@types/react@18.3.17))(@types/react@18.3.17)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
|
||||
@@ -194,6 +197,9 @@ importers:
|
||||
party-js:
|
||||
specifier: 2.2.0
|
||||
version: 2.2.0
|
||||
posthog-js:
|
||||
specifier: 1.334.1
|
||||
version: 1.334.1
|
||||
react:
|
||||
specifier: 18.3.1
|
||||
version: 18.3.1
|
||||
@@ -1794,6 +1800,10 @@ packages:
|
||||
'@open-draft/until@2.1.0':
|
||||
resolution: {integrity: sha512-U69T3ItWHvLwGg5eJ0n3I62nWuE6ilHlmz7zM0npLBRvPRd7e6NYmg54vvRtP5mZG7kZqZCFVdsTWo7BPtBujg==}
|
||||
|
||||
'@opentelemetry/api-logs@0.208.0':
|
||||
resolution: {integrity: sha512-CjruKY9V6NMssL/T1kAFgzosF1v9o6oeN+aX5JB/C/xPNtmgIJqcXHG7fA82Ou1zCpWGl4lROQUKwUNE1pMCyg==}
|
||||
engines: {node: '>=8.0.0'}
|
||||
|
||||
'@opentelemetry/api-logs@0.209.0':
|
||||
resolution: {integrity: sha512-xomnUNi7TiAGtOgs0tb54LyrjRZLu9shJGGwkcN7NgtiPYOpNnKLkRJtzZvTjD/w6knSZH9sFZcUSUovYOPg6A==}
|
||||
engines: {node: '>=8.0.0'}
|
||||
@@ -1814,6 +1824,12 @@ packages:
|
||||
peerDependencies:
|
||||
'@opentelemetry/api': '>=1.0.0 <1.10.0'
|
||||
|
||||
'@opentelemetry/exporter-logs-otlp-http@0.208.0':
|
||||
resolution: {integrity: sha512-jOv40Bs9jy9bZVLo/i8FwUiuCvbjWDI+ZW13wimJm4LjnlwJxGgB+N/VWOZUTpM+ah/awXeQqKdNlpLf2EjvYg==}
|
||||
engines: {node: ^18.19.0 || >=20.6.0}
|
||||
peerDependencies:
|
||||
'@opentelemetry/api': ^1.3.0
|
||||
|
||||
'@opentelemetry/instrumentation-amqplib@0.55.0':
|
||||
resolution: {integrity: sha512-5ULoU8p+tWcQw5PDYZn8rySptGSLZHNX/7srqo2TioPnAAcvTy6sQFQXsNPrAnyRRtYGMetXVyZUy5OaX1+IfA==}
|
||||
engines: {node: ^18.19.0 || >=20.6.0}
|
||||
@@ -1952,6 +1968,18 @@ packages:
|
||||
peerDependencies:
|
||||
'@opentelemetry/api': ^1.3.0
|
||||
|
||||
  '@opentelemetry/otlp-exporter-base@0.208.0':
    resolution: {integrity: sha512-gMd39gIfVb2OgxldxUtOwGJYSH8P1kVFFlJLuut32L6KgUC4gl1dMhn+YC2mGn0bDOiQYSk/uHOdSjuKp58vvA==}
    engines: {node: ^18.19.0 || >=20.6.0}
    peerDependencies:
      '@opentelemetry/api': ^1.3.0

  '@opentelemetry/otlp-transformer@0.208.0':
    resolution: {integrity: sha512-DCFPY8C6lAQHUNkzcNT9R+qYExvsk6C5Bto2pbNxgicpcSWbe2WHShLxkOxIdNcBiYPdVHv/e7vH7K6TI+C+fQ==}
    engines: {node: ^18.19.0 || >=20.6.0}
    peerDependencies:
      '@opentelemetry/api': ^1.3.0

  '@opentelemetry/redis-common@0.38.2':
    resolution: {integrity: sha512-1BCcU93iwSRZvDAgwUxC/DV4T/406SkMfxGqu5ojc3AvNI+I9GhV7v0J1HljsczuuhcnFLYqD5VmwVXfCGHzxA==}
    engines: {node: ^18.19.0 || >=20.6.0}
@@ -1962,6 +1990,18 @@ packages:
    peerDependencies:
      '@opentelemetry/api': '>=1.3.0 <1.10.0'

  '@opentelemetry/sdk-logs@0.208.0':
    resolution: {integrity: sha512-QlAyL1jRpOeaqx7/leG1vJMp84g0xKP6gJmfELBpnI4O/9xPX+Hu5m1POk9Kl+veNkyth5t19hRlN6tNY1sjbA==}
    engines: {node: ^18.19.0 || >=20.6.0}
    peerDependencies:
      '@opentelemetry/api': '>=1.4.0 <1.10.0'

  '@opentelemetry/sdk-metrics@2.2.0':
    resolution: {integrity: sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw==}
    engines: {node: ^18.19.0 || >=20.6.0}
    peerDependencies:
      '@opentelemetry/api': '>=1.9.0 <1.10.0'

  '@opentelemetry/sdk-trace-base@2.2.0':
    resolution: {integrity: sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw==}
    engines: {node: ^18.19.0 || >=20.6.0}
@@ -2050,11 +2090,57 @@ packages:
      webpack-plugin-serve:
        optional: true

  '@posthog/core@1.13.0':
    resolution: {integrity: sha512-knjncrk7qRmssFRbGzBl1Tunt21GRpe0Wv+uVelyL0Rh7PdQUsgguulzXFTps8hA6wPwTU4kq85qnbAJ3eH6Wg==}

  '@posthog/react@1.7.0':
    resolution: {integrity: sha512-pM7GL7z/rKjiIwosbRiQA3buhLI6vUo+wg+T/ZrVZC7O5bVU07TfgNZTcuOj8E9dx7vDbfNrc1kjDN7PKMM8ug==}
    peerDependencies:
      '@types/react': '>=16.8.0'
      posthog-js: '>=1.257.2'
      react: '>=16.8.0'
    peerDependenciesMeta:
      '@types/react':
        optional: true

  '@posthog/types@1.334.1':
    resolution: {integrity: sha512-ypFnwTO7qbV7icylLbujbamPdQXbJq0a61GUUBnJAeTbBw/qYPIss5IRYICcbCj0uunQrwD7/CGxVb5TOYKWgA==}

  '@prisma/instrumentation@6.19.0':
    resolution: {integrity: sha512-QcuYy25pkXM8BJ37wVFBO7Zh34nyRV1GOb2n3lPkkbRYfl4hWl3PTcImP41P0KrzVXfa/45p6eVCos27x3exIg==}
    peerDependencies:
      '@opentelemetry/api': ^1.8

  '@protobufjs/aspromise@1.1.2':
    resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==}

  '@protobufjs/base64@1.1.2':
    resolution: {integrity: sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==}

  '@protobufjs/codegen@2.0.4':
    resolution: {integrity: sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==}

  '@protobufjs/eventemitter@1.1.0':
    resolution: {integrity: sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==}

  '@protobufjs/fetch@1.1.0':
    resolution: {integrity: sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==}

  '@protobufjs/float@1.0.2':
    resolution: {integrity: sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==}

  '@protobufjs/inquire@1.1.0':
    resolution: {integrity: sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==}

  '@protobufjs/path@1.1.2':
    resolution: {integrity: sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==}

  '@protobufjs/pool@1.1.0':
    resolution: {integrity: sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==}

  '@protobufjs/utf8@1.1.0':
    resolution: {integrity: sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==}

  '@radix-ui/number@1.1.1':
    resolution: {integrity: sha512-MkKCwxlXTgz6CFoJx3pCwn07GKp36+aZyu/u2Ln2VrA5DcdyCZkASEDBTd8x5whTQQL5CiYf4prXKLcgQdv29g==}
@@ -3401,6 +3487,9 @@ packages:
  '@types/tedious@4.0.14':
    resolution: {integrity: sha512-KHPsfX/FoVbUGbyYvk1q9MMQHLPeRZhRJZdO45Q4YjvFkv4hMNghCWTvy7rdKessBsmtz4euWCWAB6/tVpI1Iw==}

  '@types/trusted-types@2.0.7':
    resolution: {integrity: sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==}

  '@types/unist@2.0.11':
    resolution: {integrity: sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==}
@@ -4278,6 +4367,9 @@ packages:
  core-js-pure@3.47.0:
    resolution: {integrity: sha512-BcxeDbzUrRnXGYIVAGFtcGQVNpFcUhVjr6W7F8XktvQW2iJP9e66GP6xdKotCRFlrxBvNIBrhwKteRXqMV86Nw==}

  core-js@3.48.0:
    resolution: {integrity: sha512-zpEHTy1fjTMZCKLHUZoVeylt9XrzaIN2rbPXEt0k+q7JE5CkCZdo6bNq55bn24a69CH7ErAVLKijxJja4fw+UQ==}

  core-util-is@1.0.3:
    resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==}
@@ -4569,6 +4661,9 @@ packages:
    resolution: {integrity: sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ==}
    engines: {node: '>= 4'}

  dompurify@3.3.1:
    resolution: {integrity: sha512-qkdCKzLNtrgPFP1Vo+98FRzJnBRGe4ffyCea9IwHB1fyxPOeNTHpLKYGd4Uk9xvNoH0ZoOjwZxNptyMwqrId1Q==}

  domutils@2.8.0:
    resolution: {integrity: sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==}
@@ -4939,6 +5034,9 @@ packages:
      picomatch:
        optional: true

  fflate@0.4.8:
    resolution: {integrity: sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA==}

  file-entry-cache@6.0.1:
    resolution: {integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==}
    engines: {node: ^10.12.0 || >=12.0.0}
@@ -5745,6 +5843,9 @@ packages:
    resolution: {integrity: sha512-HgMmCqIJSAKqo68l0rS2AanEWfkxaZ5wNiEFb5ggm08lDs9Xl2KxBlX3PTcaD2chBM1gXAYf491/M2Rv8Jwayg==}
    engines: {node: '>= 0.6.0'}

  long@5.3.2:
    resolution: {integrity: sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==}

  longest-streak@3.1.0:
    resolution: {integrity: sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==}
@@ -6534,6 +6635,12 @@ packages:
    resolution: {integrity: sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==}
    engines: {node: '>=0.10.0'}

  posthog-js@1.334.1:
    resolution: {integrity: sha512-5cDzLICr2afnwX/cR9fwoLC0vN0Nb5gP5HiCigzHkgHdO+E3WsYefla3EFMQz7U4r01CBPZ+nZ9/srkzeACxtQ==}

  preact@10.28.2:
    resolution: {integrity: sha512-lbteaWGzGHdlIuiJ0l2Jq454m6kcpI1zNje6d8MlGAFlYvP2GO4ibnat7P74Esfz4sPTdM6UxtTwh/d3pwM9JA==}

  prelude-ls@1.2.1:
    resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==}
    engines: {node: '>= 0.8.0'}
@@ -6622,6 +6729,10 @@ packages:
  property-information@7.1.0:
    resolution: {integrity: sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==}

  protobufjs@7.5.4:
    resolution: {integrity: sha512-CvexbZtbov6jW2eXAvLukXjXUW1TzFaivC46BpWc/3BpcCysb5Vffu+B3XHMm8lVEuy2Mm4XGex8hBSg1yapPg==}
    engines: {node: '>=12.0.0'}

  proxy-from-env@1.1.0:
    resolution: {integrity: sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==}
@@ -6643,6 +6754,9 @@ packages:
    resolution: {integrity: sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==}
    engines: {node: '>=0.6'}

  query-selector-shadow-dom@1.0.1:
    resolution: {integrity: sha512-lT5yCqEBgfoMYpf3F2xQRK7zEr1rhIIZuceDK6+xRkJQ4NMbHTwXqk4NkwDwQMNqXgG9r9fyHnzwNVs6zV5KRw==}

  querystring-es3@0.2.1:
    resolution: {integrity: sha512-773xhDQnZBMFobEiztv8LIl70ch5MSF/jUQVlhwFyBILqq96anmoctVIYz+ZRp0qbCKATTn6ev02M3r7Ga5vqA==}
    engines: {node: '>=0.4.x'}
@@ -7821,6 +7935,9 @@ packages:
  web-namespaces@2.0.1:
    resolution: {integrity: sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==}

  web-vitals@5.1.0:
    resolution: {integrity: sha512-ArI3kx5jI0atlTtmV0fWU3fjpLmq/nD3Zr1iFFlJLaqa5wLBkUSzINwBPySCX/8jRyjlmy1Volw1kz1g9XE4Jg==}

  webidl-conversions@3.0.1:
    resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==}

@@ -9420,6 +9537,10 @@ snapshots:

  '@open-draft/until@2.1.0': {}

  '@opentelemetry/api-logs@0.208.0':
    dependencies:
      '@opentelemetry/api': 1.9.0

  '@opentelemetry/api-logs@0.209.0':
    dependencies:
      '@opentelemetry/api': 1.9.0
@@ -9435,6 +9556,15 @@ snapshots:
      '@opentelemetry/api': 1.9.0
      '@opentelemetry/semantic-conventions': 1.38.0

  '@opentelemetry/exporter-logs-otlp-http@0.208.0(@opentelemetry/api@1.9.0)':
    dependencies:
      '@opentelemetry/api': 1.9.0
      '@opentelemetry/api-logs': 0.208.0
      '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.0)
      '@opentelemetry/otlp-exporter-base': 0.208.0(@opentelemetry/api@1.9.0)
      '@opentelemetry/otlp-transformer': 0.208.0(@opentelemetry/api@1.9.0)
      '@opentelemetry/sdk-logs': 0.208.0(@opentelemetry/api@1.9.0)

  '@opentelemetry/instrumentation-amqplib@0.55.0(@opentelemetry/api@1.9.0)':
    dependencies:
      '@opentelemetry/api': 1.9.0
@@ -9629,6 +9759,23 @@ snapshots:
    transitivePeerDependencies:
      - supports-color

  '@opentelemetry/otlp-exporter-base@0.208.0(@opentelemetry/api@1.9.0)':
    dependencies:
      '@opentelemetry/api': 1.9.0
      '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.0)
      '@opentelemetry/otlp-transformer': 0.208.0(@opentelemetry/api@1.9.0)

  '@opentelemetry/otlp-transformer@0.208.0(@opentelemetry/api@1.9.0)':
    dependencies:
      '@opentelemetry/api': 1.9.0
      '@opentelemetry/api-logs': 0.208.0
      '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.0)
      '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.0)
      '@opentelemetry/sdk-logs': 0.208.0(@opentelemetry/api@1.9.0)
      '@opentelemetry/sdk-metrics': 2.2.0(@opentelemetry/api@1.9.0)
      '@opentelemetry/sdk-trace-base': 2.2.0(@opentelemetry/api@1.9.0)
      protobufjs: 7.5.4

  '@opentelemetry/redis-common@0.38.2': {}

  '@opentelemetry/resources@2.2.0(@opentelemetry/api@1.9.0)':
@@ -9637,6 +9784,19 @@ snapshots:
      '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.0)
      '@opentelemetry/semantic-conventions': 1.38.0

  '@opentelemetry/sdk-logs@0.208.0(@opentelemetry/api@1.9.0)':
    dependencies:
      '@opentelemetry/api': 1.9.0
      '@opentelemetry/api-logs': 0.208.0
      '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.0)
      '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.0)

  '@opentelemetry/sdk-metrics@2.2.0(@opentelemetry/api@1.9.0)':
    dependencies:
      '@opentelemetry/api': 1.9.0
      '@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.0)
      '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.0)

  '@opentelemetry/sdk-trace-base@2.2.0(@opentelemetry/api@1.9.0)':
    dependencies:
      '@opentelemetry/api': 1.9.0
@@ -9801,6 +9961,19 @@ snapshots:
      type-fest: 4.41.0
      webpack-hot-middleware: 2.26.1

  '@posthog/core@1.13.0':
    dependencies:
      cross-spawn: 7.0.6

  '@posthog/react@1.7.0(@types/react@18.3.17)(posthog-js@1.334.1)(react@18.3.1)':
    dependencies:
      posthog-js: 1.334.1
      react: 18.3.1
    optionalDependencies:
      '@types/react': 18.3.17

  '@posthog/types@1.334.1': {}

  '@prisma/instrumentation@6.19.0(@opentelemetry/api@1.9.0)':
    dependencies:
      '@opentelemetry/api': 1.9.0
@@ -9808,6 +9981,29 @@ snapshots:
    transitivePeerDependencies:
      - supports-color

  '@protobufjs/aspromise@1.1.2': {}

  '@protobufjs/base64@1.1.2': {}

  '@protobufjs/codegen@2.0.4': {}

  '@protobufjs/eventemitter@1.1.0': {}

  '@protobufjs/fetch@1.1.0':
    dependencies:
      '@protobufjs/aspromise': 1.1.2
      '@protobufjs/inquire': 1.1.0

  '@protobufjs/float@1.0.2': {}

  '@protobufjs/inquire@1.1.0': {}

  '@protobufjs/path@1.1.2': {}

  '@protobufjs/pool@1.1.0': {}

  '@protobufjs/utf8@1.1.0': {}

  '@radix-ui/number@1.1.1': {}

  '@radix-ui/primitive@1.1.3': {}
@@ -11426,6 +11622,9 @@ snapshots:
    dependencies:
      '@types/node': 24.10.0

  '@types/trusted-types@2.0.7':
    optional: true

  '@types/unist@2.0.11': {}

  '@types/unist@3.0.3': {}
@@ -12327,6 +12526,8 @@ snapshots:

  core-js-pure@3.47.0: {}

  core-js@3.48.0: {}

  core-util-is@1.0.3: {}

  cosmiconfig@7.1.0:
@@ -12636,6 +12837,10 @@ snapshots:
    dependencies:
      domelementtype: 2.3.0

  dompurify@3.3.1:
    optionalDependencies:
      '@types/trusted-types': 2.0.7

  domutils@2.8.0:
    dependencies:
      dom-serializer: 1.4.1
@@ -13205,6 +13410,8 @@ snapshots:
    optionalDependencies:
      picomatch: 4.0.3

  fflate@0.4.8: {}

  file-entry-cache@6.0.1:
    dependencies:
      flat-cache: 3.2.0
@@ -14092,6 +14299,8 @@ snapshots:

  loglevel@1.9.2: {}

  long@5.3.2: {}

  longest-streak@3.1.0: {}

  loose-envify@1.4.0:
@@ -15154,6 +15363,24 @@ snapshots:
    dependencies:
      xtend: 4.0.2

  posthog-js@1.334.1:
    dependencies:
      '@opentelemetry/api': 1.9.0
      '@opentelemetry/api-logs': 0.208.0
      '@opentelemetry/exporter-logs-otlp-http': 0.208.0(@opentelemetry/api@1.9.0)
      '@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.0)
      '@opentelemetry/sdk-logs': 0.208.0(@opentelemetry/api@1.9.0)
      '@posthog/core': 1.13.0
      '@posthog/types': 1.334.1
      core-js: 3.48.0
      dompurify: 3.3.1
      fflate: 0.4.8
      preact: 10.28.2
      query-selector-shadow-dom: 1.0.1
      web-vitals: 5.1.0

  preact@10.28.2: {}

  prelude-ls@1.2.1: {}

  prettier-plugin-tailwindcss@0.7.1(prettier@3.6.2):
@@ -15187,6 +15414,21 @@ snapshots:

  property-information@7.1.0: {}

  protobufjs@7.5.4:
    dependencies:
      '@protobufjs/aspromise': 1.1.2
      '@protobufjs/base64': 1.1.2
      '@protobufjs/codegen': 2.0.4
      '@protobufjs/eventemitter': 1.1.0
      '@protobufjs/fetch': 1.1.0
      '@protobufjs/float': 1.0.2
      '@protobufjs/inquire': 1.1.0
      '@protobufjs/path': 1.1.2
      '@protobufjs/pool': 1.1.0
      '@protobufjs/utf8': 1.1.0
      '@types/node': 24.10.0
      long: 5.3.2

  proxy-from-env@1.1.0: {}

  public-encrypt@4.0.3:
@@ -15208,6 +15450,8 @@ snapshots:
    dependencies:
      side-channel: 1.1.0

  query-selector-shadow-dom@1.0.1: {}

  querystring-es3@0.2.1: {}

  queue-microtask@1.2.3: {}
@@ -16619,6 +16863,8 @@ snapshots:

  web-namespaces@2.0.1: {}

  web-vitals@5.1.0: {}

  webidl-conversions@3.0.1: {}

  webidl-conversions@8.0.1:

@@ -38,8 +38,12 @@ export const AgentOutputs = ({ flowID }: { flowID: string | null }) => {

  return outputNodes
    .map((node) => {
      const executionResult = node.data.nodeExecutionResult;
      const outputData = executionResult?.output_data?.output;
      const executionResults = node.data.nodeExecutionResults || [];
      const latestResult =
        executionResults.length > 0
          ? executionResults[executionResults.length - 1]
          : undefined;
      const outputData = latestResult?.output_data?.output;

      const renderer = globalRegistry.getRenderer(outputData);

@@ -153,6 +153,9 @@ export const useRunInputDialog = ({
      Object.entries(credentialValues).filter(([_, cred]) => cred && cred.id),
    );

    useNodeStore.getState().clearAllNodeExecutionResults();
    useNodeStore.getState().cleanNodesStatuses();

    await executeGraph({
      graphId: flowID ?? "",
      graphVersion: flowVersion || null,

@@ -34,7 +34,7 @@ export type CustomNodeData = {
  uiType: BlockUIType;
  block_id: string;
  status?: AgentExecutionStatus;
  nodeExecutionResult?: NodeExecutionResult;
  nodeExecutionResults?: NodeExecutionResult[];
  staticOutput?: boolean;
  // TODO : We need better type safety for the following backend fields.
  costs: BlockCost[];

@@ -75,7 +75,11 @@ export const CustomNode: React.FC<NodeProps<CustomNode>> = React.memo(
      (value) => value !== null && value !== undefined && value !== "",
    );

    const outputData = data.nodeExecutionResult?.output_data;
    const latestResult =
      data.nodeExecutionResults && data.nodeExecutionResults.length > 0
        ? data.nodeExecutionResults[data.nodeExecutionResults.length - 1]
        : undefined;
    const outputData = latestResult?.output_data;
    const hasOutputError =
      typeof outputData === "object" &&
      outputData !== null &&

@@ -14,10 +14,15 @@ import { useNodeOutput } from "./useNodeOutput";
import { ViewMoreData } from "./components/ViewMoreData";

export const NodeDataRenderer = ({ nodeId }: { nodeId: string }) => {
  const { outputData, copiedKey, handleCopy, executionResultId, inputData } =
    useNodeOutput(nodeId);
  const {
    latestOutputData,
    copiedKey,
    handleCopy,
    executionResultId,
    latestInputData,
  } = useNodeOutput(nodeId);

  if (Object.keys(outputData).length === 0) {
  if (Object.keys(latestOutputData).length === 0) {
    return null;
  }

@@ -41,18 +46,19 @@ export const NodeDataRenderer = ({ nodeId }: { nodeId: string }) => {
          <div className="space-y-2">
            <Text variant="small-medium">Input</Text>

            <ContentRenderer value={inputData} shortContent={false} />
            <ContentRenderer value={latestInputData} shortContent={false} />

            <div className="mt-1 flex justify-end gap-1">
              <NodeDataViewer
                data={inputData}
                pinName="Input"
                nodeId={nodeId}
                execId={executionResultId}
                dataType="input"
              />
              <Button
                variant="secondary"
                size="small"
                onClick={() => handleCopy("input", inputData)}
                onClick={() => handleCopy("input", latestInputData)}
                className={cn(
                  "h-fit min-w-0 gap-1.5 border border-zinc-200 p-2 text-black hover:text-slate-900",
                  copiedKey === "input" &&
@@ -68,70 +74,72 @@ export const NodeDataRenderer = ({ nodeId }: { nodeId: string }) => {
            </div>
          </div>

          {Object.entries(outputData)
          {Object.entries(latestOutputData)
            .slice(0, 2)
            .map(([key, value]) => (
              <div key={key} className="flex flex-col gap-2">
                <div className="flex items-center gap-2">
                  <Text
                    variant="small-medium"
                    className="!font-semibold text-slate-600"
                  >
                    Pin:
                  </Text>
                  <Text variant="small" className="text-slate-700">
                    {beautifyString(key)}
                  </Text>
                </div>
                <div className="w-full space-y-2">
                  <Text
                    variant="small"
                    className="!font-semibold text-slate-600"
                  >
                    Data:
                  </Text>
                  <div className="relative space-y-2">
                    {value.map((item, index) => (
                      <div key={index}>
                        <ContentRenderer value={item} shortContent={true} />
                      </div>
                    ))}
            .map(([key, value]) => {
              return (
                <div key={key} className="flex flex-col gap-2">
                  <div className="flex items-center gap-2">
                    <Text
                      variant="small-medium"
                      className="!font-semibold text-slate-600"
                    >
                      Pin:
                    </Text>
                    <Text variant="small" className="text-slate-700">
                      {beautifyString(key)}
                    </Text>
                  </div>
                  <div className="w-full space-y-2">
                    <Text
                      variant="small"
                      className="!font-semibold text-slate-600"
                    >
                      Data:
                    </Text>
                    <div className="relative space-y-2">
                      {value.map((item, index) => (
                        <div key={index}>
                          <ContentRenderer
                            value={item}
                            shortContent={true}
                          />
                        </div>
                      ))}

                    <div className="mt-1 flex justify-end gap-1">
                      <NodeDataViewer
                        data={value}
                        pinName={key}
                        execId={executionResultId}
                      />
                      <Button
                        variant="secondary"
                        size="small"
                        onClick={() => handleCopy(key, value)}
                        className={cn(
                          "h-fit min-w-0 gap-1.5 border border-zinc-200 p-2 text-black hover:text-slate-900",
                          copiedKey === key &&
                            "border-green-400 bg-green-100 hover:border-green-400 hover:bg-green-200",
                        )}
                      >
                        {copiedKey === key ? (
                          <CheckIcon size={12} className="text-green-600" />
                        ) : (
                          <CopyIcon size={12} />
                        )}
                      </Button>
                      <div className="mt-1 flex justify-end gap-1">
                        <NodeDataViewer
                          pinName={key}
                          nodeId={nodeId}
                          execId={executionResultId}
                        />
                        <Button
                          variant="secondary"
                          size="small"
                          onClick={() => handleCopy(key, value)}
                          className={cn(
                            "h-fit min-w-0 gap-1.5 border border-zinc-200 p-2 text-black hover:text-slate-900",
                            copiedKey === key &&
                              "border-green-400 bg-green-100 hover:border-green-400 hover:bg-green-200",
                          )}
                        >
                          {copiedKey === key ? (
                            <CheckIcon
                              size={12}
                              className="text-green-600"
                            />
                          ) : (
                            <CopyIcon size={12} />
                          )}
                        </Button>
                      </div>
                    </div>
                  </div>
                </div>
              </div>
            ))}
              );
            })}
          </div>

          {Object.keys(outputData).length > 2 && (
            <ViewMoreData
              outputData={outputData}
              execId={executionResultId}
            />
          )}
          <ViewMoreData nodeId={nodeId} />
        </AccordionContent>
      </AccordionItem>
    </Accordion>

@@ -19,22 +19,51 @@ import {
  CopyIcon,
  DownloadIcon,
} from "@phosphor-icons/react";
import { FC } from "react";
import React, { FC } from "react";
import { useNodeDataViewer } from "./useNodeDataViewer";
import { useNodeStore } from "@/app/(platform)/build/stores/nodeStore";
import { useShallow } from "zustand/react/shallow";
import { NodeDataType } from "../../helpers";

interface NodeDataViewerProps {
  data: any;
export interface NodeDataViewerProps {
  data?: any;
  pinName: string;
  nodeId?: string;
  execId?: string;
  isViewMoreData?: boolean;
  dataType?: NodeDataType;
}

export const NodeDataViewer: FC<NodeDataViewerProps> = ({
  data,
  pinName,
  nodeId,
  execId = "N/A",
  isViewMoreData = false,
  dataType = "output",
}) => {
  const executionResults = useNodeStore(
    useShallow((state) =>
      nodeId ? state.getNodeExecutionResults(nodeId) : [],
    ),
  );
  const latestInputData = useNodeStore(
    useShallow((state) =>
      nodeId ? state.getLatestNodeInputData(nodeId) : undefined,
    ),
  );
  const accumulatedOutputData = useNodeStore(
    useShallow((state) =>
      nodeId ? state.getAccumulatedNodeOutputData(nodeId) : {},
    ),
  );

  const resolvedData =
    data ??
    (dataType === "input"
      ? (latestInputData ?? {})
      : (accumulatedOutputData[pinName] ?? []));

  const {
    outputItems,
    copyExecutionId,
@@ -42,7 +71,20 @@ export const NodeDataViewer: FC<NodeDataViewerProps> = ({
    handleDownloadItem,
    dataArray,
    copiedIndex,
  } = useNodeDataViewer(data, pinName, execId);
    groupedExecutions,
    totalGroupedItems,
    handleCopyGroupedItem,
    handleDownloadGroupedItem,
    copiedKey,
  } = useNodeDataViewer(
    resolvedData,
    pinName,
    execId,
    executionResults,
    dataType,
  );

  const shouldGroupExecutions = groupedExecutions.length > 0;
  return (
    <Dialog styling={{ width: "600px" }}>
      <TooltipProvider>
@@ -68,44 +110,141 @@
            <div className="flex items-center gap-4">
              <div className="flex items-center gap-2">
                <Text variant="large-medium" className="text-slate-900">
                  Full Output Preview
                  Full {dataType === "input" ? "Input" : "Output"} Preview
                </Text>
              </div>
              <div className="rounded-full border border-slate-300 bg-slate-100 px-3 py-1.5 text-xs font-medium text-black">
                {dataArray.length} item{dataArray.length !== 1 ? "s" : ""} total
                {shouldGroupExecutions ? totalGroupedItems : dataArray.length}{" "}
                item
                {shouldGroupExecutions
                  ? totalGroupedItems !== 1
                    ? "s"
                    : ""
                  : dataArray.length !== 1
                    ? "s"
                    : ""}{" "}
                total
              </div>
            </div>
            <div className="text-sm text-gray-600">
              <div className="flex items-center gap-2">
                <Text variant="body" className="text-slate-600">
                  Execution ID:
                </Text>
                <Text
                  variant="body-medium"
                  className="rounded-full border border-gray-300 bg-gray-50 px-2 py-1 font-mono text-xs"
                >
                  {execId}
                </Text>
                <Button
                  variant="ghost"
                  size="small"
                  onClick={copyExecutionId}
                  className="h-6 w-6 min-w-0 p-0"
                >
                  <CopyIcon size={14} />
                </Button>
              </div>
              <div className="mt-2">
                Pin:{" "}
                <span className="font-semibold">{beautifyString(pinName)}</span>
              </div>
              {shouldGroupExecutions ? (
                <div>
                  Pin:{" "}
                  <span className="font-semibold">{beautifyString(pinName)}</span>
                </div>
              ) : (
                <>
                  <div className="flex items-center gap-2">
                    <Text variant="body" className="text-slate-600">
                      Execution ID:
                    </Text>
                    <Text
                      variant="body-medium"
                      className="rounded-full border border-gray-300 bg-gray-50 px-2 py-1 font-mono text-xs"
                    >
                      {execId}
                    </Text>
                    <Button
                      variant="ghost"
                      size="small"
                      onClick={copyExecutionId}
                      className="h-6 w-6 min-w-0 p-0"
                    >
                      <CopyIcon size={14} />
                    </Button>
                  </div>
                  <div className="mt-2">
                    Pin:{" "}
                    <span className="font-semibold">
                      {beautifyString(pinName)}
                    </span>
                  </div>
                </>
              )}
            </div>
          </div>

          <div className="flex-1 overflow-hidden">
            <ScrollArea className="h-full">
              <div className="my-4">
                {dataArray.length > 0 ? (
                {shouldGroupExecutions ? (
                  <div className="space-y-4">
                    {groupedExecutions.map((execution) => (
                      <div
                        key={execution.execId}
                        className="rounded-3xl border border-slate-200 bg-white p-4 shadow-sm"
                      >
                        <div className="flex items-center gap-2">
                          <Text variant="body" className="text-slate-600">
                            Execution ID:
                          </Text>
                          <Text
                            variant="body-medium"
                            className="rounded-full border border-gray-300 bg-gray-50 px-2 py-1 font-mono text-xs"
                          >
                            {execution.execId}
                          </Text>
                        </div>
                        <div className="mt-2 space-y-4">
                          {execution.outputItems.length > 0 ? (
                            execution.outputItems.map((item, index) => (
                              <div
                                key={item.key}
                                className="group flex items-start gap-4"
                              >
                                <div className="w-full flex-1">
                                  <OutputItem
                                    value={item.value}
                                    metadata={item.metadata}
                                    renderer={item.renderer}
                                  />
                                </div>

                                <div className="flex w-fit gap-3">
                                  <Button
                                    variant="secondary"
                                    className="min-w-0 p-1"
                                    size="icon"
                                    onClick={() =>
                                      handleCopyGroupedItem(
                                        execution.execId,
                                        index,
                                        item,
                                      )
                                    }
                                    aria-label="Copy item"
                                  >
                                    {copiedKey ===
                                    `${execution.execId}-${index}` ? (
                                      <CheckIcon className="size-4 text-green-600" />
                                    ) : (
                                      <CopyIcon className="size-4 text-black" />
                                    )}
                                  </Button>
                                  <Button
                                    variant="secondary"
                                    size="icon"
                                    className="min-w-0 p-1"
                                    onClick={() =>
                                      handleDownloadGroupedItem(item)
                                    }
                                    aria-label="Download item"
                                  >
                                    <DownloadIcon className="size-4 text-black" />
                                  </Button>
                                </div>
                              </div>
                            ))
                          ) : (
                            <div className="py-4 text-center text-gray-500">
                              No data available
                            </div>
                          )}
                        </div>
                      </div>
                    ))}
                  </div>
                ) : dataArray.length > 0 ? (
                  <div className="space-y-4">
                    {outputItems.map((item, index) => (
                      <div key={item.key} className="group relative">

@@ -1,82 +1,70 @@
import type { OutputMetadata } from "@/components/contextual/OutputRenderers";
import { globalRegistry } from "@/components/contextual/OutputRenderers";
import { downloadOutputs } from "@/components/contextual/OutputRenderers/utils/download";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { beautifyString } from "@/lib/utils";
import React, { useMemo, useState } from "react";
import { useState } from "react";
import type { NodeExecutionResult } from "@/app/api/__generated__/models/nodeExecutionResult";
import {
  NodeDataType,
  createOutputItems,
  getExecutionData,
  normalizeToArray,
  type OutputItem,
} from "../../helpers";

export type GroupedExecution = {
  execId: string;
  outputItems: Array<OutputItem>;
};

export const useNodeDataViewer = (
  data: any,
  pinName: string,
  execId: string,
  executionResults?: NodeExecutionResult[],
  dataType?: NodeDataType,
) => {
  const { toast } = useToast();
  const [copiedIndex, setCopiedIndex] = useState<number | null>(null);
  const [copiedKey, setCopiedKey] = useState<string | null>(null);

  // Normalize data to array format
  const dataArray = useMemo(() => {
    return Array.isArray(data) ? data : [data];
  }, [data]);
  const dataArray = Array.isArray(data) ? data : [data];

  // Prepare items for the enhanced renderer system
  const outputItems = useMemo(() => {
    if (!dataArray) return [];

    const items: Array<{
      key: string;
      label: string;
      value: unknown;
      metadata?: OutputMetadata;
      renderer: any;
    }> = [];

    dataArray.forEach((value, index) => {
      const metadata: OutputMetadata = {};

      // Extract metadata from the value if it's an object
      if (
        typeof value === "object" &&
        value !== null &&
        !React.isValidElement(value)
      ) {
        const objValue = value as any;
        if (objValue.type) metadata.type = objValue.type;
        if (objValue.mimeType) metadata.mimeType = objValue.mimeType;
        if (objValue.filename) metadata.filename = objValue.filename;
        if (objValue.language) metadata.language = objValue.language;
      }

      const renderer = globalRegistry.getRenderer(value, metadata);
      if (renderer) {
        items.push({
          key: `item-${index}`,
  const outputItems =
    !dataArray || dataArray.length === 0
      ? []
      : createOutputItems(dataArray).map((item, index) => ({
          ...item,
          label: index === 0 ? beautifyString(pinName) : "",
          value,
          metadata,
          renderer,
        });
      } else {
        // Fallback to text renderer
        const textRenderer = globalRegistry
          .getAllRenderers()
          .find((r) => r.name === "TextRenderer");
        if (textRenderer) {
          items.push({
            key: `item-${index}`,
            label: index === 0 ? beautifyString(pinName) : "",
            value:
              typeof value === "string"
                ? value
                : JSON.stringify(value, null, 2),
            metadata,
            renderer: textRenderer,
          });
        }
      }
    });
        }));

    return items;
  }, [dataArray, pinName]);
  const groupedExecutions =
    !executionResults || executionResults.length === 0
      ? []
      : [...executionResults].reverse().map((result) => {
          const rawData = getExecutionData(
            result,
            dataType || "output",
            pinName,
          );
          let dataArray: unknown[];
          if (dataType === "input") {
            dataArray =
              rawData !== undefined && rawData !== null ? [rawData] : [];
          } else {
            dataArray = normalizeToArray(rawData);
          }

          const outputItems = createOutputItems(dataArray);
          return {
            execId: result.node_exec_id,
            outputItems,
          };
        });

  const totalGroupedItems = groupedExecutions.reduce(
    (total, execution) => total + execution.outputItems.length,
    0,
  );

  const copyExecutionId = () => {
    navigator.clipboard.writeText(execId).then(() => {
@@ -122,6 +110,45 @@ export const useNodeDataViewer = (
    ]);
  };

  const handleCopyGroupedItem = async (
    execId: string,
    index: number,
    item: OutputItem,
  ) => {
    const copyContent = item.renderer.getCopyContent(item.value, item.metadata);

    if (!copyContent) {
      return;
    }

    try {
      let text: string;
      if (typeof copyContent.data === "string") {
        text = copyContent.data;
      } else if (copyContent.fallbackText) {
        text = copyContent.fallbackText;
      } else {
        return;
      }

      await navigator.clipboard.writeText(text);
      setCopiedKey(`${execId}-${index}`);
      setTimeout(() => setCopiedKey(null), 2000);
    } catch (error) {
      console.error("Failed to copy:", error);
    }
  };

  const handleDownloadGroupedItem = (item: OutputItem) => {
    downloadOutputs([
      {
        value: item.value,
        metadata: item.metadata,
        renderer: item.renderer,
      },
    ]);
  };

  return {
    outputItems,
    dataArray,
@@ -129,5 +156,10 @@ export const useNodeDataViewer = (
    handleCopyItem,
    handleDownloadItem,
    copiedIndex,
    groupedExecutions,
    totalGroupedItems,
    handleCopyGroupedItem,
    handleDownloadGroupedItem,
    copiedKey,
  };
};

@@ -8,16 +8,28 @@ import { useState } from "react";
import { NodeDataViewer } from "./NodeDataViewer/NodeDataViewer";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { CheckIcon, CopyIcon } from "@phosphor-icons/react";
import { useNodeStore } from "@/app/(platform)/build/stores/nodeStore";
import { useShallow } from "zustand/react/shallow";
import {
  NodeDataType,
  getExecutionEntries,
  normalizeToArray,
} from "../helpers";

export const ViewMoreData = ({
  outputData,
  execId,
  nodeId,
  dataType = "output",
}: {
  outputData: Record<string, Array<any>>;
  execId?: string;
  nodeId: string;
  dataType?: NodeDataType;
}) => {
  const [copiedKey, setCopiedKey] = useState<string | null>(null);
  const { toast } = useToast();
  const executionResults = useNodeStore(
    useShallow((state) => state.getNodeExecutionResults(nodeId)),
  );

  const reversedExecutionResults = [...executionResults].reverse();

  const handleCopy = (key: string, value: any) => {
    const textToCopy =
@@ -29,8 +41,8 @@ export const ViewMoreData = ({
    setTimeout(() => setCopiedKey(null), 2000);
  };

  const copyExecutionId = () => {
    navigator.clipboard.writeText(execId || "N/A").then(() => {
  const copyExecutionId = (executionId: string) => {
    navigator.clipboard.writeText(executionId || "N/A").then(() => {
      toast({
        title: "Execution ID copied to clipboard!",
        duration: 2000,
@@ -42,7 +54,7 @@ export const ViewMoreData = ({
    <Dialog styling={{ width: "600px", paddingRight: "16px" }}>
      <Dialog.Trigger>
        <Button
          variant="primary"
          variant="secondary"
          size="small"
          className="h-fit w-fit min-w-0 !text-xs"
        >
@@ -52,83 +64,114 @@ export const ViewMoreData = ({
      <Dialog.Content>
        <div className="flex flex-col gap-4">
          <Text variant="h4" className="text-slate-900">
            Complete Output Data
            Complete {dataType === "input" ? "Input" : "Output"} Data
          </Text>

          <div className="flex items-center gap-2">
            <Text variant="body" className="text-slate-600">
              Execution ID:
            </Text>
            <Text
              variant="body-medium"
              className="rounded-full border border-gray-300 bg-gray-50 px-2 py-1 font-mono text-xs"
            >
              {execId}
            </Text>
            <Button
              variant="ghost"
              size="small"
              onClick={copyExecutionId}
              className="h-6 w-6 min-w-0 p-0"
            >
              <CopyIcon size={14} />
            </Button>
          </div>

          <ScrollArea className="h-full">
            <div className="flex flex-col gap-4">
              {Object.entries(outputData).map(([key, value]) => (
                <div key={key} className="flex flex-col gap-2">
              {reversedExecutionResults.map((result) => (
                <div
                  key={result.node_exec_id}
                  className="rounded-3xl border border-slate-200 bg-white p-4 shadow-sm"
                >
                  <div className="flex items-center gap-2">
                    <Text variant="body" className="text-slate-600">
                      Execution ID:
                    </Text>
                    <Text
                      variant="body-medium"
                      className="!font-semibold text-slate-600"
                      className="rounded-full border border-gray-300 bg-gray-50 px-2 py-1 font-mono text-xs"
                    >
                      Pin:
                    </Text>
                    <Text variant="body-medium" className="text-slate-700">
                      {beautifyString(key)}
                      {result.node_exec_id}
                    </Text>
                    <Button
                      variant="ghost"
                      size="small"
                      onClick={() => copyExecutionId(result.node_exec_id)}
                      className="h-6 w-6 min-w-0 p-0"
                    >
                      <CopyIcon size={14} />
                    </Button>
                  </div>
                  <div className="w-full space-y-2">
                    <Text
                      variant="body-medium"
                      className="!font-semibold text-slate-600"
                    >
                      Data:
                    </Text>
                    <div className="relative space-y-2">
                      {value.map((item, index) => (
                        <div key={index}>
                          <ContentRenderer value={item} shortContent={false} />
                        </div>
                      ))}

                      <div className="mt-1 flex justify-end gap-1">
                        <NodeDataViewer
                          data={value}
                          pinName={key}
                          execId={execId}
                          isViewMoreData={true}
                        />
                        <Button
                          variant="secondary"
                          size="small"
                          onClick={() => handleCopy(key, value)}
                          className={cn(
                            "h-fit min-w-0 gap-1.5 border border-zinc-200 p-2 text-black hover:text-slate-900",
                            copiedKey === key &&
                              "border-green-400 bg-green-100 hover:border-green-400 hover:bg-green-200",
                          )}
                        >
                          {copiedKey === key ? (
                            <CheckIcon size={16} className="text-green-600" />
                          ) : (
                            <CopyIcon size={16} />
                          )}
                        </Button>
                      </div>
                    </div>
                  <div className="mt-4 flex flex-col gap-4">
                    {getExecutionEntries(result, dataType).map(
                      ([key, value]) => {
                        const normalizedValue = normalizeToArray(value);
                        return (
                          <div key={key} className="flex flex-col gap-2">
                            <div className="flex items-center gap-2">
                              <Text
                                variant="body-medium"
                                className="!font-semibold text-slate-600"
                              >
                                Pin:
                              </Text>
                              <Text
                                variant="body-medium"
                                className="text-slate-700"
                              >
                                {beautifyString(key)}
                              </Text>
                            </div>
                            <div className="w-full space-y-2">
                              <Text
                                variant="body-medium"
                                className="!font-semibold text-slate-600"
                              >
                                Data:
                              </Text>
                              <div className="relative space-y-2">
                                {normalizedValue.map((item, index) => (
                                  <div key={index}>
                                    <ContentRenderer
                                      value={item}
                                      shortContent={false}
                                    />
                                  </div>
                                ))}

                                <div className="mt-1 flex justify-end gap-1">
                                  <NodeDataViewer
                                    data={normalizedValue}
                                    pinName={key}
                                    execId={result.node_exec_id}
                                    isViewMoreData={true}
                                    dataType={dataType}
                                  />
                                  <Button
                                    variant="secondary"
                                    size="small"
                                    onClick={() =>
                                      handleCopy(
                                        `${result.node_exec_id}-${key}`,
                                        normalizedValue,
                                      )
                                    }
                                    className={cn(
                                      "h-fit min-w-0 gap-1.5 border border-zinc-200 p-2 text-black hover:text-slate-900",
                                      copiedKey ===
                                        `${result.node_exec_id}-${key}` &&
                                        "border-green-400 bg-green-100 hover:border-green-400 hover:bg-green-200",
                                    )}
                                  >
                                    {copiedKey ===
                                    `${result.node_exec_id}-${key}` ? (
                                      <CheckIcon
                                        size={16}
                                        className="text-green-600"
                                      />
                                    ) : (
                                      <CopyIcon size={16} />
                                    )}
                                  </Button>
                                </div>
                              </div>
                            </div>
                          </div>
                        );
                      },
                    )}
                  </div>
                </div>
              ))}

@@ -0,0 +1,83 @@
import type { NodeExecutionResult } from "@/app/api/__generated__/models/nodeExecutionResult";
import type { OutputMetadata } from "@/components/contextual/OutputRenderers";
import { globalRegistry } from "@/components/contextual/OutputRenderers";
import React from "react";

export type NodeDataType = "input" | "output";

export type OutputItem = {
  key: string;
  value: unknown;
  metadata?: OutputMetadata;
  renderer: any;
};

export const normalizeToArray = (value: unknown) => {
  if (value === undefined) return [];
  return Array.isArray(value) ? value : [value];
};

export const getExecutionData = (
  result: NodeExecutionResult,
  dataType: NodeDataType,
  pinName: string,
) => {
  if (dataType === "input") {
    return result.input_data;
  }

  return result.output_data?.[pinName];
};

export const createOutputItems = (dataArray: unknown[]): Array<OutputItem> => {
  const items: Array<OutputItem> = [];

  dataArray.forEach((value, index) => {
    const metadata: OutputMetadata = {};

    if (
      typeof value === "object" &&
      value !== null &&
      !React.isValidElement(value)
    ) {
      const objValue = value as any;
      if (objValue.type) metadata.type = objValue.type;
      if (objValue.mimeType) metadata.mimeType = objValue.mimeType;
      if (objValue.filename) metadata.filename = objValue.filename;
      if (objValue.language) metadata.language = objValue.language;
    }

    const renderer = globalRegistry.getRenderer(value, metadata);
    if (renderer) {
      items.push({
        key: `item-${index}`,
        value,
        metadata,
        renderer,
      });
    } else {
      const textRenderer = globalRegistry
        .getAllRenderers()
        .find((r) => r.name === "TextRenderer");
      if (textRenderer) {
        items.push({
          key: `item-${index}`,
          value:
            typeof value === "string" ? value : JSON.stringify(value, null, 2),
          metadata,
          renderer: textRenderer,
        });
      }
    }
  });

  return items;
};

export const getExecutionEntries = (
  result: NodeExecutionResult,
  dataType: NodeDataType,
) => {
  const data = dataType === "input" ? result.input_data : result.output_data;
  return Object.entries(data || {});
};
@@ -7,15 +7,18 @@ export const useNodeOutput = (nodeId: string) => {
  const [copiedKey, setCopiedKey] = useState<string | null>(null);
  const { toast } = useToast();

  const nodeExecutionResult = useNodeStore(
    useShallow((state) => state.getNodeExecutionResult(nodeId)),
  const latestResult = useNodeStore(
    useShallow((state) => state.getLatestNodeExecutionResult(nodeId)),
  );

  const inputData = nodeExecutionResult?.input_data;
  const latestInputData = useNodeStore(
    useShallow((state) => state.getLatestNodeInputData(nodeId)),
  );

  const latestOutputData: Record<string, Array<any>> = useNodeStore(
    useShallow((state) => state.getLatestNodeOutputData(nodeId) || {}),
  );

  const outputData: Record<string, Array<any>> = {
    ...nodeExecutionResult?.output_data,
  };
  const handleCopy = async (key: string, value: any) => {
    try {
      const text = JSON.stringify(value, null, 2);
@@ -35,11 +38,12 @@ export const useNodeOutput = (nodeId: string) => {
      });
    }
  };

  return {
    outputData,
    inputData,
    latestOutputData,
    latestInputData,
    copiedKey,
    handleCopy,
    executionResultId: nodeExecutionResult?.node_exec_id,
    executionResultId: latestResult?.node_exec_id,
  };
};

@@ -1,10 +1,7 @@
import { useState, useCallback, useEffect } from "react";
import { useShallow } from "zustand/react/shallow";
import { useGraphStore } from "@/app/(platform)/build/stores/graphStore";
import {
  useNodeStore,
  NodeResolutionData,
} from "@/app/(platform)/build/stores/nodeStore";
import { useNodeStore } from "@/app/(platform)/build/stores/nodeStore";
import { useEdgeStore } from "@/app/(platform)/build/stores/edgeStore";
import {
  useSubAgentUpdate,
@@ -13,6 +10,7 @@ import {
} from "@/app/(platform)/build/hooks/useSubAgentUpdate";
import { GraphInputSchema, GraphOutputSchema } from "@/lib/autogpt-server-api";
import { CustomNodeData } from "../../CustomNode";
import { NodeResolutionData } from "@/app/(platform)/build/stores/types";

// Stable empty set to avoid creating new references in selectors
const EMPTY_SET: Set<string> = new Set();

@@ -1,5 +1,5 @@
import { AgentExecutionStatus } from "@/app/api/__generated__/models/agentExecutionStatus";
import { NodeResolutionData } from "@/app/(platform)/build/stores/nodeStore";
import { NodeResolutionData } from "@/app/(platform)/build/stores/types";
import { RJSFSchema } from "@rjsf/utils";

export const nodeStyleBasedOnStatus: Record<AgentExecutionStatus, string> = {

@@ -0,0 +1,16 @@
export const accumulateExecutionData = (
  accumulated: Record<string, unknown[]>,
  data: Record<string, unknown> | undefined,
) => {
  if (!data) return { ...accumulated };
  const next = { ...accumulated };
  Object.entries(data).forEach(([key, values]) => {
    const nextValues = Array.isArray(values) ? values : [values];
    if (next[key]) {
      next[key] = [...next[key], ...nextValues];
    } else {
      next[key] = [...nextValues];
    }
  });
  return next;
};
@@ -10,6 +10,8 @@ import {
import { Node } from "@/app/api/__generated__/models/node";
import { AgentExecutionStatus } from "@/app/api/__generated__/models/agentExecutionStatus";
import { NodeExecutionResult } from "@/app/api/__generated__/models/nodeExecutionResult";
import { NodeExecutionResultInputData } from "@/app/api/__generated__/models/nodeExecutionResultInputData";
import { NodeExecutionResultOutputData } from "@/app/api/__generated__/models/nodeExecutionResultOutputData";
import { useHistoryStore } from "./historyStore";
import { useEdgeStore } from "./edgeStore";
import { BlockUIType } from "../components/types";
@@ -18,31 +20,10 @@ import {
  ensurePathExists,
  parseHandleIdToPath,
} from "@/components/renderers/InputRenderer/helpers";
import { IncompatibilityInfo } from "../hooks/useSubAgentUpdate/types";
import { accumulateExecutionData } from "./helpers";
import { NodeResolutionData } from "./types";

// Resolution mode data stored per node
export type NodeResolutionData = {
  incompatibilities: IncompatibilityInfo;
  // The NEW schema from the update (what we're updating TO)
  pendingUpdate: {
    input_schema: Record<string, unknown>;
    output_schema: Record<string, unknown>;
  };
  // The OLD schema before the update (what we're updating FROM)
  // Needed to merge and show removed inputs during resolution
  currentSchema: {
    input_schema: Record<string, unknown>;
    output_schema: Record<string, unknown>;
  };
  // The full updated hardcoded values to apply when resolution completes
  pendingHardcodedValues: Record<string, unknown>;
};

// Minimum movement (in pixels) required before logging position change to history
// Prevents spamming history with small movements when clicking on inputs inside blocks
const MINIMUM_MOVE_BEFORE_LOG = 50;

// Track initial positions when drag starts (outside store to avoid re-renders)
const dragStartPositions: Record<string, XYPosition> = {};

let dragStartState: { nodes: CustomNode[]; edges: CustomEdge[] } | null = null;
@@ -52,6 +33,15 @@ type NodeStore = {
  nodeCounter: number;
  setNodeCounter: (nodeCounter: number) => void;
  nodeAdvancedStates: Record<string, boolean>;

  latestNodeInputData: Record<string, NodeExecutionResultInputData | undefined>;
  latestNodeOutputData: Record<
    string,
    NodeExecutionResultOutputData | undefined
  >;
  accumulatedNodeInputData: Record<string, Record<string, unknown[]>>;
  accumulatedNodeOutputData: Record<string, Record<string, unknown[]>>;

  setNodes: (nodes: CustomNode[]) => void;
  onNodesChange: (changes: NodeChange<CustomNode>[]) => void;
  addNode: (node: CustomNode) => void;
@@ -72,12 +62,26 @@ type NodeStore = {

  updateNodeStatus: (nodeId: string, status: AgentExecutionStatus) => void;
  getNodeStatus: (nodeId: string) => AgentExecutionStatus | undefined;
  cleanNodesStatuses: () => void;

  updateNodeExecutionResult: (
    nodeId: string,
    result: NodeExecutionResult,
  ) => void;
  getNodeExecutionResult: (nodeId: string) => NodeExecutionResult | undefined;
  getNodeExecutionResults: (nodeId: string) => NodeExecutionResult[];
  getLatestNodeInputData: (
    nodeId: string,
  ) => NodeExecutionResultInputData | undefined;
  getLatestNodeOutputData: (
    nodeId: string,
  ) => NodeExecutionResultOutputData | undefined;
  getAccumulatedNodeInputData: (nodeId: string) => Record<string, unknown[]>;
  getAccumulatedNodeOutputData: (nodeId: string) => Record<string, unknown[]>;
  getLatestNodeExecutionResult: (
    nodeId: string,
  ) => NodeExecutionResult | undefined;
  clearAllNodeExecutionResults: () => void;

  getNodeBlockUIType: (nodeId: string) => BlockUIType;
  hasWebhookNodes: () => boolean;

@@ -122,6 +126,10 @@ export const useNodeStore = create<NodeStore>((set, get) => ({
  nodeCounter: 0,
  setNodeCounter: (nodeCounter) => set({ nodeCounter }),
  nodeAdvancedStates: {},
  latestNodeInputData: {},
  latestNodeOutputData: {},
  accumulatedNodeInputData: {},
  accumulatedNodeOutputData: {},
  incrementNodeCounter: () =>
    set((state) => ({
      nodeCounter: state.nodeCounter + 1,
@@ -317,17 +325,162 @@ export const useNodeStore = create<NodeStore>((set, get) => ({
    return get().nodes.find((n) => n.id === nodeId)?.data?.status;
  },

  updateNodeExecutionResult: (nodeId: string, result: NodeExecutionResult) => {
  cleanNodesStatuses: () => {
    set((state) => ({
      nodes: state.nodes.map((n) =>
        n.id === nodeId
          ? { ...n, data: { ...n.data, nodeExecutionResult: result } }
          : n,
      ),
      nodes: state.nodes.map((n) => ({
        ...n,
        data: { ...n.data, status: undefined },
      })),
    }));
  },
  getNodeExecutionResult: (nodeId: string) => {
    return get().nodes.find((n) => n.id === nodeId)?.data?.nodeExecutionResult;

  updateNodeExecutionResult: (nodeId: string, result: NodeExecutionResult) => {
    set((state) => {
      let latestNodeInputData = state.latestNodeInputData;
      let latestNodeOutputData = state.latestNodeOutputData;
      let accumulatedNodeInputData = state.accumulatedNodeInputData;
      let accumulatedNodeOutputData = state.accumulatedNodeOutputData;

      const nodes = state.nodes.map((n) => {
        if (n.id !== nodeId) return n;

        const existingResults = n.data.nodeExecutionResults || [];
        const duplicateIndex = existingResults.findIndex(
          (r) => r.node_exec_id === result.node_exec_id,
        );

        if (duplicateIndex !== -1) {
          const oldResult = existingResults[duplicateIndex];
          const inputDataChanged =
            JSON.stringify(oldResult.input_data) !==
            JSON.stringify(result.input_data);
          const outputDataChanged =
            JSON.stringify(oldResult.output_data) !==
            JSON.stringify(result.output_data);

          if (!inputDataChanged && !outputDataChanged) {
            return n;
          }

          const updatedResults = [...existingResults];
          updatedResults[duplicateIndex] = result;

          const recomputedAccumulatedInput = updatedResults.reduce(
            (acc, r) => accumulateExecutionData(acc, r.input_data),
            {} as Record<string, unknown[]>,
          );
          const recomputedAccumulatedOutput = updatedResults.reduce(
            (acc, r) => accumulateExecutionData(acc, r.output_data),
            {} as Record<string, unknown[]>,
          );

          const mostRecentResult = updatedResults[updatedResults.length - 1];
          latestNodeInputData = {
            ...latestNodeInputData,
            [nodeId]: mostRecentResult.input_data,
          };
          latestNodeOutputData = {
            ...latestNodeOutputData,
            [nodeId]: mostRecentResult.output_data,
          };

          accumulatedNodeInputData = {
            ...accumulatedNodeInputData,
            [nodeId]: recomputedAccumulatedInput,
          };
          accumulatedNodeOutputData = {
            ...accumulatedNodeOutputData,
            [nodeId]: recomputedAccumulatedOutput,
          };

          return {
            ...n,
            data: {
              ...n.data,
              nodeExecutionResults: updatedResults,
            },
          };
        }

        accumulatedNodeInputData = {
          ...accumulatedNodeInputData,
          [nodeId]: accumulateExecutionData(
            accumulatedNodeInputData[nodeId] || {},
            result.input_data,
          ),
        };
        accumulatedNodeOutputData = {
          ...accumulatedNodeOutputData,
          [nodeId]: accumulateExecutionData(
            accumulatedNodeOutputData[nodeId] || {},
            result.output_data,
          ),
        };

        latestNodeInputData = {
          ...latestNodeInputData,
          [nodeId]: result.input_data,
        };
        latestNodeOutputData = {
          ...latestNodeOutputData,
          [nodeId]: result.output_data,
        };

        return {
          ...n,
          data: {
            ...n.data,
            nodeExecutionResults: [...existingResults, result],
          },
        };
      });

      return {
        nodes,
        latestNodeInputData,
        latestNodeOutputData,
        accumulatedNodeInputData,
        accumulatedNodeOutputData,
      };
    });
  },
  getNodeExecutionResults: (nodeId: string) => {
    return (
      get().nodes.find((n) => n.id === nodeId)?.data?.nodeExecutionResults || []
    );
  },
  getLatestNodeInputData: (nodeId: string) => {
    return get().latestNodeInputData[nodeId];
  },
  getLatestNodeOutputData: (nodeId: string) => {
    return get().latestNodeOutputData[nodeId];
  },
  getAccumulatedNodeInputData: (nodeId: string) => {
    return get().accumulatedNodeInputData[nodeId] || {};
  },
  getAccumulatedNodeOutputData: (nodeId: string) => {
    return get().accumulatedNodeOutputData[nodeId] || {};
  },
  getLatestNodeExecutionResult: (nodeId: string) => {
    const results =
      get().nodes.find((n) => n.id === nodeId)?.data?.nodeExecutionResults ||
      [];
    return results.length > 0 ? results[results.length - 1] : undefined;
  },
  clearAllNodeExecutionResults: () => {
    set((state) => ({
      nodes: state.nodes.map((n) => ({
        ...n,
        data: {
          ...n.data,
          nodeExecutionResults: [],
        },
      })),
      latestNodeInputData: {},
      latestNodeOutputData: {},
      accumulatedNodeInputData: {},
      accumulatedNodeOutputData: {},
    }));
  },
  getNodeBlockUIType: (nodeId: string) => {
    return (

@@ -0,0 +1,14 @@
import { IncompatibilityInfo } from "../hooks/useSubAgentUpdate/types";

export type NodeResolutionData = {
  incompatibilities: IncompatibilityInfo;
  pendingUpdate: {
    input_schema: Record<string, unknown>;
    output_schema: Record<string, unknown>;
  };
  currentSchema: {
    input_schema: Record<string, unknown>;
    output_schema: Record<string, unknown>;
  };
  pendingHardcodedValues: Record<string, unknown>;
};
@@ -1,41 +0,0 @@
"use client";

import { createContext, useContext, useRef, type ReactNode } from "react";

interface NewChatContextValue {
  onNewChatClick: () => void;
  setOnNewChatClick: (handler?: () => void) => void;
  performNewChat?: () => void;
  setPerformNewChat: (handler?: () => void) => void;
}

const NewChatContext = createContext<NewChatContextValue | null>(null);

export function NewChatProvider({ children }: { children: ReactNode }) {
  const onNewChatRef = useRef<(() => void) | undefined>();
  const performNewChatRef = useRef<(() => void) | undefined>();
  const contextValueRef = useRef<NewChatContextValue>({
    onNewChatClick() {
      onNewChatRef.current?.();
    },
    setOnNewChatClick(handler?: () => void) {
      onNewChatRef.current = handler;
    },
    performNewChat() {
      performNewChatRef.current?.();
    },
    setPerformNewChat(handler?: () => void) {
      performNewChatRef.current = handler;
    },
  });

  return (
    <NewChatContext.Provider value={contextValueRef.current}>
      {children}
    </NewChatContext.Provider>
  );
}

export function useNewChat() {
  return useContext(NewChatContext);
}
@@ -4,7 +4,7 @@ import { ChatLoader } from "@/components/contextual/Chat/components/ChatLoader/C
|
||||
import { NAVBAR_HEIGHT_PX } from "@/lib/constants";
|
||||
import type { ReactNode } from "react";
|
||||
import { useEffect } from "react";
|
||||
import { useNewChat } from "../../NewChatContext";
|
||||
import { useCopilotStore } from "../../copilot-page-store";
|
||||
import { DesktopSidebar } from "./components/DesktopSidebar/DesktopSidebar";
|
||||
import { LoadingState } from "./components/LoadingState/LoadingState";
|
||||
import { MobileDrawer } from "./components/MobileDrawer/MobileDrawer";
|
||||
@@ -35,21 +35,23 @@ export function CopilotShell({ children }: Props) {
|
||||
isReadyToShowContent,
|
||||
} = useCopilotShell();
|
||||
|
||||
const newChatContext = useNewChat();
|
||||
const handleNewChatClickWrapper =
|
||||
newChatContext?.onNewChatClick || handleNewChat;
|
||||
const setNewChatHandler = useCopilotStore((s) => s.setNewChatHandler);
|
||||
const requestNewChat = useCopilotStore((s) => s.requestNewChat);
|
||||
|
||||
useEffect(
|
||||
function registerNewChatHandler() {
|
||||
if (!newChatContext) return;
|
||||
newChatContext.setPerformNewChat(handleNewChat);
|
||||
setNewChatHandler(handleNewChat);
|
||||
return function cleanup() {
|
||||
newChatContext.setPerformNewChat(undefined);
|
||||
setNewChatHandler(null);
|
||||
};
|
||||
},
|
||||
[newChatContext, handleNewChat],
|
||||
[handleNewChat],
|
||||
);
|
||||
|
||||
function handleNewChatClick() {
|
||||
requestNewChat();
|
||||
}
|
||||
|
||||
if (!isLoggedIn) {
|
||||
return (
|
||||
<div className="flex h-full items-center justify-center">
|
||||
@@ -72,7 +74,7 @@ export function CopilotShell({ children }: Props) {
|
||||
isFetchingNextPage={isFetchingNextPage}
|
||||
onSelectSession={handleSelectSession}
|
||||
onFetchNextPage={fetchNextPage}
|
||||
onNewChat={handleNewChatClickWrapper}
|
||||
onNewChat={handleNewChatClick}
|
||||
hasActiveSession={Boolean(hasActiveSession)}
|
||||
/>
|
||||
)}
|
||||
@@ -94,7 +96,7 @@ export function CopilotShell({ children }: Props) {
|
||||
isFetchingNextPage={isFetchingNextPage}
|
||||
onSelectSession={handleSelectSession}
|
||||
onFetchNextPage={fetchNextPage}
|
||||
onNewChat={handleNewChatClickWrapper}
|
||||
onNewChat={handleNewChatClick}
|
||||
onClose={handleCloseDrawer}
|
||||
onOpenChange={handleDrawerOpenChange}
|
||||
hasActiveSession={Boolean(hasActiveSession)}
|
||||
|
||||
@@ -1,7 +1,12 @@
import { useGetV2ListSessions } from "@/app/api/__generated__/endpoints/chat/chat";
import {
  getGetV2ListSessionsQueryKey,
  useGetV2ListSessions,
} from "@/app/api/__generated__/endpoints/chat/chat";
import type { SessionSummaryResponse } from "@/app/api/__generated__/models/sessionSummaryResponse";
import { okData } from "@/app/api/helpers";
import { useEffect, useMemo, useState } from "react";
import { useChatStore } from "@/components/contextual/Chat/chat-store";
import { useQueryClient } from "@tanstack/react-query";
import { useEffect, useState } from "react";

const PAGE_SIZE = 50;

@@ -15,6 +20,8 @@ export function useSessionsPagination({ enabled }: UseSessionsPaginationArgs) {
    SessionSummaryResponse[]
  >([]);
  const [totalCount, setTotalCount] = useState<number | null>(null);
  const queryClient = useQueryClient();
  const onStreamComplete = useChatStore((state) => state.onStreamComplete);

  const { data, isLoading, isFetching, isError } = useGetV2ListSessions(
    { limit: PAGE_SIZE, offset },
@@ -25,35 +32,47 @@ export function useSessionsPagination({ enabled }: UseSessionsPaginationArgs) {
    },
  );

  useEffect(() => {
    const responseData = okData(data);
    if (responseData) {
      const newSessions = responseData.sessions;
      const total = responseData.total;
      setTotalCount(total);

      if (offset === 0) {
        setAccumulatedSessions(newSessions);
      } else {
        setAccumulatedSessions((prev) => [...prev, ...newSessions]);
      }
    } else if (!enabled) {
  useEffect(function refreshOnStreamComplete() {
    const unsubscribe = onStreamComplete(function handleStreamComplete() {
      setOffset(0);
      setAccumulatedSessions([]);
      setTotalCount(null);
    }
  }, [data, offset, enabled]);
      queryClient.invalidateQueries({
        queryKey: getGetV2ListSessionsQueryKey(),
      });
    });
    return unsubscribe;
  }, []);

  const hasNextPage = useMemo(() => {
    if (totalCount === null) return false;
    return accumulatedSessions.length < totalCount;
  }, [accumulatedSessions.length, totalCount]);
  useEffect(
    function updateSessionsFromResponse() {
      const responseData = okData(data);
      if (responseData) {
        const newSessions = responseData.sessions;
        const total = responseData.total;
        setTotalCount(total);

  const areAllSessionsLoaded = useMemo(() => {
    if (totalCount === null) return false;
    return (
      accumulatedSessions.length >= totalCount && !isFetching && !isLoading
    );
  }, [accumulatedSessions.length, totalCount, isFetching, isLoading]);
        if (offset === 0) {
          setAccumulatedSessions(newSessions);
        } else {
          setAccumulatedSessions((prev) => [...prev, ...newSessions]);
        }
      } else if (!enabled) {
        setAccumulatedSessions([]);
        setTotalCount(null);
      }
    },
    [data, offset, enabled],
  );

  const hasNextPage =
    totalCount !== null && accumulatedSessions.length < totalCount;

  const areAllSessionsLoaded =
    totalCount !== null &&
    accumulatedSessions.length >= totalCount &&
    !isFetching &&
    !isLoading;

  useEffect(() => {
    if (
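
The hunk above interleaves the removed effect with the added one, which makes the new behavior hard to read. Reconstructed from the added lines only (hedged — the flattened diff does not preserve +/- markers), the new subscription is roughly:

// Hedged reconstruction of the added refreshOnStreamComplete effect:
// when any chat stream completes, reset to the first page and refetch.
useEffect(function refreshOnStreamComplete() {
  const unsubscribe = onStreamComplete(function handleStreamComplete() {
    setOffset(0);
    setAccumulatedSessions([]);
    setTotalCount(null);
    queryClient.invalidateQueries({
      queryKey: getGetV2ListSessionsQueryKey(),
    });
  });
  return unsubscribe;
}, []);
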
@@ -2,9 +2,7 @@ import type { SessionDetailResponse } from "@/app/api/__generated__/models/sessi
import type { SessionSummaryResponse } from "@/app/api/__generated__/models/sessionSummaryResponse";
import { format, formatDistanceToNow, isToday } from "date-fns";

export function convertSessionDetailToSummary(
  session: SessionDetailResponse,
): SessionSummaryResponse {
export function convertSessionDetailToSummary(session: SessionDetailResponse) {
  return {
    id: session.id,
    created_at: session.created_at,
@@ -13,17 +11,25 @@ export function convertSessionDetailToSummary(
  };
}

export function filterVisibleSessions(
  sessions: SessionSummaryResponse[],
): SessionSummaryResponse[] {
  return sessions.filter(
    (session) => session.updated_at !== session.created_at,
  );
export function filterVisibleSessions(sessions: SessionSummaryResponse[]) {
  const fiveMinutesAgo = Date.now() - 5 * 60 * 1000;
  return sessions.filter((session) => {
    const hasBeenUpdated = session.updated_at !== session.created_at;

    if (hasBeenUpdated) return true;

    const isRecentlyCreated =
      new Date(session.created_at).getTime() > fiveMinutesAgo;

    return isRecentlyCreated;
  });
}

export function getSessionTitle(session: SessionSummaryResponse): string {
export function getSessionTitle(session: SessionSummaryResponse) {
  if (session.title) return session.title;

  const isNewSession = session.updated_at === session.created_at;

  if (isNewSession) {
    const createdDate = new Date(session.created_at);
    if (isToday(createdDate)) {
@@ -31,12 +37,11 @@ export function getSessionTitle(session: SessionSummaryResponse): string {
    }
    return format(createdDate, "MMM d, yyyy");
  }

  return "Untitled Chat";
}

export function getSessionUpdatedLabel(
  session: SessionSummaryResponse,
): string {
export function getSessionUpdatedLabel(session: SessionSummaryResponse) {
  if (!session.updated_at) return "";
  return formatDistanceToNow(new Date(session.updated_at), { addSuffix: true });
}
@@ -45,8 +50,10 @@ export function mergeCurrentSessionIntoList(
  accumulatedSessions: SessionSummaryResponse[],
  currentSessionId: string | null,
  currentSessionData: SessionDetailResponse | null | undefined,
): SessionSummaryResponse[] {
  recentlyCreatedSessions?: Map<string, SessionSummaryResponse>,
) {
  const filteredSessions: SessionSummaryResponse[] = [];
  const addedIds = new Set<string>();

  if (accumulatedSessions.length > 0) {
    const visibleSessions = filterVisibleSessions(accumulatedSessions);
@@ -61,29 +68,40 @@ export function mergeCurrentSessionIntoList(
      );
      if (!isInVisible) {
        filteredSessions.push(currentInAll);
        addedIds.add(currentInAll.id);
      }
    }
  }

    filteredSessions.push(...visibleSessions);
    for (const session of visibleSessions) {
      if (!addedIds.has(session.id)) {
        filteredSessions.push(session);
        addedIds.add(session.id);
      }
    }
  }

  if (currentSessionId && currentSessionData) {
    const isCurrentInList = filteredSessions.some(
      (s) => s.id === currentSessionId,
    );
    if (!isCurrentInList) {
    if (!addedIds.has(currentSessionId)) {
      const summarySession = convertSessionDetailToSummary(currentSessionData);
      filteredSessions.unshift(summarySession);
      addedIds.add(currentSessionId);
    }
  }

  if (recentlyCreatedSessions) {
    for (const [sessionId, sessionData] of recentlyCreatedSessions) {
      if (!addedIds.has(sessionId)) {
        filteredSessions.unshift(sessionData);
        addedIds.add(sessionId);
      }
    }
  }

  return filteredSessions;
}

export function getCurrentSessionId(
  searchParams: URLSearchParams,
): string | null {
export function getCurrentSessionId(searchParams: URLSearchParams) {
  return searchParams.get("sessionId");
}

@@ -95,11 +113,7 @@ export function shouldAutoSelectSession(
  accumulatedSessions: SessionSummaryResponse[],
  isLoading: boolean,
  totalCount: number | null,
): {
  shouldSelect: boolean;
  sessionIdToSelect: string | null;
  shouldCreate: boolean;
} {
) {
  if (!areAllSessionsLoaded || hasAutoSelectedSession) {
    return {
      shouldSelect: false,
@@ -146,7 +160,7 @@ export function checkReadyToShowContent(
  isCurrentSessionLoading: boolean,
  currentSessionData: SessionDetailResponse | null | undefined,
  hasAutoSelectedSession: boolean,
): boolean {
) {
  if (!areAllSessionsLoaded) return false;

  if (paramSessionId) {
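
The new filterVisibleSessions above widens visibility: a session that has never been updated now stays listed for five minutes after creation instead of being hidden immediately. A small sketch of the expected behavior (session object is hypothetical and abridged):

// Illustrative only — a never-updated session created one minute ago
// is kept because it falls inside the five-minute window.
const t = Date.now();
const justCreated = {
  id: "a",
  created_at: new Date(t - 60_000).toISOString(),
  updated_at: new Date(t - 60_000).toISOString(),
};
filterVisibleSessions([justCreated]); // => [justCreated]
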
@@ -4,23 +4,25 @@ import {
  getGetV2ListSessionsQueryKey,
  useGetV2GetSession,
} from "@/app/api/__generated__/endpoints/chat/chat";
import type { SessionSummaryResponse } from "@/app/api/__generated__/models/sessionSummaryResponse";
import { okData } from "@/app/api/helpers";
import { useBreakpoint } from "@/lib/hooks/useBreakpoint";
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
import { useQueryClient } from "@tanstack/react-query";
import { usePathname, useRouter, useSearchParams } from "next/navigation";
import { parseAsString, useQueryState } from "nuqs";
import { usePathname, useSearchParams } from "next/navigation";
import { useEffect, useRef, useState } from "react";
import { useMobileDrawer } from "./components/MobileDrawer/useMobileDrawer";
import { useSessionsPagination } from "./components/SessionsList/useSessionsPagination";
import {
  checkReadyToShowContent,
  convertSessionDetailToSummary,
  filterVisibleSessions,
  getCurrentSessionId,
  mergeCurrentSessionIntoList,
} from "./helpers";

export function useCopilotShell() {
  const router = useRouter();
  const pathname = usePathname();
  const searchParams = useSearchParams();
  const queryClient = useQueryClient();
@@ -29,6 +31,8 @@ export function useCopilotShell() {
  const isMobile =
    breakpoint === "base" || breakpoint === "sm" || breakpoint === "md";

  const [, setUrlSessionId] = useQueryState("sessionId", parseAsString);

  const isOnHomepage = pathname === "/copilot";
  const paramSessionId = searchParams.get("sessionId");

@@ -65,6 +69,9 @@ export function useCopilotShell() {

  const [hasAutoSelectedSession, setHasAutoSelectedSession] = useState(false);
  const hasAutoSelectedRef = useRef(false);
  const recentlyCreatedSessionsRef = useRef<
    Map<string, SessionSummaryResponse>
  >(new Map());

  // Mark as auto-selected when sessionId is in URL
  useEffect(() => {
@@ -91,6 +98,30 @@ export function useCopilotShell() {
    }
  }, [isOnHomepage, paramSessionId, queryClient]);

  // Track newly created sessions to ensure they stay visible even when switching away
  useEffect(() => {
    if (currentSessionId && currentSessionData) {
      const isNewSession =
        currentSessionData.updated_at === currentSessionData.created_at;
      const isNotInAccumulated = !accumulatedSessions.some(
        (s) => s.id === currentSessionId,
      );
      if (isNewSession || isNotInAccumulated) {
        const summary = convertSessionDetailToSummary(currentSessionData);
        recentlyCreatedSessionsRef.current.set(currentSessionId, summary);
      }
    }
  }, [currentSessionId, currentSessionData, accumulatedSessions]);

  // Clean up recently created sessions that are now in the accumulated list
  useEffect(() => {
    for (const sessionId of recentlyCreatedSessionsRef.current.keys()) {
      if (accumulatedSessions.some((s) => s.id === sessionId)) {
        recentlyCreatedSessionsRef.current.delete(sessionId);
      }
    }
  }, [accumulatedSessions]);

  // Reset pagination when query becomes disabled
  const prevPaginationEnabledRef = useRef(paginationEnabled);
  useEffect(() => {
@@ -105,6 +136,7 @@ export function useCopilotShell() {
    accumulatedSessions,
    currentSessionId,
    currentSessionData,
    recentlyCreatedSessionsRef.current,
  );

  const visibleSessions = filterVisibleSessions(sessions);
@@ -124,22 +156,17 @@ export function useCopilotShell() {
  );

  function handleSelectSession(sessionId: string) {
    // Navigate using replaceState to avoid full page reload
    window.history.replaceState(null, "", `/copilot?sessionId=${sessionId}`);
    // Force a re-render by updating the URL through router
    router.replace(`/copilot?sessionId=${sessionId}`);
    setUrlSessionId(sessionId, { shallow: false });
    if (isMobile) handleCloseDrawer();
  }

  function handleNewChat() {
    resetAutoSelect();
    resetPagination();
    // Invalidate and refetch sessions list to ensure newly created sessions appear
    queryClient.invalidateQueries({
      queryKey: getGetV2ListSessionsQueryKey(),
    });
    window.history.replaceState(null, "", "/copilot");
    router.replace("/copilot");
    setUrlSessionId(null, { shallow: false });
    if (isMobile) handleCloseDrawer();
  }

@@ -0,0 +1,54 @@
"use client";

import { create } from "zustand";

interface CopilotStoreState {
  isStreaming: boolean;
  isNewChatModalOpen: boolean;
  newChatHandler: (() => void) | null;
}

interface CopilotStoreActions {
  setIsStreaming: (isStreaming: boolean) => void;
  setNewChatHandler: (handler: (() => void) | null) => void;
  requestNewChat: () => void;
  confirmNewChat: () => void;
  cancelNewChat: () => void;
}

type CopilotStore = CopilotStoreState & CopilotStoreActions;

export const useCopilotStore = create<CopilotStore>((set, get) => ({
  isStreaming: false,
  isNewChatModalOpen: false,
  newChatHandler: null,

  setIsStreaming(isStreaming) {
    set({ isStreaming });
  },

  setNewChatHandler(handler) {
    set({ newChatHandler: handler });
  },

  requestNewChat() {
    const { isStreaming, newChatHandler } = get();
    if (isStreaming) {
      set({ isNewChatModalOpen: true });
    } else if (newChatHandler) {
      newChatHandler();
    }
  },

  confirmNewChat() {
    const { newChatHandler } = get();
    set({ isNewChatModalOpen: false });
    if (newChatHandler) {
      newChatHandler();
    }
  },

  cancelNewChat() {
    set({ isNewChatModalOpen: false });
  },
}));
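
A minimal consumer sketch for the new copilot store (component name hypothetical): requestNewChat opens the confirmation modal only while a stream is active, and otherwise invokes the registered handler directly.

// Illustrative sketch only.
import { useCopilotStore } from "./copilot-page-store";

function NewChatButton() {
  const requestNewChat = useCopilotStore((s) => s.requestNewChat);
  return <button onClick={requestNewChat}>New chat</button>;
}
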
@@ -1,11 +1,6 @@
import type { ReactNode } from "react";
import { NewChatProvider } from "./NewChatContext";
import { CopilotShell } from "./components/CopilotShell/CopilotShell";

export default function CopilotLayout({ children }: { children: ReactNode }) {
  return (
    <NewChatProvider>
      <CopilotShell>{children}</CopilotShell>
    </NewChatProvider>
  );
  return <CopilotShell>{children}</CopilotShell>;
}

@@ -1,16 +1,19 @@
"use client";

import { Skeleton } from "@/components/__legacy__/ui/skeleton";
import { Button } from "@/components/atoms/Button/Button";

import { Skeleton } from "@/components/atoms/Skeleton/Skeleton";
import { Text } from "@/components/atoms/Text/Text";
import { Chat } from "@/components/contextual/Chat/Chat";
import { ChatInput } from "@/components/contextual/Chat/components/ChatInput/ChatInput";
import { ChatLoader } from "@/components/contextual/Chat/components/ChatLoader/ChatLoader";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import { useCopilotStore } from "./copilot-page-store";
import { useCopilotPage } from "./useCopilotPage";

export default function CopilotPage() {
  const { state, handlers } = useCopilotPage();
  const confirmNewChat = useCopilotStore((s) => s.confirmNewChat);
  const {
    greetingName,
    quickActions,
@@ -25,15 +28,11 @@ export default function CopilotPage() {
    handleSessionNotFound,
    handleStreamingChange,
    handleCancelNewChat,
    proceedWithNewChat,
    handleNewChatModalOpen,
  } = handlers;

  if (!isReady) {
    return null;
  }
  if (!isReady) return null;

  // Show Chat when we have an active session
  if (pageState.type === "chat") {
    return (
      <div className="flex h-full flex-col">
@@ -71,7 +70,7 @@ export default function CopilotPage() {
            <Button
              type="button"
              variant="primary"
              onClick={proceedWithNewChat}
              onClick={confirmNewChat}
            >
              Start new chat
            </Button>
@@ -83,7 +82,7 @@ export default function CopilotPage() {
    );
  }

  if (pageState.type === "newChat") {
  if (pageState.type === "newChat" || pageState.type === "creating") {
    return (
      <div className="flex h-full flex-1 flex-col items-center justify-center bg-[#f8f8f9]">
        <div className="flex flex-col items-center gap-4">
@@ -96,21 +95,6 @@ export default function CopilotPage() {
    );
  }

  // Show loading state while creating session and sending first message
  if (pageState.type === "creating") {
    return (
      <div className="flex h-full flex-1 flex-col items-center justify-center bg-[#f8f8f9]">
        <div className="flex flex-col items-center gap-4">
          <ChatLoader />
          <Text variant="body" className="text-zinc-500">
            Loading your chats...
          </Text>
        </div>
      </div>
    );
  }

  // Show Welcome screen
  return (
    <div className="flex h-full flex-1 items-center justify-center overflow-y-auto bg-[#f8f8f9] px-6 py-10">
      <div className="w-full text-center">

@@ -1,4 +1,7 @@
import { postV2CreateSession } from "@/app/api/__generated__/endpoints/chat/chat";
import {
  getGetV2ListSessionsQueryKey,
  postV2CreateSession,
} from "@/app/api/__generated__/endpoints/chat/chat";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { getHomepageRoute } from "@/lib/constants";
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
@@ -8,25 +11,22 @@ import {
  useGetFlag,
} from "@/services/feature-flags/use-get-flag";
import * as Sentry from "@sentry/nextjs";
import { useQueryClient } from "@tanstack/react-query";
import { useFlags } from "launchdarkly-react-client-sdk";
import { useRouter } from "next/navigation";
import { useEffect, useReducer } from "react";
import { useNewChat } from "./NewChatContext";
import { useCopilotStore } from "./copilot-page-store";
import { getGreetingName, getQuickActions, type PageState } from "./helpers";
import { useCopilotURLState } from "./useCopilotURLState";

type CopilotState = {
  pageState: PageState;
  isStreaming: boolean;
  isNewChatModalOpen: boolean;
  initialPrompts: Record<string, string>;
  previousSessionId: string | null;
};

type CopilotAction =
  | { type: "setPageState"; pageState: PageState }
  | { type: "setStreaming"; isStreaming: boolean }
  | { type: "setNewChatModalOpen"; isOpen: boolean }
  | { type: "setInitialPrompt"; sessionId: string; prompt: string }
  | { type: "setPreviousSessionId"; sessionId: string | null };

@@ -52,14 +52,6 @@ function copilotReducer(
    if (isSamePageState(action.pageState, state.pageState)) return state;
    return { ...state, pageState: action.pageState };
  }
  if (action.type === "setStreaming") {
    if (action.isStreaming === state.isStreaming) return state;
    return { ...state, isStreaming: action.isStreaming };
  }
  if (action.type === "setNewChatModalOpen") {
    if (action.isOpen === state.isNewChatModalOpen) return state;
    return { ...state, isNewChatModalOpen: action.isOpen };
  }
  if (action.type === "setInitialPrompt") {
    if (state.initialPrompts[action.sessionId] === action.prompt) return state;
    return {
@@ -79,9 +71,14 @@ function copilotReducer(

export function useCopilotPage() {
  const router = useRouter();
  const queryClient = useQueryClient();
  const { user, isLoggedIn, isUserLoading } = useSupabase();
  const { toast } = useToast();

  const isNewChatModalOpen = useCopilotStore((s) => s.isNewChatModalOpen);
  const setIsStreaming = useCopilotStore((s) => s.setIsStreaming);
  const cancelNewChat = useCopilotStore((s) => s.cancelNewChat);

  const isChatEnabled = useGetFlag(Flag.CHAT);
  const flags = useFlags<FlagValues>();
  const homepageRoute = getHomepageRoute(isChatEnabled);
@@ -93,13 +90,10 @@ export function useCopilotPage() {

  const [state, dispatch] = useReducer(copilotReducer, {
    pageState: { type: "welcome" },
    isStreaming: false,
    isNewChatModalOpen: false,
    initialPrompts: {},
    previousSessionId: null,
  });

  const newChatContext = useNewChat();
  const greetingName = getGreetingName(user);
  const quickActions = getQuickActions();

@@ -124,17 +118,6 @@ export function useCopilotPage() {
    setPreviousSessionId,
  });

  useEffect(
    function registerNewChatHandler() {
      if (!newChatContext) return;
      newChatContext.setOnNewChatClick(handleNewChatClick);
      return function cleanup() {
        newChatContext.setOnNewChatClick(undefined);
      };
    },
    [newChatContext, handleNewChatClick],
  );

  useEffect(
    function transitionNewChatToWelcome() {
      if (state.pageState.type === "newChat") {
@@ -189,6 +172,10 @@ export function useCopilotPage() {
        prompt: trimmedPrompt,
      });

      await queryClient.invalidateQueries({
        queryKey: getGetV2ListSessionsQueryKey(),
      });

      await setUrlSessionId(sessionId, { shallow: false });
      dispatch({
        type: "setPageState",
@@ -211,37 +198,15 @@ export function useCopilotPage() {
  }

  function handleStreamingChange(isStreamingValue: boolean) {
    dispatch({ type: "setStreaming", isStreaming: isStreamingValue });
  }

  async function proceedWithNewChat() {
    dispatch({ type: "setNewChatModalOpen", isOpen: false });
    if (newChatContext?.performNewChat) {
      newChatContext.performNewChat();
      return;
    }
    try {
      await setUrlSessionId(null, { shallow: false });
    } catch (error) {
      console.error("[CopilotPage] Failed to clear session:", error);
    }
    router.replace("/copilot");
    setIsStreaming(isStreamingValue);
  }

  function handleCancelNewChat() {
    dispatch({ type: "setNewChatModalOpen", isOpen: false });
    cancelNewChat();
  }

  function handleNewChatModalOpen(isOpen: boolean) {
    dispatch({ type: "setNewChatModalOpen", isOpen });
  }

  function handleNewChatClick() {
    if (state.isStreaming) {
      dispatch({ type: "setNewChatModalOpen", isOpen: true });
    } else {
      proceedWithNewChat();
    }
    if (!isOpen) cancelNewChat();
  }

  return {
@@ -250,7 +215,7 @@ export function useCopilotPage() {
      quickActions,
      isLoading: isUserLoading,
      pageState: state.pageState,
      isNewChatModalOpen: state.isNewChatModalOpen,
      isNewChatModalOpen,
      isReady: isFlagReady && isChatEnabled !== false && isLoggedIn,
    },
    handlers: {
@@ -259,7 +224,6 @@ export function useCopilotPage() {
      handleSessionNotFound,
      handleStreamingChange,
      handleCancelNewChat,
      proceedWithNewChat,
      handleNewChatModalOpen,
    },
  };

@@ -1,10 +1,12 @@
import { Navbar } from "@/components/layout/Navbar/Navbar";
import { NetworkStatusMonitor } from "@/services/network-status/NetworkStatusMonitor";
import { ReactNode } from "react";
import { AdminImpersonationBanner } from "./admin/components/AdminImpersonationBanner";

export default function PlatformLayout({ children }: { children: ReactNode }) {
  return (
    <main className="flex h-screen w-full flex-col">
      <NetworkStatusMonitor />
      <Navbar />
      <AdminImpersonationBanner />
      <section className="flex-1">{children}</section>

@@ -6,28 +6,40 @@ import { BackendAPIProvider } from "@/lib/autogpt-server-api/context";
import { getQueryClient } from "@/lib/react-query/queryClient";
import CredentialsProvider from "@/providers/agent-credentials/credentials-provider";
import OnboardingProvider from "@/providers/onboarding/onboarding-provider";
import {
  PostHogPageViewTracker,
  PostHogProvider,
  PostHogUserTracker,
} from "@/providers/posthog/posthog-provider";
import { LaunchDarklyProvider } from "@/services/feature-flags/feature-flag-provider";
import { QueryClientProvider } from "@tanstack/react-query";
import { ThemeProvider, ThemeProviderProps } from "next-themes";
import { NuqsAdapter } from "nuqs/adapters/next/app";
import { Suspense } from "react";

export function Providers({ children, ...props }: ThemeProviderProps) {
  const queryClient = getQueryClient();
  return (
    <QueryClientProvider client={queryClient}>
      <NuqsAdapter>
        <BackendAPIProvider>
          <SentryUserTracker />
          <CredentialsProvider>
            <LaunchDarklyProvider>
              <OnboardingProvider>
                <ThemeProvider forcedTheme="light" {...props}>
                  <TooltipProvider>{children}</TooltipProvider>
                </ThemeProvider>
              </OnboardingProvider>
            </LaunchDarklyProvider>
          </CredentialsProvider>
        </BackendAPIProvider>
        <PostHogProvider>
          <BackendAPIProvider>
            <SentryUserTracker />
            <PostHogUserTracker />
            <Suspense fallback={null}>
              <PostHogPageViewTracker />
            </Suspense>
            <CredentialsProvider>
              <LaunchDarklyProvider>
                <OnboardingProvider>
                  <ThemeProvider forcedTheme="light" {...props}>
                    <TooltipProvider>{children}</TooltipProvider>
                  </ThemeProvider>
                </OnboardingProvider>
              </LaunchDarklyProvider>
            </CredentialsProvider>
          </BackendAPIProvider>
        </PostHogProvider>
      </NuqsAdapter>
    </QueryClientProvider>
  );

@@ -0,0 +1,14 @@
import { cn } from "@/lib/utils";

interface Props extends React.HTMLAttributes<HTMLDivElement> {
  className?: string;
}

export function Skeleton({ className, ...props }: Props) {
  return (
    <div
      className={cn("animate-pulse rounded-md bg-zinc-100", className)}
      {...props}
    />
  );
}
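
Typical usage of the new Skeleton atom — a sketch, since no call site appears in this hunk:

// Illustrative only.
<Skeleton className="h-4 w-40" />
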
@@ -1,4 +1,4 @@
import { Skeleton } from "@/components/__legacy__/ui/skeleton";
import { Skeleton } from "./Skeleton";
import type { Meta, StoryObj } from "@storybook/nextjs";

const meta: Meta<typeof Skeleton> = {

@@ -0,0 +1,234 @@
"use client";

import { create } from "zustand";
import type {
  ActiveStream,
  StreamChunk,
  StreamCompleteCallback,
  StreamResult,
  StreamStatus,
} from "./chat-types";
import { executeStream } from "./stream-executor";

const COMPLETED_STREAM_TTL = 5 * 60 * 1000; // 5 minutes

interface ChatStoreState {
  activeStreams: Map<string, ActiveStream>;
  completedStreams: Map<string, StreamResult>;
  activeSessions: Set<string>;
  streamCompleteCallbacks: Set<StreamCompleteCallback>;
}

interface ChatStoreActions {
  startStream: (
    sessionId: string,
    message: string,
    isUserMessage: boolean,
    context?: { url: string; content: string },
    onChunk?: (chunk: StreamChunk) => void,
  ) => Promise<void>;
  stopStream: (sessionId: string) => void;
  subscribeToStream: (
    sessionId: string,
    onChunk: (chunk: StreamChunk) => void,
    skipReplay?: boolean,
  ) => () => void;
  getStreamStatus: (sessionId: string) => StreamStatus;
  getCompletedStream: (sessionId: string) => StreamResult | undefined;
  clearCompletedStream: (sessionId: string) => void;
  isStreaming: (sessionId: string) => boolean;
  registerActiveSession: (sessionId: string) => void;
  unregisterActiveSession: (sessionId: string) => void;
  isSessionActive: (sessionId: string) => boolean;
  onStreamComplete: (callback: StreamCompleteCallback) => () => void;
}

type ChatStore = ChatStoreState & ChatStoreActions;

function notifyStreamComplete(
  callbacks: Set<StreamCompleteCallback>,
  sessionId: string,
) {
  for (const callback of callbacks) {
    try {
      callback(sessionId);
    } catch (err) {
      console.warn("[ChatStore] Stream complete callback error:", err);
    }
  }
}

function cleanupCompletedStreams(completedStreams: Map<string, StreamResult>) {
  const now = Date.now();
  for (const [sessionId, result] of completedStreams) {
    if (now - result.completedAt > COMPLETED_STREAM_TTL) {
      completedStreams.delete(sessionId);
    }
  }
}

function moveToCompleted(
  activeStreams: Map<string, ActiveStream>,
  completedStreams: Map<string, StreamResult>,
  streamCompleteCallbacks: Set<StreamCompleteCallback>,
  sessionId: string,
) {
  const stream = activeStreams.get(sessionId);
  if (!stream) return;

  const result: StreamResult = {
    sessionId,
    status: stream.status,
    chunks: stream.chunks,
    completedAt: Date.now(),
    error: stream.error,
  };

  completedStreams.set(sessionId, result);
  activeStreams.delete(sessionId);
  cleanupCompletedStreams(completedStreams);

  if (stream.status === "completed" || stream.status === "error") {
    notifyStreamComplete(streamCompleteCallbacks, sessionId);
  }
}

export const useChatStore = create<ChatStore>((set, get) => ({
  activeStreams: new Map(),
  completedStreams: new Map(),
  activeSessions: new Set(),
  streamCompleteCallbacks: new Set(),

  startStream: async function startStream(
    sessionId,
    message,
    isUserMessage,
    context,
    onChunk,
  ) {
    const { activeStreams, completedStreams, streamCompleteCallbacks } = get();

    const existingStream = activeStreams.get(sessionId);
    if (existingStream) {
      existingStream.abortController.abort();
      moveToCompleted(
        activeStreams,
        completedStreams,
        streamCompleteCallbacks,
        sessionId,
      );
    }

    const abortController = new AbortController();
    const initialCallbacks = new Set<(chunk: StreamChunk) => void>();
    if (onChunk) initialCallbacks.add(onChunk);

    const stream: ActiveStream = {
      sessionId,
      abortController,
      status: "streaming",
      startedAt: Date.now(),
      chunks: [],
      onChunkCallbacks: initialCallbacks,
    };

    activeStreams.set(sessionId, stream);

    try {
      await executeStream(stream, message, isUserMessage, context);
    } finally {
      if (onChunk) stream.onChunkCallbacks.delete(onChunk);
      if (stream.status !== "streaming") {
        moveToCompleted(
          activeStreams,
          completedStreams,
          streamCompleteCallbacks,
          sessionId,
        );
      }
    }
  },

  stopStream: function stopStream(sessionId) {
    const { activeStreams, completedStreams, streamCompleteCallbacks } = get();
    const stream = activeStreams.get(sessionId);
    if (stream) {
      stream.abortController.abort();
      stream.status = "completed";
      moveToCompleted(
        activeStreams,
        completedStreams,
        streamCompleteCallbacks,
        sessionId,
      );
    }
  },

  subscribeToStream: function subscribeToStream(
    sessionId,
    onChunk,
    skipReplay = false,
  ) {
    const { activeStreams } = get();

    const stream = activeStreams.get(sessionId);
    if (stream) {
      if (!skipReplay) {
        for (const chunk of stream.chunks) {
          onChunk(chunk);
        }
      }
      stream.onChunkCallbacks.add(onChunk);
      return function unsubscribe() {
        stream.onChunkCallbacks.delete(onChunk);
      };
    }

    return function noop() {};
  },

  getStreamStatus: function getStreamStatus(sessionId) {
    const { activeStreams, completedStreams } = get();

    const active = activeStreams.get(sessionId);
    if (active) return active.status;

    const completed = completedStreams.get(sessionId);
    if (completed) return completed.status;

    return "idle";
  },

  getCompletedStream: function getCompletedStream(sessionId) {
    return get().completedStreams.get(sessionId);
  },

  clearCompletedStream: function clearCompletedStream(sessionId) {
    get().completedStreams.delete(sessionId);
  },

  isStreaming: function isStreaming(sessionId) {
    const stream = get().activeStreams.get(sessionId);
    return stream?.status === "streaming";
  },

  registerActiveSession: function registerActiveSession(sessionId) {
    get().activeSessions.add(sessionId);
  },

  unregisterActiveSession: function unregisterActiveSession(sessionId) {
    get().activeSessions.delete(sessionId);
  },

  isSessionActive: function isSessionActive(sessionId) {
    return get().activeSessions.has(sessionId);
  },

  onStreamComplete: function onStreamComplete(callback) {
    const { streamCompleteCallbacks } = get();
    streamCompleteCallbacks.add(callback);
    return function unsubscribe() {
      streamCompleteCallbacks.delete(callback);
    };
  },
}));
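
A hedged usage sketch for the new chat store: chunks are buffered on the ActiveStream, so a component that mounts mid-stream can call subscribeToStream and replay what it missed, while startStream first aborts any stream already running for the same session.

// Illustrative sketch only — sessionId and handleChunk are hypothetical.
const { startStream, subscribeToStream } = useChatStore.getState();

// Kick things off; the per-chunk callback argument is optional.
void startStream(sessionId, "Hello", true);

// While the stream is active, a late subscriber first replays buffered
// chunks (unless skipReplay is true), then receives live ones.
const unsubscribe = subscribeToStream(sessionId, handleChunk);
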
@@ -0,0 +1,94 @@
import type { ToolArguments, ToolResult } from "@/types/chat";

export type StreamStatus = "idle" | "streaming" | "completed" | "error";

export interface StreamChunk {
  type:
    | "text_chunk"
    | "text_ended"
    | "tool_call"
    | "tool_call_start"
    | "tool_response"
    | "login_needed"
    | "need_login"
    | "credentials_needed"
    | "error"
    | "usage"
    | "stream_end";
  timestamp?: string;
  content?: string;
  message?: string;
  code?: string;
  details?: Record<string, unknown>;
  tool_id?: string;
  tool_name?: string;
  arguments?: ToolArguments;
  result?: ToolResult;
  success?: boolean;
  idx?: number;
  session_id?: string;
  agent_info?: {
    graph_id: string;
    name: string;
    trigger_type: string;
  };
  provider?: string;
  provider_name?: string;
  credential_type?: string;
  scopes?: string[];
  title?: string;
  [key: string]: unknown;
}

export type VercelStreamChunk =
  | { type: "start"; messageId: string }
  | { type: "finish" }
  | { type: "text-start"; id: string }
  | { type: "text-delta"; id: string; delta: string }
  | { type: "text-end"; id: string }
  | { type: "tool-input-start"; toolCallId: string; toolName: string }
  | {
      type: "tool-input-available";
      toolCallId: string;
      toolName: string;
      input: Record<string, unknown>;
    }
  | {
      type: "tool-output-available";
      toolCallId: string;
      toolName?: string;
      output: unknown;
      success?: boolean;
    }
  | {
      type: "usage";
      promptTokens: number;
      completionTokens: number;
      totalTokens: number;
    }
  | {
      type: "error";
      errorText: string;
      code?: string;
      details?: Record<string, unknown>;
    };

export interface ActiveStream {
  sessionId: string;
  abortController: AbortController;
  status: StreamStatus;
  startedAt: number;
  chunks: StreamChunk[];
  error?: Error;
  onChunkCallbacks: Set<(chunk: StreamChunk) => void>;
}

export interface StreamResult {
  sessionId: string;
  status: StreamStatus;
  chunks: StreamChunk[];
  completedAt: number;
  error?: Error;
}

export type StreamCompleteCallback = (sessionId: string) => void;
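
Since VercelStreamChunk is a discriminated union on its type field, narrowing to a single variant is straightforward; a sketch:

// Illustrative only.
import type { VercelStreamChunk } from "./chat-types";

function isTextDelta(
  chunk: VercelStreamChunk,
): chunk is Extract<VercelStreamChunk, { type: "text-delta" }> {
  return chunk.type === "text-delta";
}
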
@@ -4,6 +4,7 @@ import { Text } from "@/components/atoms/Text/Text";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import { useBreakpoint } from "@/lib/hooks/useBreakpoint";
import { cn } from "@/lib/utils";
import { GlobeHemisphereEastIcon } from "@phosphor-icons/react";
import { useEffect } from "react";
import { ChatInput } from "../ChatInput/ChatInput";
import { MessageList } from "../MessageList/MessageList";
@@ -55,24 +56,37 @@ export function ChatContainer({
      )}
    >
      <Dialog
        title="Service unavailable"
        title={
          <div className="flex items-center gap-2">
            <GlobeHemisphereEastIcon className="size-6" />
            <Text
              variant="body"
              className="text-md font-poppins leading-none md:text-lg"
            >
              Service unavailable
            </Text>
          </div>
        }
        controlled={{
          isOpen: isRegionBlockedModalOpen,
          set: handleRegionModalOpenChange,
        }}
        onClose={handleRegionModalClose}
        styling={{ maxWidth: 550, width: "100%", minWidth: "auto" }}
      >
        <Dialog.Content>
          <div className="flex flex-col gap-4">
          <div className="flex flex-col gap-8">
            <Text variant="body">
              This model is not available in your region. Please connect via VPN
              and try again.
              The Autogpt AI model is not available in your region or your
              connection is blocking it. Please try again with a different
              connection.
            </Text>
            <div className="flex justify-end">
            <div className="flex justify-center">
              <Button
                type="button"
                variant="primary"
                onClick={handleRegionModalClose}
                className="w-full"
              >
                Got it
              </Button>

@@ -1,5 +1,5 @@
import { toast } from "sonner";
import { StreamChunk } from "../../useChatStream";
import type { StreamChunk } from "../../chat-types";
import type { HandlerDependencies } from "./handlers";
import {
  handleError,

@@ -1,7 +1,118 @@
import type { SessionDetailResponse } from "@/app/api/__generated__/models/sessionDetailResponse";
import { SessionKey, sessionStorage } from "@/services/storage/session-storage";
import type { ToolResult } from "@/types/chat";
import type { ChatMessageData } from "../ChatMessage/useChatMessage";

export function processInitialMessages(
  initialMessages: SessionDetailResponse["messages"],
): ChatMessageData[] {
  const processedMessages: ChatMessageData[] = [];
  const toolCallMap = new Map<string, string>();

  for (const msg of initialMessages) {
    if (!isValidMessage(msg)) {
      console.warn("Invalid message structure from backend:", msg);
      continue;
    }

    let content = String(msg.content || "");
    const role = String(msg.role || "assistant").toLowerCase();
    const toolCalls = msg.tool_calls;
    const timestamp = msg.timestamp
      ? new Date(msg.timestamp as string)
      : undefined;

    if (role === "user") {
      content = removePageContext(content);
      if (!content.trim()) continue;
      processedMessages.push({
        type: "message",
        role: "user",
        content,
        timestamp,
      });
      continue;
    }

    if (role === "assistant") {
      content = content
        .replace(/<thinking>[\s\S]*?<\/thinking>/gi, "")
        .replace(/<internal_reasoning>[\s\S]*?<\/internal_reasoning>/gi, "")
        .trim();

      if (toolCalls && isToolCallArray(toolCalls) && toolCalls.length > 0) {
        for (const toolCall of toolCalls) {
          const toolName = toolCall.function.name;
          const toolId = toolCall.id;
          toolCallMap.set(toolId, toolName);

          try {
            const args = JSON.parse(toolCall.function.arguments || "{}");
            processedMessages.push({
              type: "tool_call",
              toolId,
              toolName,
              arguments: args,
              timestamp,
            });
          } catch (err) {
            console.warn("Failed to parse tool call arguments:", err);
            processedMessages.push({
              type: "tool_call",
              toolId,
              toolName,
              arguments: {},
              timestamp,
            });
          }
        }
        if (content.trim()) {
          processedMessages.push({
            type: "message",
            role: "assistant",
            content,
            timestamp,
          });
        }
      } else if (content.trim()) {
        processedMessages.push({
          type: "message",
          role: "assistant",
          content,
          timestamp,
        });
      }
      continue;
    }

    if (role === "tool") {
      const toolCallId = (msg.tool_call_id as string) || "";
      const toolName = toolCallMap.get(toolCallId) || "unknown";
      const toolResponse = parseToolResponse(
        content,
        toolCallId,
        toolName,
        timestamp,
      );
      if (toolResponse) {
        processedMessages.push(toolResponse);
      }
      continue;
    }

    if (content.trim()) {
      processedMessages.push({
        type: "message",
        role: role as "user" | "assistant" | "system",
        content,
        timestamp,
      });
    }
  }

  return processedMessages;
}

export function hasSentInitialPrompt(sessionId: string): boolean {
  try {
    const sent = JSON.parse(
@@ -213,6 +324,23 @@ export function parseToolResponse(
      timestamp: timestamp || new Date(),
    };
  }
  if (responseType === "clarification_needed") {
    return {
      type: "clarification_needed",
      toolName,
      questions:
        (parsedResult.questions as Array<{
          question: string;
          keyword: string;
          example?: string;
        }>) || [],
      message:
        (parsedResult.message as string) ||
        "I need more information to proceed.",
      sessionId: (parsedResult.session_id as string) || "",
      timestamp: timestamp || new Date(),
    };
  }
  if (responseType === "need_login") {
    return {
      type: "login_needed",
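
Worth noting about the extracted processInitialMessages: assistant messages register each tool call's id in toolCallMap, and later role === "tool" messages recover the tool name from that map before parseToolResponse runs. A compact sketch of the pairing (ids hypothetical):

// Illustrative only.
toolCallMap.set("call_123", "search_docs"); // from an assistant tool_call
// ...later, for a tool message whose tool_call_id is "call_123":
const toolName = toolCallMap.get("call_123") ?? "unknown"; // "search_docs"
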
@@ -1,5 +1,6 @@
import type { SessionDetailResponse } from "@/app/api/__generated__/models/sessionDetailResponse";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { useEffect, useMemo, useRef, useState } from "react";
import { useChatStore } from "../../chat-store";
import { toast } from "sonner";
import { useChatStream } from "../../useChatStream";
import { usePageContext } from "../../usePageContext";
@@ -9,11 +10,8 @@ import {
  createUserMessage,
  filterAuthMessages,
  hasSentInitialPrompt,
  isToolCallArray,
  isValidMessage,
  markInitialPromptSent,
  parseToolResponse,
  removePageContext,
  processInitialMessages,
} from "./helpers";

interface Args {
@@ -41,11 +39,18 @@ export function useChatContainer({
    sendMessage: sendStreamMessage,
    stopStreaming,
  } = useChatStream();
  const activeStreams = useChatStore((s) => s.activeStreams);
  const subscribeToStream = useChatStore((s) => s.subscribeToStream);
  const isStreaming = isStreamingInitiated || hasTextChunks;

  useEffect(() => {
    if (sessionId !== previousSessionIdRef.current) {
      stopStreaming(previousSessionIdRef.current ?? undefined, true);
  useEffect(
    function handleSessionChange() {
      if (sessionId === previousSessionIdRef.current) return;

      const prevSession = previousSessionIdRef.current;
      if (prevSession) {
        stopStreaming(prevSession);
      }
      previousSessionIdRef.current = sessionId;
      setMessages([]);
      setStreamingChunks([]);
@@ -53,138 +58,11 @@ export function useChatContainer({
      setHasTextChunks(false);
      setIsStreamingInitiated(false);
      hasResponseRef.current = false;
    }
  }, [sessionId, stopStreaming]);

  const allMessages = useMemo(() => {
    const processedInitialMessages: ChatMessageData[] = [];
    const toolCallMap = new Map<string, string>();
    if (!sessionId) return;

    for (const msg of initialMessages) {
      if (!isValidMessage(msg)) {
        console.warn("Invalid message structure from backend:", msg);
        continue;
      }

      let content = String(msg.content || "");
      const role = String(msg.role || "assistant").toLowerCase();
      const toolCalls = msg.tool_calls;
      const timestamp = msg.timestamp
        ? new Date(msg.timestamp as string)
        : undefined;

      if (role === "user") {
        content = removePageContext(content);
        if (!content.trim()) continue;
        processedInitialMessages.push({
          type: "message",
          role: "user",
          content,
          timestamp,
        });
        continue;
      }

      if (role === "assistant") {
        content = content
          .replace(/<thinking>[\s\S]*?<\/thinking>/gi, "")
          .trim();

        if (toolCalls && isToolCallArray(toolCalls) && toolCalls.length > 0) {
          for (const toolCall of toolCalls) {
            const toolName = toolCall.function.name;
            const toolId = toolCall.id;
            toolCallMap.set(toolId, toolName);

            try {
              const args = JSON.parse(toolCall.function.arguments || "{}");
              processedInitialMessages.push({
                type: "tool_call",
                toolId,
                toolName,
                arguments: args,
                timestamp,
              });
            } catch (err) {
              console.warn("Failed to parse tool call arguments:", err);
              processedInitialMessages.push({
                type: "tool_call",
                toolId,
                toolName,
                arguments: {},
                timestamp,
              });
            }
          }
          if (content.trim()) {
            processedInitialMessages.push({
              type: "message",
              role: "assistant",
              content,
              timestamp,
            });
          }
        } else if (content.trim()) {
          processedInitialMessages.push({
            type: "message",
            role: "assistant",
            content,
            timestamp,
          });
        }
        continue;
      }

      if (role === "tool") {
        const toolCallId = (msg.tool_call_id as string) || "";
        const toolName = toolCallMap.get(toolCallId) || "unknown";
        const toolResponse = parseToolResponse(
          content,
          toolCallId,
          toolName,
          timestamp,
        );
        if (toolResponse) {
          processedInitialMessages.push(toolResponse);
        }
        continue;
      }

      if (content.trim()) {
        processedInitialMessages.push({
          type: "message",
          role: role as "user" | "assistant" | "system",
          content,
          timestamp,
        });
      }
    }

    return [...processedInitialMessages, ...messages];
  }, [initialMessages, messages]);

  const sendMessage = useCallback(
    async function sendMessage(
      content: string,
      isUserMessage: boolean = true,
      context?: { url: string; content: string },
    ) {
      if (!sessionId) {
        console.error("[useChatContainer] Cannot send message: no session ID");
        return;
      }
      setIsRegionBlockedModalOpen(false);
      if (isUserMessage) {
        const userMessage = createUserMessage(content);
        setMessages((prev) => [...filterAuthMessages(prev), userMessage]);
      } else {
        setMessages((prev) => filterAuthMessages(prev));
      }
      setStreamingChunks([]);
      streamingChunksRef.current = [];
      setHasTextChunks(false);
      setIsStreamingInitiated(true);
      hasResponseRef.current = false;
      const activeStream = activeStreams.get(sessionId);
      if (!activeStream || activeStream.status !== "streaming") return;

      const dispatcher = createStreamEventDispatcher({
        setHasTextChunks,
@@ -197,42 +75,85 @@ export function useChatContainer({
        setIsStreamingInitiated,
      });

      try {
        await sendStreamMessage(
          sessionId,
          content,
          dispatcher,
          isUserMessage,
          context,
        );
      } catch (err) {
        console.error("[useChatContainer] Failed to send message:", err);
        setIsStreamingInitiated(false);

        // Don't show error toast for AbortError (expected during cleanup)
        if (err instanceof Error && err.name === "AbortError") return;

        const errorMessage =
          err instanceof Error ? err.message : "Failed to send message";
        toast.error("Failed to send message", {
          description: errorMessage,
        });
      }
      setIsStreamingInitiated(true);
      const skipReplay = initialMessages.length > 0;
      return subscribeToStream(sessionId, dispatcher, skipReplay);
    },
    [sessionId, sendStreamMessage],
    [sessionId, stopStreaming, activeStreams, subscribeToStream],
  );

  const handleStopStreaming = useCallback(() => {
  const allMessages = useMemo(
    () => [...processInitialMessages(initialMessages), ...messages],
    [initialMessages, messages],
  );

  async function sendMessage(
    content: string,
    isUserMessage: boolean = true,
    context?: { url: string; content: string },
  ) {
    if (!sessionId) {
      console.error("[useChatContainer] Cannot send message: no session ID");
      return;
    }
    setIsRegionBlockedModalOpen(false);
    if (isUserMessage) {
      const userMessage = createUserMessage(content);
      setMessages((prev) => [...filterAuthMessages(prev), userMessage]);
    } else {
      setMessages((prev) => filterAuthMessages(prev));
    }
    setStreamingChunks([]);
    streamingChunksRef.current = [];
    setHasTextChunks(false);
    setIsStreamingInitiated(true);
    hasResponseRef.current = false;

    const dispatcher = createStreamEventDispatcher({
      setHasTextChunks,
      setStreamingChunks,
      streamingChunksRef,
      hasResponseRef,
      setMessages,
      setIsRegionBlockedModalOpen,
      sessionId,
      setIsStreamingInitiated,
    });

    try {
      await sendStreamMessage(
        sessionId,
        content,
        dispatcher,
        isUserMessage,
        context,
      );
    } catch (err) {
      console.error("[useChatContainer] Failed to send message:", err);
      setIsStreamingInitiated(false);

      if (err instanceof Error && err.name === "AbortError") return;

      const errorMessage =
        err instanceof Error ? err.message : "Failed to send message";
      toast.error("Failed to send message", {
        description: errorMessage,
      });
    }
  }

  function handleStopStreaming() {
    stopStreaming();
    setStreamingChunks([]);
    streamingChunksRef.current = [];
    setHasTextChunks(false);
    setIsStreamingInitiated(false);
  }, [stopStreaming]);
  }

  const { capturePageContext } = usePageContext();
  const sendMessageRef = useRef(sendMessage);
  sendMessageRef.current = sendMessage;

  // Send initial prompt if provided (for new sessions from homepage)
  useEffect(
    function handleInitialPrompt() {
      if (!initialPrompt || !sessionId) return;
@@ -241,15 +162,9 @@ export function useChatContainer({

      markInitialPromptSent(sessionId);
      const context = capturePageContext();
      sendMessage(initialPrompt, true, context);
      sendMessageRef.current(initialPrompt, true, context);
    },
    [
      initialPrompt,
      sessionId,
      initialMessages.length,
      sendMessage,
      capturePageContext,
    ],
    [initialPrompt, sessionId, initialMessages.length, capturePageContext],
  );

  async function sendMessageWithContext(

@@ -21,7 +21,7 @@ export function ChatInput({
  className,
}: Props) {
  const inputId = "chat-input";
  const { value, setValue, handleKeyDown, handleSend, hasMultipleLines } =
  const { value, handleKeyDown, handleSubmit, handleChange, hasMultipleLines } =
    useChatInput({
      onSend,
      disabled: disabled || isStreaming,
@@ -29,15 +29,6 @@ export function ChatInput({
      inputId,
    });

  function handleSubmit(e: React.FormEvent<HTMLFormElement>) {
    e.preventDefault();
    handleSend();
  }

  function handleChange(e: React.ChangeEvent<HTMLTextAreaElement>) {
    setValue(e.target.value);
  }

  return (
    <form onSubmit={handleSubmit} className={cn("relative flex-1", className)}>
      <div className="relative">

@@ -1,4 +1,10 @@
import { KeyboardEvent, useCallback, useEffect, useState } from "react";
import {
  ChangeEvent,
  FormEvent,
  KeyboardEvent,
  useEffect,
  useState,
} from "react";

interface UseChatInputArgs {
  onSend: (message: string) => void;
@@ -16,6 +22,23 @@ export function useChatInput({
  const [value, setValue] = useState("");
  const [hasMultipleLines, setHasMultipleLines] = useState(false);

  useEffect(
    function focusOnMount() {
      const textarea = document.getElementById(inputId) as HTMLTextAreaElement;
      if (textarea) textarea.focus();
    },
    [inputId],
  );

  useEffect(
    function focusWhenEnabled() {
      if (disabled) return;
      const textarea = document.getElementById(inputId) as HTMLTextAreaElement;
      if (textarea) textarea.focus();
    },
    [disabled, inputId],
  );

  useEffect(() => {
    const textarea = document.getElementById(inputId) as HTMLTextAreaElement;
    const wrapper = document.getElementById(
@@ -77,7 +100,7 @@ export function useChatInput({
    }
  }, [value, maxRows, inputId]);

  const handleSend = useCallback(() => {
  const handleSend = () => {
    if (disabled || !value.trim()) return;
    onSend(value.trim());
    setValue("");
@@ -93,23 +116,31 @@ export function useChatInput({
      wrapper.style.height = "";
      wrapper.style.maxHeight = "";
    }
  }, [value, onSend, disabled, inputId]);
  };

  const handleKeyDown = useCallback(
    (event: KeyboardEvent<HTMLTextAreaElement>) => {
      if (event.key === "Enter" && !event.shiftKey) {
        event.preventDefault();
        handleSend();
      }
    },
    [handleSend],
  );
  function handleKeyDown(event: KeyboardEvent<HTMLTextAreaElement>) {
    if (event.key === "Enter" && !event.shiftKey) {
      event.preventDefault();
      handleSend();
    }
  }

  function handleSubmit(e: FormEvent<HTMLFormElement>) {
    e.preventDefault();
    handleSend();
  }

  function handleChange(e: ChangeEvent<HTMLTextAreaElement>) {
    setValue(e.target.value);
  }

  return {
    value,
    setValue,
    handleKeyDown,
handleSend,
|
||||
handleSubmit,
|
||||
handleChange,
|
||||
hasMultipleLines,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -14,6 +14,7 @@ import { AgentCarouselMessage } from "../AgentCarouselMessage/AgentCarouselMessa
|
||||
import { AIChatBubble } from "../AIChatBubble/AIChatBubble";
|
||||
import { AuthPromptWidget } from "../AuthPromptWidget/AuthPromptWidget";
|
||||
import { ChatCredentialsSetup } from "../ChatCredentialsSetup/ChatCredentialsSetup";
|
||||
import { ClarificationQuestionsWidget } from "../ClarificationQuestionsWidget/ClarificationQuestionsWidget";
|
||||
import { ExecutionStartedMessage } from "../ExecutionStartedMessage/ExecutionStartedMessage";
|
||||
import { MarkdownContent } from "../MarkdownContent/MarkdownContent";
|
||||
import { NoResultsMessage } from "../NoResultsMessage/NoResultsMessage";
|
||||
@@ -69,6 +70,7 @@ export function ChatMessage({
|
||||
isToolResponse,
|
||||
isLoginNeeded,
|
||||
isCredentialsNeeded,
|
||||
isClarificationNeeded,
|
||||
} = useChatMessage(message);
|
||||
const displayContent = getDisplayContent(message, isUser);
|
||||
|
||||
@@ -96,6 +98,18 @@ export function ChatMessage({
|
||||
}
|
||||
}
|
||||
|
||||
function handleClarificationAnswers(answers: Record<string, string>) {
|
||||
if (onSendMessage) {
|
||||
const contextMessage = Object.entries(answers)
|
||||
.map(([keyword, answer]) => `${keyword}: ${answer}`)
|
||||
.join("\n");
|
||||
|
||||
onSendMessage(
|
||||
`I have the answers to your questions:\n\n${contextMessage}\n\nPlease proceed with creating the agent.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const handleCopy = useCallback(
|
||||
async function handleCopy() {
|
||||
if (message.type !== "message") return;
|
||||
@@ -141,6 +155,17 @@ export function ChatMessage({
|
||||
);
|
||||
}
|
||||
|
||||
if (isClarificationNeeded && message.type === "clarification_needed") {
|
||||
return (
|
||||
<ClarificationQuestionsWidget
|
||||
questions={message.questions}
|
||||
message={message.message}
|
||||
onSubmitAnswers={handleClarificationAnswers}
|
||||
className={className}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
// Render login needed messages
|
||||
if (isLoginNeeded && message.type === "login_needed") {
|
||||
// If user is already logged in, show success message instead of auth prompt
|
||||
|
||||
@@ -91,6 +91,18 @@ export type ChatMessageData =
|
||||
credentialsSchema?: Record<string, any>;
|
||||
message: string;
|
||||
timestamp?: string | Date;
|
||||
}
|
||||
| {
|
||||
type: "clarification_needed";
|
||||
toolName: string;
|
||||
questions: Array<{
|
||||
question: string;
|
||||
keyword: string;
|
||||
example?: string;
|
||||
}>;
|
||||
message: string;
|
||||
sessionId: string;
|
||||
timestamp?: string | Date;
|
||||
};
|
||||
|
||||
export function useChatMessage(message: ChatMessageData) {
|
||||
@@ -111,5 +123,6 @@ export function useChatMessage(message: ChatMessageData) {
|
||||
isAgentCarousel: message.type === "agent_carousel",
|
||||
isExecutionStarted: message.type === "execution_started",
|
||||
isInputsNeeded: message.type === "inputs_needed",
|
||||
isClarificationNeeded: message.type === "clarification_needed",
|
||||
};
|
||||
}
|
||||
|
||||
@@ -0,0 +1,154 @@
|
||||
"use client";
|
||||
|
||||
import { Button } from "@/components/atoms/Button/Button";
|
||||
import { Card } from "@/components/atoms/Card/Card";
|
||||
import { Input } from "@/components/atoms/Input/Input";
|
||||
import { Text } from "@/components/atoms/Text/Text";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { CheckCircleIcon, QuestionIcon } from "@phosphor-icons/react";
|
||||
import { useState } from "react";
|
||||
|
||||
export interface ClarifyingQuestion {
|
||||
question: string;
|
||||
keyword: string;
|
||||
example?: string;
|
||||
}
|
||||
|
||||
interface Props {
|
||||
questions: ClarifyingQuestion[];
|
||||
message: string;
|
||||
onSubmitAnswers: (answers: Record<string, string>) => void;
|
||||
onCancel?: () => void;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
export function ClarificationQuestionsWidget({
|
||||
questions,
|
||||
message,
|
||||
onSubmitAnswers,
|
||||
onCancel,
|
||||
className,
|
||||
}: Props) {
|
||||
const [answers, setAnswers] = useState<Record<string, string>>({});
|
||||
|
||||
function handleAnswerChange(keyword: string, value: string) {
|
||||
setAnswers((prev) => ({ ...prev, [keyword]: value }));
|
||||
}
|
||||
|
||||
function handleSubmit() {
|
||||
// Check if all questions are answered
|
||||
const allAnswered = questions.every((q) => answers[q.keyword]?.trim());
|
||||
if (!allAnswered) {
|
||||
return;
|
||||
}
|
||||
onSubmitAnswers(answers);
|
||||
}
|
||||
|
||||
const allAnswered = questions.every((q) => answers[q.keyword]?.trim());
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
"group relative flex w-full justify-start gap-3 px-4 py-3",
|
||||
className,
|
||||
)}
|
||||
>
|
||||
<div className="flex w-full max-w-3xl gap-3">
|
||||
<div className="flex-shrink-0">
|
||||
<div className="flex h-7 w-7 items-center justify-center rounded-lg bg-indigo-500">
|
||||
<QuestionIcon className="h-4 w-4 text-indigo-50" weight="bold" />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="flex min-w-0 flex-1 flex-col">
|
||||
<Card className="space-y-4 p-4">
|
||||
<div>
|
||||
<Text variant="h4" className="mb-1 text-slate-900">
|
||||
I need more information
|
||||
</Text>
|
||||
<Text variant="small" className="text-slate-600">
|
||||
{message}
|
||||
</Text>
|
||||
</div>
|
||||
|
||||
<div className="space-y-3">
|
||||
{questions.map((q, index) => {
|
||||
const isAnswered = !!answers[q.keyword]?.trim();
|
||||
|
||||
return (
|
||||
<div
|
||||
key={`${q.keyword}-${index}`}
|
||||
className={cn(
|
||||
"relative rounded-lg border p-3",
|
||||
isAnswered
|
||||
? "border-green-500 bg-green-50/50"
|
||||
: "border-slate-200 bg-white/50",
|
||||
)}
|
||||
>
|
||||
<div className="mb-2 flex items-start gap-2">
|
||||
{isAnswered ? (
|
||||
<CheckCircleIcon
|
||||
size={16}
|
||||
className="mt-0.5 text-green-500"
|
||||
weight="bold"
|
||||
/>
|
||||
) : (
|
||||
<div className="mt-0.5 flex h-4 w-4 items-center justify-center rounded-full border border-slate-300 bg-white text-xs text-slate-500">
|
||||
{index + 1}
|
||||
</div>
|
||||
)}
|
||||
<div className="flex-1">
|
||||
<Text
|
||||
variant="small"
|
||||
className="mb-2 font-semibold text-slate-900"
|
||||
>
|
||||
{q.question}
|
||||
</Text>
|
||||
{q.example && (
|
||||
<Text
|
||||
variant="small"
|
||||
className="mb-2 italic text-slate-500"
|
||||
>
|
||||
Example: {q.example}
|
||||
</Text>
|
||||
)}
|
||||
<Input
|
||||
type="textarea"
|
||||
id={`clarification-${q.keyword}-${index}`}
|
||||
label={q.question}
|
||||
hideLabel
|
||||
placeholder="Your answer..."
|
||||
rows={2}
|
||||
value={answers[q.keyword] || ""}
|
||||
onChange={(e) =>
|
||||
handleAnswerChange(q.keyword, e.target.value)
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
|
||||
<div className="flex gap-2">
|
||||
<Button
|
||||
onClick={handleSubmit}
|
||||
disabled={!allAnswered}
|
||||
className="flex-1"
|
||||
variant="primary"
|
||||
>
|
||||
Submit Answers
|
||||
</Button>
|
||||
{onCancel && (
|
||||
<Button onClick={onCancel} variant="outline">
|
||||
Cancel
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
</Card>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -1,7 +1,5 @@
|
||||
import { AIChatBubble } from "../../../AIChatBubble/AIChatBubble";
|
||||
import type { ChatMessageData } from "../../../ChatMessage/useChatMessage";
|
||||
import { MarkdownContent } from "../../../MarkdownContent/MarkdownContent";
|
||||
import { formatToolResponse } from "../../../ToolResponseMessage/helpers";
|
||||
import { ToolResponseMessage } from "../../../ToolResponseMessage/ToolResponseMessage";
|
||||
import { shouldSkipAgentOutput } from "../../helpers";
|
||||
|
||||
export interface LastToolResponseProps {
|
||||
@@ -15,16 +13,15 @@ export function LastToolResponse({
|
||||
}: LastToolResponseProps) {
|
||||
if (message.type !== "tool_response") return null;
|
||||
|
||||
// Skip if this is an agent_output that should be rendered inside assistant message
|
||||
if (shouldSkipAgentOutput(message, prevMessage)) return null;
|
||||
|
||||
const formattedText = formatToolResponse(message.result, message.toolName);
|
||||
|
||||
return (
|
||||
<div className="min-w-0 overflow-x-hidden hyphens-auto break-words px-4 py-2">
|
||||
<AIChatBubble>
|
||||
<MarkdownContent content={formattedText} />
|
||||
</AIChatBubble>
|
||||
<ToolResponseMessage
|
||||
toolId={message.toolId}
|
||||
toolName={message.toolName}
|
||||
result={message.result}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,7 +1,14 @@
|
||||
import { Text } from "@/components/atoms/Text/Text";
|
||||
import { cn } from "@/lib/utils";
|
||||
import type { ToolResult } from "@/types/chat";
|
||||
import { WarningCircleIcon } from "@phosphor-icons/react";
|
||||
import { AIChatBubble } from "../AIChatBubble/AIChatBubble";
|
||||
import { MarkdownContent } from "../MarkdownContent/MarkdownContent";
|
||||
import { formatToolResponse } from "./helpers";
|
||||
import {
|
||||
formatToolResponse,
|
||||
getErrorMessage,
|
||||
isErrorResponse,
|
||||
} from "./helpers";
|
||||
|
||||
export interface ToolResponseMessageProps {
|
||||
toolId?: string;
|
||||
@@ -18,6 +25,24 @@ export function ToolResponseMessage({
|
||||
success: _success,
|
||||
className,
|
||||
}: ToolResponseMessageProps) {
|
||||
if (isErrorResponse(result)) {
|
||||
const errorMessage = getErrorMessage(result);
|
||||
return (
|
||||
<AIChatBubble className={className}>
|
||||
<div className="flex items-center gap-2">
|
||||
<WarningCircleIcon
|
||||
size={14}
|
||||
weight="regular"
|
||||
className="shrink-0 text-neutral-400"
|
||||
/>
|
||||
<Text variant="small" className={cn("text-xs text-neutral-500")}>
|
||||
{errorMessage}
|
||||
</Text>
|
||||
</div>
|
||||
</AIChatBubble>
|
||||
);
|
||||
}
|
||||
|
||||
const formattedText = formatToolResponse(result, toolName);
|
||||
|
||||
return (
|
||||
|
||||
@@ -1,3 +1,42 @@
|
||||
function stripInternalReasoning(content: string): string {
|
||||
return content
|
||||
.replace(/<internal_reasoning>[\s\S]*?<\/internal_reasoning>/gi, "")
|
||||
.replace(/<thinking>[\s\S]*?<\/thinking>/gi, "")
|
||||
.replace(/\n{3,}/g, "\n\n")
|
||||
.trim();
|
||||
}
|
||||
|
||||
export function isErrorResponse(result: unknown): boolean {
|
||||
if (typeof result === "string") {
|
||||
const lower = result.toLowerCase();
|
||||
return (
|
||||
lower.startsWith("error:") ||
|
||||
lower.includes("not found") ||
|
||||
lower.includes("does not exist") ||
|
||||
lower.includes("failed to") ||
|
||||
lower.includes("unable to")
|
||||
);
|
||||
}
|
||||
if (typeof result === "object" && result !== null) {
|
||||
const response = result as Record<string, unknown>;
|
||||
return response.type === "error" || response.error !== undefined;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
export function getErrorMessage(result: unknown): string {
|
||||
if (typeof result === "string") {
|
||||
return stripInternalReasoning(result.replace(/^error:\s*/i, ""));
|
||||
}
|
||||
if (typeof result === "object" && result !== null) {
|
||||
const response = result as Record<string, unknown>;
|
||||
if (response.error) return stripInternalReasoning(String(response.error));
|
||||
if (response.message)
|
||||
return stripInternalReasoning(String(response.message));
|
||||
}
|
||||
return "An error occurred";
|
||||
}
|
||||
|
||||
function getToolCompletionPhrase(toolName: string): string {
|
||||
const toolCompletionPhrases: Record<string, string> = {
|
||||
add_understanding: "Updated your business information",
|
||||
@@ -28,10 +67,10 @@ export function formatToolResponse(result: unknown, toolName: string): string {
|
||||
const parsed = JSON.parse(trimmed);
|
||||
return formatToolResponse(parsed, toolName);
|
||||
} catch {
|
||||
return trimmed;
|
||||
return stripInternalReasoning(trimmed);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
return stripInternalReasoning(result);
|
||||
}
|
||||
|
||||
if (typeof result !== "object" || result === null) {
|
||||
|
||||
@@ -0,0 +1,142 @@
|
||||
import type {
|
||||
ActiveStream,
|
||||
StreamChunk,
|
||||
VercelStreamChunk,
|
||||
} from "./chat-types";
|
||||
import {
|
||||
INITIAL_RETRY_DELAY,
|
||||
MAX_RETRIES,
|
||||
normalizeStreamChunk,
|
||||
parseSSELine,
|
||||
} from "./stream-utils";
|
||||
|
||||
function notifySubscribers(stream: ActiveStream, chunk: StreamChunk) {
|
||||
stream.chunks.push(chunk);
|
||||
for (const callback of stream.onChunkCallbacks) {
|
||||
try {
|
||||
callback(chunk);
|
||||
} catch (err) {
|
||||
console.warn("[StreamExecutor] Subscriber callback error:", err);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function executeStream(
|
||||
stream: ActiveStream,
|
||||
message: string,
|
||||
isUserMessage: boolean,
|
||||
context?: { url: string; content: string },
|
||||
retryCount: number = 0,
|
||||
): Promise<void> {
|
||||
const { sessionId, abortController } = stream;
|
||||
|
||||
try {
|
||||
const url = `/api/chat/sessions/${sessionId}/stream`;
|
||||
const body = JSON.stringify({
|
||||
message,
|
||||
is_user_message: isUserMessage,
|
||||
context: context || null,
|
||||
});
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Accept: "text/event-stream",
|
||||
},
|
||||
body,
|
||||
signal: abortController.signal,
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text();
|
||||
throw new Error(errorText || `HTTP ${response.status}`);
|
||||
}
|
||||
|
||||
if (!response.body) {
|
||||
throw new Error("Response body is null");
|
||||
}
|
||||
|
||||
const reader = response.body.getReader();
|
||||
const decoder = new TextDecoder();
|
||||
let buffer = "";
|
||||
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
|
||||
if (done) {
|
||||
notifySubscribers(stream, { type: "stream_end" });
|
||||
stream.status = "completed";
|
||||
return;
|
||||
}
|
||||
|
||||
buffer += decoder.decode(value, { stream: true });
|
||||
const lines = buffer.split("\n");
|
||||
buffer = lines.pop() || "";
|
||||
|
||||
for (const line of lines) {
|
||||
const data = parseSSELine(line);
|
||||
if (data !== null) {
|
||||
if (data === "[DONE]") {
|
||||
notifySubscribers(stream, { type: "stream_end" });
|
||||
stream.status = "completed";
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const rawChunk = JSON.parse(data) as
|
||||
| StreamChunk
|
||||
| VercelStreamChunk;
|
||||
const chunk = normalizeStreamChunk(rawChunk);
|
||||
if (!chunk) continue;
|
||||
|
||||
notifySubscribers(stream, chunk);
|
||||
|
||||
if (chunk.type === "stream_end") {
|
||||
stream.status = "completed";
|
||||
return;
|
||||
}
|
||||
|
||||
if (chunk.type === "error") {
|
||||
stream.status = "error";
|
||||
stream.error = new Error(
|
||||
chunk.message || chunk.content || "Stream error",
|
||||
);
|
||||
return;
|
||||
}
|
||||
} catch (err) {
|
||||
console.warn("[StreamExecutor] Failed to parse SSE chunk:", err);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
if (err instanceof Error && err.name === "AbortError") {
|
||||
notifySubscribers(stream, { type: "stream_end" });
|
||||
stream.status = "completed";
|
||||
return;
|
||||
}
|
||||
|
||||
if (retryCount < MAX_RETRIES) {
|
||||
const retryDelay = INITIAL_RETRY_DELAY * Math.pow(2, retryCount);
|
||||
console.log(
|
||||
`[StreamExecutor] Retrying in ${retryDelay}ms (attempt ${retryCount + 1}/${MAX_RETRIES})`,
|
||||
);
|
||||
await new Promise((resolve) => setTimeout(resolve, retryDelay));
|
||||
return executeStream(
|
||||
stream,
|
||||
message,
|
||||
isUserMessage,
|
||||
context,
|
||||
retryCount + 1,
|
||||
);
|
||||
}
|
||||
|
||||
stream.status = "error";
|
||||
stream.error = err instanceof Error ? err : new Error("Stream failed");
|
||||
notifySubscribers(stream, {
|
||||
type: "error",
|
||||
message: stream.error.message,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,84 @@
|
||||
import type { ToolArguments, ToolResult } from "@/types/chat";
|
||||
import type { StreamChunk, VercelStreamChunk } from "./chat-types";
|
||||
|
||||
const LEGACY_STREAM_TYPES = new Set<StreamChunk["type"]>([
|
||||
"text_chunk",
|
||||
"text_ended",
|
||||
"tool_call",
|
||||
"tool_call_start",
|
||||
"tool_response",
|
||||
"login_needed",
|
||||
"need_login",
|
||||
"credentials_needed",
|
||||
"error",
|
||||
"usage",
|
||||
"stream_end",
|
||||
]);
|
||||
|
||||
export function isLegacyStreamChunk(
|
||||
chunk: StreamChunk | VercelStreamChunk,
|
||||
): chunk is StreamChunk {
|
||||
return LEGACY_STREAM_TYPES.has(chunk.type as StreamChunk["type"]);
|
||||
}
|
||||
|
||||
export function normalizeStreamChunk(
|
||||
chunk: StreamChunk | VercelStreamChunk,
|
||||
): StreamChunk | null {
|
||||
if (isLegacyStreamChunk(chunk)) return chunk;
|
||||
|
||||
switch (chunk.type) {
|
||||
case "text-delta":
|
||||
return { type: "text_chunk", content: chunk.delta };
|
||||
case "text-end":
|
||||
return { type: "text_ended" };
|
||||
case "tool-input-available":
|
||||
return {
|
||||
type: "tool_call_start",
|
||||
tool_id: chunk.toolCallId,
|
||||
tool_name: chunk.toolName,
|
||||
arguments: chunk.input as ToolArguments,
|
||||
};
|
||||
case "tool-output-available":
|
||||
return {
|
||||
type: "tool_response",
|
||||
tool_id: chunk.toolCallId,
|
||||
tool_name: chunk.toolName,
|
||||
result: chunk.output as ToolResult,
|
||||
success: chunk.success ?? true,
|
||||
};
|
||||
case "usage":
|
||||
return {
|
||||
type: "usage",
|
||||
promptTokens: chunk.promptTokens,
|
||||
completionTokens: chunk.completionTokens,
|
||||
totalTokens: chunk.totalTokens,
|
||||
};
|
||||
case "error":
|
||||
return {
|
||||
type: "error",
|
||||
message: chunk.errorText,
|
||||
code: chunk.code,
|
||||
details: chunk.details,
|
||||
};
|
||||
case "finish":
|
||||
return { type: "stream_end" };
|
||||
case "start":
|
||||
case "text-start":
|
||||
return null;
|
||||
case "tool-input-start":
|
||||
return {
|
||||
type: "tool_call_start",
|
||||
tool_id: chunk.toolCallId,
|
||||
tool_name: chunk.toolName,
|
||||
arguments: {},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export const MAX_RETRIES = 3;
|
||||
export const INITIAL_RETRY_DELAY = 1000;
|
||||
|
||||
export function parseSSELine(line: string): string | null {
|
||||
if (line.startsWith("data: ")) return line.slice(6);
|
||||
return null;
|
||||
}
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
|
||||
import { useEffect, useRef, useState } from "react";
|
||||
import { toast } from "sonner";
|
||||
import { useChatSession } from "./useChatSession";
|
||||
import { useChatStream } from "./useChatStream";
|
||||
|
||||
@@ -67,38 +66,16 @@ export function useChat({ urlSessionId }: UseChatArgs = {}) {
|
||||
],
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
if (isLoading || isCreating) {
|
||||
const timer = setTimeout(() => {
|
||||
setShowLoader(true);
|
||||
}, 300);
|
||||
return () => clearTimeout(timer);
|
||||
} else {
|
||||
useEffect(
|
||||
function showLoaderWithDelay() {
|
||||
if (isLoading || isCreating) {
|
||||
const timer = setTimeout(() => setShowLoader(true), 300);
|
||||
return () => clearTimeout(timer);
|
||||
}
|
||||
setShowLoader(false);
|
||||
}
|
||||
}, [isLoading, isCreating]);
|
||||
|
||||
useEffect(function monitorNetworkStatus() {
|
||||
function handleOnline() {
|
||||
toast.success("Connection restored", {
|
||||
description: "You're back online",
|
||||
});
|
||||
}
|
||||
|
||||
function handleOffline() {
|
||||
toast.error("You're offline", {
|
||||
description: "Check your internet connection",
|
||||
});
|
||||
}
|
||||
|
||||
window.addEventListener("online", handleOnline);
|
||||
window.addEventListener("offline", handleOffline);
|
||||
|
||||
return () => {
|
||||
window.removeEventListener("online", handleOnline);
|
||||
window.removeEventListener("offline", handleOffline);
|
||||
};
|
||||
}, []);
|
||||
},
|
||||
[isLoading, isCreating],
|
||||
);
|
||||
|
||||
function clearSession() {
|
||||
clearSessionBase();
|
||||
|
||||
@@ -1,17 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { create } from "zustand";
|
||||
|
||||
interface ChatDrawerState {
|
||||
isOpen: boolean;
|
||||
open: () => void;
|
||||
close: () => void;
|
||||
toggle: () => void;
|
||||
}
|
||||
|
||||
export const useChatDrawer = create<ChatDrawerState>((set) => ({
|
||||
isOpen: false,
|
||||
open: () => set({ isOpen: true }),
|
||||
close: () => set({ isOpen: false }),
|
||||
toggle: () => set((state) => ({ isOpen: !state.isOpen })),
|
||||
}));
|
||||
@@ -1,6 +1,7 @@
|
||||
import {
|
||||
getGetV2GetSessionQueryKey,
|
||||
getGetV2GetSessionQueryOptions,
|
||||
getGetV2ListSessionsQueryKey,
|
||||
postV2CreateSession,
|
||||
useGetV2GetSession,
|
||||
usePatchV2SessionAssignUser,
|
||||
@@ -101,6 +102,17 @@ export function useChatSession({
|
||||
}
|
||||
}, [createError, loadError]);
|
||||
|
||||
useEffect(
|
||||
function refreshSessionsListOnLoad() {
|
||||
if (sessionId && sessionData && !isLoadingSession) {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: getGetV2ListSessionsQueryKey(),
|
||||
});
|
||||
}
|
||||
},
|
||||
[sessionId, sessionData, isLoadingSession, queryClient],
|
||||
);
|
||||
|
||||
async function createSession() {
|
||||
try {
|
||||
setError(null);
|
||||
|
||||
@@ -1,543 +1,110 @@
|
||||
import type { ToolArguments, ToolResult } from "@/types/chat";
|
||||
import { useCallback, useEffect, useRef, useState } from "react";
|
||||
"use client";
|
||||
|
||||
import { useEffect, useRef, useState } from "react";
|
||||
import { toast } from "sonner";
|
||||
import { useChatStore } from "./chat-store";
|
||||
import type { StreamChunk } from "./chat-types";
|
||||
|
||||
const MAX_RETRIES = 3;
|
||||
const INITIAL_RETRY_DELAY = 1000;
|
||||
|
||||
export interface StreamChunk {
|
||||
type:
|
||||
| "text_chunk"
|
||||
| "text_ended"
|
||||
| "tool_call"
|
||||
| "tool_call_start"
|
||||
| "tool_response"
|
||||
| "login_needed"
|
||||
| "need_login"
|
||||
| "credentials_needed"
|
||||
| "error"
|
||||
| "usage"
|
||||
| "stream_end";
|
||||
timestamp?: string;
|
||||
content?: string;
|
||||
message?: string;
|
||||
code?: string;
|
||||
details?: Record<string, unknown>;
|
||||
tool_id?: string;
|
||||
tool_name?: string;
|
||||
arguments?: ToolArguments;
|
||||
result?: ToolResult;
|
||||
success?: boolean;
|
||||
idx?: number;
|
||||
session_id?: string;
|
||||
agent_info?: {
|
||||
graph_id: string;
|
||||
name: string;
|
||||
trigger_type: string;
|
||||
};
|
||||
provider?: string;
|
||||
provider_name?: string;
|
||||
credential_type?: string;
|
||||
scopes?: string[];
|
||||
title?: string;
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
type VercelStreamChunk =
|
||||
| { type: "start"; messageId: string }
|
||||
| { type: "finish" }
|
||||
| { type: "text-start"; id: string }
|
||||
| { type: "text-delta"; id: string; delta: string }
|
||||
| { type: "text-end"; id: string }
|
||||
| { type: "tool-input-start"; toolCallId: string; toolName: string }
|
||||
| {
|
||||
type: "tool-input-available";
|
||||
toolCallId: string;
|
||||
toolName: string;
|
||||
input: ToolArguments;
|
||||
}
|
||||
| {
|
||||
type: "tool-output-available";
|
||||
toolCallId: string;
|
||||
toolName?: string;
|
||||
output: ToolResult;
|
||||
success?: boolean;
|
||||
}
|
||||
| {
|
||||
type: "usage";
|
||||
promptTokens: number;
|
||||
completionTokens: number;
|
||||
totalTokens: number;
|
||||
}
|
||||
| {
|
||||
type: "error";
|
||||
errorText: string;
|
||||
code?: string;
|
||||
details?: Record<string, unknown>;
|
||||
};
|
||||
|
||||
const LEGACY_STREAM_TYPES = new Set<StreamChunk["type"]>([
|
||||
"text_chunk",
|
||||
"text_ended",
|
||||
"tool_call",
|
||||
"tool_call_start",
|
||||
"tool_response",
|
||||
"login_needed",
|
||||
"need_login",
|
||||
"credentials_needed",
|
||||
"error",
|
||||
"usage",
|
||||
"stream_end",
|
||||
]);
|
||||
|
||||
function isLegacyStreamChunk(
|
||||
chunk: StreamChunk | VercelStreamChunk,
|
||||
): chunk is StreamChunk {
|
||||
return LEGACY_STREAM_TYPES.has(chunk.type as StreamChunk["type"]);
|
||||
}
|
||||
|
||||
function normalizeStreamChunk(
|
||||
chunk: StreamChunk | VercelStreamChunk,
|
||||
): StreamChunk | null {
|
||||
if (isLegacyStreamChunk(chunk)) {
|
||||
return chunk;
|
||||
}
|
||||
switch (chunk.type) {
|
||||
case "text-delta":
|
||||
return { type: "text_chunk", content: chunk.delta };
|
||||
case "text-end":
|
||||
return { type: "text_ended" };
|
||||
case "tool-input-available":
|
||||
return {
|
||||
type: "tool_call_start",
|
||||
tool_id: chunk.toolCallId,
|
||||
tool_name: chunk.toolName,
|
||||
arguments: chunk.input,
|
||||
};
|
||||
case "tool-output-available":
|
||||
return {
|
||||
type: "tool_response",
|
||||
tool_id: chunk.toolCallId,
|
||||
tool_name: chunk.toolName,
|
||||
result: chunk.output,
|
||||
success: chunk.success ?? true,
|
||||
};
|
||||
case "usage":
|
||||
return {
|
||||
type: "usage",
|
||||
promptTokens: chunk.promptTokens,
|
||||
completionTokens: chunk.completionTokens,
|
||||
totalTokens: chunk.totalTokens,
|
||||
};
|
||||
case "error":
|
||||
return {
|
||||
type: "error",
|
||||
message: chunk.errorText,
|
||||
code: chunk.code,
|
||||
details: chunk.details,
|
||||
};
|
||||
case "finish":
|
||||
return { type: "stream_end" };
|
||||
case "start":
|
||||
case "text-start":
|
||||
return null;
|
||||
case "tool-input-start":
|
||||
const toolInputStart = chunk as Extract<
|
||||
VercelStreamChunk,
|
||||
{ type: "tool-input-start" }
|
||||
>;
|
||||
return {
|
||||
type: "tool_call_start",
|
||||
tool_id: toolInputStart.toolCallId,
|
||||
tool_name: toolInputStart.toolName,
|
||||
arguments: {},
|
||||
};
|
||||
}
|
||||
}
|
||||
export type { StreamChunk } from "./chat-types";
|
||||
|
||||
export function useChatStream() {
|
||||
const [isStreaming, setIsStreaming] = useState(false);
|
||||
const [error, setError] = useState<Error | null>(null);
|
||||
const retryCountRef = useRef<number>(0);
|
||||
const retryTimeoutRef = useRef<NodeJS.Timeout | null>(null);
|
||||
const abortControllerRef = useRef<AbortController | null>(null);
|
||||
const currentSessionIdRef = useRef<string | null>(null);
|
||||
const requestStartTimeRef = useRef<number | null>(null);
|
||||
|
||||
const stopStreaming = useCallback(
|
||||
(sessionId?: string, force: boolean = false) => {
|
||||
console.log("[useChatStream] stopStreaming called", {
|
||||
hasAbortController: !!abortControllerRef.current,
|
||||
isAborted: abortControllerRef.current?.signal.aborted,
|
||||
currentSessionId: currentSessionIdRef.current,
|
||||
requestedSessionId: sessionId,
|
||||
requestStartTime: requestStartTimeRef.current,
|
||||
timeSinceStart: requestStartTimeRef.current
|
||||
? Date.now() - requestStartTimeRef.current
|
||||
: null,
|
||||
force,
|
||||
stack: new Error().stack,
|
||||
});
|
||||
|
||||
if (
|
||||
sessionId &&
|
||||
currentSessionIdRef.current &&
|
||||
currentSessionIdRef.current !== sessionId
|
||||
) {
|
||||
console.log(
|
||||
"[useChatStream] Session changed, aborting previous stream",
|
||||
{
|
||||
oldSessionId: currentSessionIdRef.current,
|
||||
newSessionId: sessionId,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
const controller = abortControllerRef.current;
|
||||
if (controller) {
|
||||
const timeSinceStart = requestStartTimeRef.current
|
||||
? Date.now() - requestStartTimeRef.current
|
||||
: null;
|
||||
|
||||
if (!force && timeSinceStart !== null && timeSinceStart < 100) {
|
||||
console.log(
|
||||
"[useChatStream] Request just started (<100ms), skipping abort to prevent race condition",
|
||||
{
|
||||
timeSinceStart,
|
||||
},
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const signal = controller.signal;
|
||||
|
||||
if (
|
||||
signal &&
|
||||
typeof signal.aborted === "boolean" &&
|
||||
!signal.aborted
|
||||
) {
|
||||
console.log("[useChatStream] Aborting stream");
|
||||
controller.abort();
|
||||
} else {
|
||||
console.log(
|
||||
"[useChatStream] Stream already aborted or signal invalid",
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
if (error instanceof Error && error.name === "AbortError") {
|
||||
console.log(
|
||||
"[useChatStream] AbortError caught (expected during cleanup)",
|
||||
);
|
||||
} else {
|
||||
console.warn("[useChatStream] Error aborting stream:", error);
|
||||
}
|
||||
} finally {
|
||||
abortControllerRef.current = null;
|
||||
requestStartTimeRef.current = null;
|
||||
}
|
||||
}
|
||||
if (retryTimeoutRef.current) {
|
||||
clearTimeout(retryTimeoutRef.current);
|
||||
retryTimeoutRef.current = null;
|
||||
}
|
||||
setIsStreaming(false);
|
||||
},
|
||||
[],
|
||||
const onChunkCallbackRef = useRef<((chunk: StreamChunk) => void) | null>(
|
||||
null,
|
||||
);
|
||||
|
||||
const stopStream = useChatStore((s) => s.stopStream);
|
||||
const unregisterActiveSession = useChatStore(
|
||||
(s) => s.unregisterActiveSession,
|
||||
);
|
||||
const isSessionActive = useChatStore((s) => s.isSessionActive);
|
||||
const onStreamComplete = useChatStore((s) => s.onStreamComplete);
|
||||
const getCompletedStream = useChatStore((s) => s.getCompletedStream);
|
||||
const registerActiveSession = useChatStore((s) => s.registerActiveSession);
|
||||
const startStream = useChatStore((s) => s.startStream);
|
||||
const getStreamStatus = useChatStore((s) => s.getStreamStatus);
|
||||
|
||||
function stopStreaming(sessionId?: string) {
|
||||
const targetSession = sessionId || currentSessionIdRef.current;
|
||||
if (targetSession) {
|
||||
stopStream(targetSession);
|
||||
unregisterActiveSession(targetSession);
|
||||
}
|
||||
setIsStreaming(false);
|
||||
}
|
||||
|
||||
useEffect(() => {
|
||||
console.log("[useChatStream] Component mounted");
|
||||
return () => {
|
||||
const sessionIdAtUnmount = currentSessionIdRef.current;
|
||||
console.log(
|
||||
"[useChatStream] Component unmounting, calling stopStreaming",
|
||||
{
|
||||
sessionIdAtUnmount,
|
||||
},
|
||||
);
|
||||
stopStreaming(undefined, false);
|
||||
return function cleanup() {
|
||||
const sessionId = currentSessionIdRef.current;
|
||||
if (sessionId && !isSessionActive(sessionId)) {
|
||||
stopStream(sessionId);
|
||||
}
|
||||
currentSessionIdRef.current = null;
|
||||
onChunkCallbackRef.current = null;
|
||||
};
|
||||
}, [stopStreaming]);
|
||||
}, []);
|
||||
|
||||
const sendMessage = useCallback(
|
||||
async (
|
||||
sessionId: string,
|
||||
message: string,
|
||||
onChunk: (chunk: StreamChunk) => void,
|
||||
isUserMessage: boolean = true,
|
||||
context?: { url: string; content: string },
|
||||
isRetry: boolean = false,
|
||||
) => {
|
||||
console.log("[useChatStream] sendMessage called", {
|
||||
sessionId,
|
||||
message: message.substring(0, 50),
|
||||
isUserMessage,
|
||||
isRetry,
|
||||
stack: new Error().stack,
|
||||
});
|
||||
useEffect(() => {
|
||||
const unsubscribe = onStreamComplete(
|
||||
function handleStreamComplete(completedSessionId) {
|
||||
if (completedSessionId !== currentSessionIdRef.current) return;
|
||||
|
||||
const previousSessionId = currentSessionIdRef.current;
|
||||
stopStreaming(sessionId, true);
|
||||
currentSessionIdRef.current = sessionId;
|
||||
|
||||
const abortController = new AbortController();
|
||||
abortControllerRef.current = abortController;
|
||||
requestStartTimeRef.current = Date.now();
|
||||
console.log("[useChatStream] Created new AbortController", {
|
||||
sessionId,
|
||||
previousSessionId,
|
||||
requestStartTime: requestStartTimeRef.current,
|
||||
});
|
||||
|
||||
if (abortController.signal.aborted) {
|
||||
console.warn(
|
||||
"[useChatStream] AbortController was aborted before request started",
|
||||
);
|
||||
requestStartTimeRef.current = null;
|
||||
return Promise.reject(new Error("Request aborted"));
|
||||
}
|
||||
|
||||
if (!isRetry) {
|
||||
retryCountRef.current = 0;
|
||||
}
|
||||
setIsStreaming(true);
|
||||
setError(null);
|
||||
|
||||
try {
|
||||
const url = `/api/chat/sessions/${sessionId}/stream`;
|
||||
const body = JSON.stringify({
|
||||
message,
|
||||
is_user_message: isUserMessage,
|
||||
context: context || null,
|
||||
});
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Accept: "text/event-stream",
|
||||
},
|
||||
body,
|
||||
signal: abortController.signal,
|
||||
});
|
||||
|
||||
console.info("[useChatStream] Stream response", {
|
||||
sessionId,
|
||||
status: response.status,
|
||||
ok: response.ok,
|
||||
contentType: response.headers.get("content-type"),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text();
|
||||
console.warn("[useChatStream] Stream response error", {
|
||||
sessionId,
|
||||
status: response.status,
|
||||
errorText,
|
||||
});
|
||||
throw new Error(errorText || `HTTP ${response.status}`);
|
||||
}
|
||||
|
||||
if (!response.body) {
|
||||
console.warn("[useChatStream] Response body is null", { sessionId });
|
||||
throw new Error("Response body is null");
|
||||
}
|
||||
|
||||
const reader = response.body.getReader();
|
||||
const decoder = new TextDecoder();
|
||||
let buffer = "";
|
||||
let receivedChunkCount = 0;
|
||||
let firstChunkAt: number | null = null;
|
||||
let loggedLineCount = 0;
|
||||
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
let didDispatchStreamEnd = false;
|
||||
|
||||
function dispatchStreamEnd() {
|
||||
if (didDispatchStreamEnd) return;
|
||||
didDispatchStreamEnd = true;
|
||||
onChunk({ type: "stream_end" });
|
||||
}
|
||||
|
||||
const cleanup = () => {
|
||||
reader.cancel().catch(() => {
|
||||
// Ignore cancel errors
|
||||
});
|
||||
};
|
||||
|
||||
async function readStream() {
|
||||
try {
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
|
||||
if (done) {
|
||||
cleanup();
|
||||
console.info("[useChatStream] Stream closed", {
|
||||
sessionId,
|
||||
receivedChunkCount,
|
||||
timeSinceStart: requestStartTimeRef.current
|
||||
? Date.now() - requestStartTimeRef.current
|
||||
: null,
|
||||
});
|
||||
dispatchStreamEnd();
|
||||
retryCountRef.current = 0;
|
||||
stopStreaming();
|
||||
resolve();
|
||||
return;
|
||||
}
|
||||
|
||||
buffer += decoder.decode(value, { stream: true });
|
||||
const lines = buffer.split("\n");
|
||||
buffer = lines.pop() || "";
|
||||
|
||||
for (const line of lines) {
|
||||
if (line.startsWith("data: ")) {
|
||||
const data = line.slice(6);
|
||||
if (loggedLineCount < 3) {
|
||||
console.info("[useChatStream] Raw stream line", {
|
||||
sessionId,
|
||||
data:
|
||||
data.length > 300 ? `${data.slice(0, 300)}...` : data,
|
||||
});
|
||||
loggedLineCount += 1;
|
||||
}
|
||||
if (data === "[DONE]") {
|
||||
cleanup();
|
||||
console.info("[useChatStream] Stream done marker", {
|
||||
sessionId,
|
||||
receivedChunkCount,
|
||||
timeSinceStart: requestStartTimeRef.current
|
||||
? Date.now() - requestStartTimeRef.current
|
||||
: null,
|
||||
});
|
||||
dispatchStreamEnd();
|
||||
retryCountRef.current = 0;
|
||||
stopStreaming();
|
||||
resolve();
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const rawChunk = JSON.parse(data) as
|
||||
| StreamChunk
|
||||
| VercelStreamChunk;
|
||||
const chunk = normalizeStreamChunk(rawChunk);
|
||||
if (!chunk) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!firstChunkAt) {
|
||||
firstChunkAt = Date.now();
|
||||
console.info("[useChatStream] First stream chunk", {
|
||||
sessionId,
|
||||
chunkType: chunk.type,
|
||||
timeSinceStart: requestStartTimeRef.current
|
||||
? firstChunkAt - requestStartTimeRef.current
|
||||
: null,
|
||||
});
|
||||
}
|
||||
receivedChunkCount += 1;
|
||||
|
||||
// Call the chunk handler
|
||||
onChunk(chunk);
|
||||
|
||||
// Handle stream lifecycle
|
||||
if (chunk.type === "stream_end") {
|
||||
didDispatchStreamEnd = true;
|
||||
cleanup();
|
||||
console.info("[useChatStream] Stream end chunk", {
|
||||
sessionId,
|
||||
receivedChunkCount,
|
||||
timeSinceStart: requestStartTimeRef.current
|
||||
? Date.now() - requestStartTimeRef.current
|
||||
: null,
|
||||
});
|
||||
retryCountRef.current = 0;
|
||||
stopStreaming();
|
||||
resolve();
|
||||
return;
|
||||
} else if (chunk.type === "error") {
|
||||
cleanup();
|
||||
reject(
|
||||
new Error(
|
||||
chunk.message || chunk.content || "Stream error",
|
||||
),
|
||||
);
|
||||
return;
|
||||
}
|
||||
} catch (err) {
|
||||
// Skip invalid JSON lines
|
||||
console.warn("Failed to parse SSE chunk:", err, data);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
if (err instanceof Error && err.name === "AbortError") {
|
||||
cleanup();
|
||||
dispatchStreamEnd();
|
||||
stopStreaming();
|
||||
resolve();
|
||||
return;
|
||||
}
|
||||
|
||||
const streamError =
|
||||
err instanceof Error ? err : new Error("Failed to read stream");
|
||||
|
||||
if (retryCountRef.current < MAX_RETRIES) {
|
||||
retryCountRef.current += 1;
|
||||
const retryDelay =
|
||||
INITIAL_RETRY_DELAY * Math.pow(2, retryCountRef.current - 1);
|
||||
|
||||
toast.info("Connection interrupted", {
|
||||
description: `Retrying in ${retryDelay / 1000} seconds...`,
|
||||
});
|
||||
|
||||
retryTimeoutRef.current = setTimeout(() => {
|
||||
sendMessage(
|
||||
sessionId,
|
||||
message,
|
||||
onChunk,
|
||||
isUserMessage,
|
||||
context,
|
||||
true,
|
||||
).catch((_err) => {
|
||||
// Retry failed
|
||||
});
|
||||
}, retryDelay);
|
||||
} else {
|
||||
setError(streamError);
|
||||
toast.error("Connection Failed", {
|
||||
description:
|
||||
"Unable to connect to chat service. Please try again.",
|
||||
});
|
||||
cleanup();
|
||||
dispatchStreamEnd();
|
||||
retryCountRef.current = 0;
|
||||
stopStreaming();
|
||||
reject(streamError);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
readStream();
|
||||
});
|
||||
} catch (err) {
|
||||
if (err instanceof Error && err.name === "AbortError") {
|
||||
setIsStreaming(false);
|
||||
return Promise.resolve();
|
||||
}
|
||||
const streamError =
|
||||
err instanceof Error ? err : new Error("Failed to start stream");
|
||||
setError(streamError);
|
||||
setIsStreaming(false);
|
||||
throw streamError;
|
||||
const completed = getCompletedStream(completedSessionId);
|
||||
if (completed?.error) {
|
||||
setError(completed.error);
|
||||
}
|
||||
unregisterActiveSession(completedSessionId);
|
||||
},
|
||||
);
|
||||
|
||||
return unsubscribe;
|
||||
}, []);
|
||||
|
||||
async function sendMessage(
|
||||
sessionId: string,
|
||||
message: string,
|
||||
onChunk: (chunk: StreamChunk) => void,
|
||||
isUserMessage: boolean = true,
|
||||
context?: { url: string; content: string },
|
||||
) {
|
||||
const previousSessionId = currentSessionIdRef.current;
|
||||
if (previousSessionId && previousSessionId !== sessionId) {
|
||||
stopStreaming(previousSessionId);
|
||||
}
|
||||
|
||||
currentSessionIdRef.current = sessionId;
|
||||
onChunkCallbackRef.current = onChunk;
|
||||
setIsStreaming(true);
|
||||
setError(null);
|
||||
|
||||
registerActiveSession(sessionId);
|
||||
|
||||
try {
|
||||
await startStream(sessionId, message, isUserMessage, context, onChunk);
|
||||
|
||||
const status = getStreamStatus(sessionId);
|
||||
if (status === "error") {
|
||||
const completed = getCompletedStream(sessionId);
|
||||
if (completed?.error) {
|
||||
setError(completed.error);
|
||||
toast.error("Connection Failed", {
|
||||
description: "Unable to connect to chat service. Please try again.",
|
||||
});
|
||||
throw completed.error;
|
||||
}
|
||||
}
|
||||
},
|
||||
[stopStreaming],
|
||||
);
|
||||
} catch (err) {
|
||||
const streamError =
|
||||
err instanceof Error ? err : new Error("Failed to start stream");
|
||||
setError(streamError);
|
||||
throw streamError;
|
||||
} finally {
|
||||
setIsStreaming(false);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
isStreaming,
|
||||
|
||||
@@ -0,0 +1,72 @@
|
||||
"use client";
|
||||
|
||||
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
|
||||
import { environment } from "@/services/environment";
|
||||
import { PostHogProvider as PHProvider } from "@posthog/react";
|
||||
import { usePathname, useSearchParams } from "next/navigation";
|
||||
import posthog from "posthog-js";
|
||||
import { ReactNode, useEffect, useRef } from "react";
|
||||
|
||||
export function PostHogProvider({ children }: { children: ReactNode }) {
|
||||
const isPostHogEnabled = environment.isPostHogEnabled();
|
||||
const postHogCredentials = environment.getPostHogCredentials();
|
||||
|
||||
useEffect(() => {
|
||||
if (postHogCredentials.key) {
|
||||
posthog.init(postHogCredentials.key, {
|
||||
api_host: postHogCredentials.host,
|
||||
defaults: "2025-11-30",
|
||||
capture_pageview: false,
|
||||
capture_pageleave: true,
|
||||
autocapture: true,
|
||||
});
|
||||
}
|
||||
}, []);
|
||||
|
||||
if (!isPostHogEnabled) return <>{children}</>;
|
||||
|
||||
return <PHProvider client={posthog}>{children}</PHProvider>;
|
||||
}
|
||||
|
||||
export function PostHogUserTracker() {
|
||||
const { user, isUserLoading } = useSupabase();
|
||||
const previousUserIdRef = useRef<string | null>(null);
|
||||
const isPostHogEnabled = environment.isPostHogEnabled();
|
||||
|
||||
useEffect(() => {
|
||||
if (isUserLoading || !isPostHogEnabled) return;
|
||||
|
||||
if (user) {
|
||||
if (previousUserIdRef.current !== user.id) {
|
||||
posthog.identify(user.id, {
|
||||
email: user.email,
|
||||
...(user.user_metadata?.name && { name: user.user_metadata.name }),
|
||||
});
|
||||
previousUserIdRef.current = user.id;
|
||||
}
|
||||
} else if (previousUserIdRef.current !== null) {
|
||||
posthog.reset();
|
||||
previousUserIdRef.current = null;
|
||||
}
|
||||
}, [user, isUserLoading, isPostHogEnabled]);
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
export function PostHogPageViewTracker() {
|
||||
const pathname = usePathname();
|
||||
const searchParams = useSearchParams();
|
||||
const isPostHogEnabled = environment.isPostHogEnabled();
|
||||
|
||||
useEffect(() => {
|
||||
if (pathname && isPostHogEnabled) {
|
||||
let url = window.origin + pathname;
|
||||
if (searchParams && searchParams.toString()) {
|
||||
url = url + `?${searchParams.toString()}`;
|
||||
}
|
||||
posthog.capture("$pageview", { $current_url: url });
|
||||
}
|
||||
}, [pathname, searchParams, isPostHogEnabled]);
|
||||
|
||||
return null;
|
||||
}
|
||||
@@ -76,6 +76,13 @@ function getPreviewStealingDev() {
|
||||
return branch;
|
||||
}
|
||||
|
||||
function getPostHogCredentials() {
|
||||
return {
|
||||
key: process.env.NEXT_PUBLIC_POSTHOG_KEY,
|
||||
host: process.env.NEXT_PUBLIC_POSTHOG_HOST,
|
||||
};
|
||||
}
|
||||
|
||||
function isProductionBuild() {
|
||||
return process.env.NODE_ENV === "production";
|
||||
}
|
||||
@@ -116,6 +123,13 @@ function areFeatureFlagsEnabled() {
|
||||
return process.env.NEXT_PUBLIC_LAUNCHDARKLY_ENABLED === "enabled";
|
||||
}
|
||||
|
||||
function isPostHogEnabled() {
|
||||
const inCloud = isCloud();
|
||||
const key = process.env.NEXT_PUBLIC_POSTHOG_KEY;
|
||||
const host = process.env.NEXT_PUBLIC_POSTHOG_HOST;
|
||||
return inCloud && key && host;
|
||||
}
|
||||
|
||||
export const environment = {
|
||||
// Generic
|
||||
getEnvironmentStr,
|
||||
@@ -128,6 +142,7 @@ export const environment = {
|
||||
getSupabaseUrl,
|
||||
getSupabaseAnonKey,
|
||||
getPreviewStealingDev,
|
||||
getPostHogCredentials,
|
||||
// Assertions
|
||||
isServerSide,
|
||||
isClientSide,
|
||||
@@ -138,5 +153,6 @@ export const environment = {
|
||||
isCloud,
|
||||
isLocal,
|
||||
isVercelPreview,
|
||||
isPostHogEnabled,
|
||||
areFeatureFlagsEnabled,
|
||||
};
|
||||
|
||||
@@ -0,0 +1,8 @@
|
||||
"use client";
|
||||
|
||||
import { useNetworkStatus } from "./useNetworkStatus";
|
||||
|
||||
export function NetworkStatusMonitor() {
|
||||
useNetworkStatus();
|
||||
return null;
|
||||
}
|
||||
@@ -0,0 +1,28 @@
|
||||
"use client";
|
||||
|
||||
import { useEffect } from "react";
|
||||
import { toast } from "sonner";
|
||||
|
||||
export function useNetworkStatus() {
|
||||
useEffect(function monitorNetworkStatus() {
|
||||
function handleOnline() {
|
||||
toast.success("Connection restored", {
|
||||
description: "You're back online",
|
||||
});
|
||||
}
|
||||
|
||||
function handleOffline() {
|
||||
toast.error("You're offline", {
|
||||
description: "Check your internet connection",
|
||||
});
|
||||
}
|
||||
|
||||
window.addEventListener("online", handleOnline);
|
||||
window.addEventListener("offline", handleOffline);
|
||||
|
||||
return function cleanup() {
|
||||
window.removeEventListener("online", handleOnline);
|
||||
window.removeEventListener("offline", handleOffline);
|
||||
};
|
||||
}, []);
|
||||
}
|
||||
@@ -65,7 +65,7 @@ The result routes data to yes_output or no_output, enabling intelligent branchin
|
||||
| condition | A plaintext English description of the condition to evaluate | str | Yes |
|
||||
| yes_value | (Optional) Value to output if the condition is true. If not provided, input_value will be used. | Yes Value | No |
|
||||
| no_value | (Optional) Value to output if the condition is false. If not provided, input_value will be used. | No Value | No |
|
||||
| model | The language model to use for evaluating the condition. | "o3-mini" \| "o3-2025-04-16" \| "o1" \| "o1-mini" \| "gpt-5.2-2025-12-11" \| "gpt-5.1-2025-11-13" \| "gpt-5-2025-08-07" \| "gpt-5-mini-2025-08-07" \| "gpt-5-nano-2025-08-07" \| "gpt-5-chat-latest" \| "gpt-4.1-2025-04-14" \| "gpt-4.1-mini-2025-04-14" \| "gpt-4o-mini" \| "gpt-4o" \| "gpt-4-turbo" \| "gpt-3.5-turbo" \| "claude-opus-4-1-20250805" \| "claude-opus-4-20250514" \| "claude-sonnet-4-20250514" \| "claude-opus-4-5-20251101" \| "claude-sonnet-4-5-20250929" \| "claude-haiku-4-5-20251001" \| "claude-3-haiku-20240307" \| "Qwen/Qwen2.5-72B-Instruct-Turbo" \| "nvidia/llama-3.1-nemotron-70b-instruct" \| "meta-llama/Llama-3.3-70B-Instruct-Turbo" \| "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo" \| "meta-llama/Llama-3.2-3B-Instruct-Turbo" \| "llama-3.3-70b-versatile" \| "llama-3.1-8b-instant" \| "llama3.3" \| "llama3.2" \| "llama3" \| "llama3.1:405b" \| "dolphin-mistral:latest" \| "openai/gpt-oss-120b" \| "openai/gpt-oss-20b" \| "google/gemini-2.5-pro-preview-03-25" \| "google/gemini-3-pro-preview" \| "google/gemini-2.5-flash" \| "google/gemini-2.0-flash-001" \| "google/gemini-2.5-flash-lite-preview-06-17" \| "google/gemini-2.0-flash-lite-001" \| "mistralai/mistral-nemo" \| "cohere/command-r-08-2024" \| "cohere/command-r-plus-08-2024" \| "deepseek/deepseek-chat" \| "deepseek/deepseek-r1-0528" \| "perplexity/sonar" \| "perplexity/sonar-pro" \| "perplexity/sonar-deep-research" \| "nousresearch/hermes-3-llama-3.1-405b" \| "nousresearch/hermes-3-llama-3.1-70b" \| "amazon/nova-lite-v1" \| "amazon/nova-micro-v1" \| "amazon/nova-pro-v1" \| "microsoft/wizardlm-2-8x22b" \| "gryphe/mythomax-l2-13b" \| "meta-llama/llama-4-scout" \| "meta-llama/llama-4-maverick" \| "x-ai/grok-4" \| "x-ai/grok-4-fast" \| "x-ai/grok-4.1-fast" \| "x-ai/grok-code-fast-1" \| "moonshotai/kimi-k2" \| "qwen/qwen3-235b-a22b-thinking-2507" \| "qwen/qwen3-coder" \| "Llama-4-Scout-17B-16E-Instruct-FP8" \| "Llama-4-Maverick-17B-128E-Instruct-FP8" \| "Llama-3.3-8B-Instruct" \| "Llama-3.3-70B-Instruct" \| "v0-1.5-md" \| "v0-1.5-lg" \| "v0-1.0-md" | No |
|
||||
| model | The language model to use for evaluating the condition. | "o3-mini" \| "o3-2025-04-16" \| "o1" \| "o1-mini" \| "gpt-5.2-2025-12-11" \| "gpt-5.1-2025-11-13" \| "gpt-5-2025-08-07" \| "gpt-5-mini-2025-08-07" \| "gpt-5-nano-2025-08-07" \| "gpt-5-chat-latest" \| "gpt-4.1-2025-04-14" \| "gpt-4.1-mini-2025-04-14" \| "gpt-4o-mini" \| "gpt-4o" \| "gpt-4-turbo" \| "gpt-3.5-turbo" \| "claude-opus-4-1-20250805" \| "claude-opus-4-20250514" \| "claude-sonnet-4-20250514" \| "claude-opus-4-5-20251101" \| "claude-sonnet-4-5-20250929" \| "claude-haiku-4-5-20251001" \| "claude-3-7-sonnet-20250219" \| "claude-3-haiku-20240307" \| "Qwen/Qwen2.5-72B-Instruct-Turbo" \| "nvidia/llama-3.1-nemotron-70b-instruct" \| "meta-llama/Llama-3.3-70B-Instruct-Turbo" \| "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo" \| "meta-llama/Llama-3.2-3B-Instruct-Turbo" \| "llama-3.3-70b-versatile" \| "llama-3.1-8b-instant" \| "llama3.3" \| "llama3.2" \| "llama3" \| "llama3.1:405b" \| "dolphin-mistral:latest" \| "openai/gpt-oss-120b" \| "openai/gpt-oss-20b" \| "google/gemini-2.5-pro-preview-03-25" \| "google/gemini-3-pro-preview" \| "google/gemini-2.5-flash" \| "google/gemini-2.0-flash-001" \| "google/gemini-2.5-flash-lite-preview-06-17" \| "google/gemini-2.0-flash-lite-001" \| "mistralai/mistral-nemo" \| "cohere/command-r-08-2024" \| "cohere/command-r-plus-08-2024" \| "deepseek/deepseek-chat" \| "deepseek/deepseek-r1-0528" \| "perplexity/sonar" \| "perplexity/sonar-pro" \| "perplexity/sonar-deep-research" \| "nousresearch/hermes-3-llama-3.1-405b" \| "nousresearch/hermes-3-llama-3.1-70b" \| "amazon/nova-lite-v1" \| "amazon/nova-micro-v1" \| "amazon/nova-pro-v1" \| "microsoft/wizardlm-2-8x22b" \| "gryphe/mythomax-l2-13b" \| "meta-llama/llama-4-scout" \| "meta-llama/llama-4-maverick" \| "x-ai/grok-4" \| "x-ai/grok-4-fast" \| "x-ai/grok-4.1-fast" \| "x-ai/grok-code-fast-1" \| "moonshotai/kimi-k2" \| "qwen/qwen3-235b-a22b-thinking-2507" \| "qwen/qwen3-coder" \| "Llama-4-Scout-17B-16E-Instruct-FP8" \| "Llama-4-Maverick-17B-128E-Instruct-FP8" \| "Llama-3.3-8B-Instruct" \| "Llama-3.3-70B-Instruct" \| "v0-1.5-md" \| "v0-1.5-lg" \| "v0-1.0-md" | No |
|
||||
|
||||
### Outputs
|
||||
|
||||
@@ -103,7 +103,7 @@ The block sends the entire conversation history to the chosen LLM, including sys
|
||||
|-------|-------------|------|----------|
|
||||
| prompt | The prompt to send to the language model. | str | No |
|
||||
| messages | List of messages in the conversation. | List[Any] | Yes |
|
||||
| model | The language model to use for the conversation. | "o3-mini" \| "o3-2025-04-16" \| "o1" \| "o1-mini" \| "gpt-5.2-2025-12-11" \| "gpt-5.1-2025-11-13" \| "gpt-5-2025-08-07" \| "gpt-5-mini-2025-08-07" \| "gpt-5-nano-2025-08-07" \| "gpt-5-chat-latest" \| "gpt-4.1-2025-04-14" \| "gpt-4.1-mini-2025-04-14" \| "gpt-4o-mini" \| "gpt-4o" \| "gpt-4-turbo" \| "gpt-3.5-turbo" \| "claude-opus-4-1-20250805" \| "claude-opus-4-20250514" \| "claude-sonnet-4-20250514" \| "claude-opus-4-5-20251101" \| "claude-sonnet-4-5-20250929" \| "claude-haiku-4-5-20251001" \| "claude-3-haiku-20240307" \| "Qwen/Qwen2.5-72B-Instruct-Turbo" \| "nvidia/llama-3.1-nemotron-70b-instruct" \| "meta-llama/Llama-3.3-70B-Instruct-Turbo" \| "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo" \| "meta-llama/Llama-3.2-3B-Instruct-Turbo" \| "llama-3.3-70b-versatile" \| "llama-3.1-8b-instant" \| "llama3.3" \| "llama3.2" \| "llama3" \| "llama3.1:405b" \| "dolphin-mistral:latest" \| "openai/gpt-oss-120b" \| "openai/gpt-oss-20b" \| "google/gemini-2.5-pro-preview-03-25" \| "google/gemini-3-pro-preview" \| "google/gemini-2.5-flash" \| "google/gemini-2.0-flash-001" \| "google/gemini-2.5-flash-lite-preview-06-17" \| "google/gemini-2.0-flash-lite-001" \| "mistralai/mistral-nemo" \| "cohere/command-r-08-2024" \| "cohere/command-r-plus-08-2024" \| "deepseek/deepseek-chat" \| "deepseek/deepseek-r1-0528" \| "perplexity/sonar" \| "perplexity/sonar-pro" \| "perplexity/sonar-deep-research" \| "nousresearch/hermes-3-llama-3.1-405b" \| "nousresearch/hermes-3-llama-3.1-70b" \| "amazon/nova-lite-v1" \| "amazon/nova-micro-v1" \| "amazon/nova-pro-v1" \| "microsoft/wizardlm-2-8x22b" \| "gryphe/mythomax-l2-13b" \| "meta-llama/llama-4-scout" \| "meta-llama/llama-4-maverick" \| "x-ai/grok-4" \| "x-ai/grok-4-fast" \| "x-ai/grok-4.1-fast" \| "x-ai/grok-code-fast-1" \| "moonshotai/kimi-k2" \| "qwen/qwen3-235b-a22b-thinking-2507" \| "qwen/qwen3-coder" \| "Llama-4-Scout-17B-16E-Instruct-FP8" \| "Llama-4-Maverick-17B-128E-Instruct-FP8" \| "Llama-3.3-8B-Instruct" \| "Llama-3.3-70B-Instruct" \| "v0-1.5-md" \| "v0-1.5-lg" \| "v0-1.0-md" | No |
|
||||
| model | The language model to use for the conversation. | "o3-mini" \| "o3-2025-04-16" \| "o1" \| "o1-mini" \| "gpt-5.2-2025-12-11" \| "gpt-5.1-2025-11-13" \| "gpt-5-2025-08-07" \| "gpt-5-mini-2025-08-07" \| "gpt-5-nano-2025-08-07" \| "gpt-5-chat-latest" \| "gpt-4.1-2025-04-14" \| "gpt-4.1-mini-2025-04-14" \| "gpt-4o-mini" \| "gpt-4o" \| "gpt-4-turbo" \| "gpt-3.5-turbo" \| "claude-opus-4-1-20250805" \| "claude-opus-4-20250514" \| "claude-sonnet-4-20250514" \| "claude-opus-4-5-20251101" \| "claude-sonnet-4-5-20250929" \| "claude-haiku-4-5-20251001" \| "claude-3-7-sonnet-20250219" \| "claude-3-haiku-20240307" \| "Qwen/Qwen2.5-72B-Instruct-Turbo" \| "nvidia/llama-3.1-nemotron-70b-instruct" \| "meta-llama/Llama-3.3-70B-Instruct-Turbo" \| "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo" \| "meta-llama/Llama-3.2-3B-Instruct-Turbo" \| "llama-3.3-70b-versatile" \| "llama-3.1-8b-instant" \| "llama3.3" \| "llama3.2" \| "llama3" \| "llama3.1:405b" \| "dolphin-mistral:latest" \| "openai/gpt-oss-120b" \| "openai/gpt-oss-20b" \| "google/gemini-2.5-pro-preview-03-25" \| "google/gemini-3-pro-preview" \| "google/gemini-2.5-flash" \| "google/gemini-2.0-flash-001" \| "google/gemini-2.5-flash-lite-preview-06-17" \| "google/gemini-2.0-flash-lite-001" \| "mistralai/mistral-nemo" \| "cohere/command-r-08-2024" \| "cohere/command-r-plus-08-2024" \| "deepseek/deepseek-chat" \| "deepseek/deepseek-r1-0528" \| "perplexity/sonar" \| "perplexity/sonar-pro" \| "perplexity/sonar-deep-research" \| "nousresearch/hermes-3-llama-3.1-405b" \| "nousresearch/hermes-3-llama-3.1-70b" \| "amazon/nova-lite-v1" \| "amazon/nova-micro-v1" \| "amazon/nova-pro-v1" \| "microsoft/wizardlm-2-8x22b" \| "gryphe/mythomax-l2-13b" \| "meta-llama/llama-4-scout" \| "meta-llama/llama-4-maverick" \| "x-ai/grok-4" \| "x-ai/grok-4-fast" \| "x-ai/grok-4.1-fast" \| "x-ai/grok-code-fast-1" \| "moonshotai/kimi-k2" \| "qwen/qwen3-235b-a22b-thinking-2507" \| "qwen/qwen3-coder" \| "Llama-4-Scout-17B-16E-Instruct-FP8" \| "Llama-4-Maverick-17B-128E-Instruct-FP8" \| "Llama-3.3-8B-Instruct" \| "Llama-3.3-70B-Instruct" \| "v0-1.5-md" \| "v0-1.5-lg" \| "v0-1.0-md" | No |
| max_tokens | The maximum number of tokens to generate in the chat completion. | int | No |
| ollama_host | Ollama host for local models | str | No |
@@ -257,7 +257,7 @@ The block formulates a prompt based on the given focus or source data, sends it
|-------|-------------|------|----------|
| focus | The focus of the list to generate. | str | No |
| source_data | The data to generate the list from. | str | No |
| model | The language model to use for generating the list. | "o3-mini" \| "o3-2025-04-16" \| "o1" \| "o1-mini" \| "gpt-5.2-2025-12-11" \| "gpt-5.1-2025-11-13" \| "gpt-5-2025-08-07" \| "gpt-5-mini-2025-08-07" \| "gpt-5-nano-2025-08-07" \| "gpt-5-chat-latest" \| "gpt-4.1-2025-04-14" \| "gpt-4.1-mini-2025-04-14" \| "gpt-4o-mini" \| "gpt-4o" \| "gpt-4-turbo" \| "gpt-3.5-turbo" \| "claude-opus-4-1-20250805" \| "claude-opus-4-20250514" \| "claude-sonnet-4-20250514" \| "claude-opus-4-5-20251101" \| "claude-sonnet-4-5-20250929" \| "claude-haiku-4-5-20251001" \| "claude-3-haiku-20240307" \| "Qwen/Qwen2.5-72B-Instruct-Turbo" \| "nvidia/llama-3.1-nemotron-70b-instruct" \| "meta-llama/Llama-3.3-70B-Instruct-Turbo" \| "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo" \| "meta-llama/Llama-3.2-3B-Instruct-Turbo" \| "llama-3.3-70b-versatile" \| "llama-3.1-8b-instant" \| "llama3.3" \| "llama3.2" \| "llama3" \| "llama3.1:405b" \| "dolphin-mistral:latest" \| "openai/gpt-oss-120b" \| "openai/gpt-oss-20b" \| "google/gemini-2.5-pro-preview-03-25" \| "google/gemini-3-pro-preview" \| "google/gemini-2.5-flash" \| "google/gemini-2.0-flash-001" \| "google/gemini-2.5-flash-lite-preview-06-17" \| "google/gemini-2.0-flash-lite-001" \| "mistralai/mistral-nemo" \| "cohere/command-r-08-2024" \| "cohere/command-r-plus-08-2024" \| "deepseek/deepseek-chat" \| "deepseek/deepseek-r1-0528" \| "perplexity/sonar" \| "perplexity/sonar-pro" \| "perplexity/sonar-deep-research" \| "nousresearch/hermes-3-llama-3.1-405b" \| "nousresearch/hermes-3-llama-3.1-70b" \| "amazon/nova-lite-v1" \| "amazon/nova-micro-v1" \| "amazon/nova-pro-v1" \| "microsoft/wizardlm-2-8x22b" \| "gryphe/mythomax-l2-13b" \| "meta-llama/llama-4-scout" \| "meta-llama/llama-4-maverick" \| "x-ai/grok-4" \| "x-ai/grok-4-fast" \| "x-ai/grok-4.1-fast" \| "x-ai/grok-code-fast-1" \| "moonshotai/kimi-k2" \| "qwen/qwen3-235b-a22b-thinking-2507" \| "qwen/qwen3-coder" \| "Llama-4-Scout-17B-16E-Instruct-FP8" \| "Llama-4-Maverick-17B-128E-Instruct-FP8" \| "Llama-3.3-8B-Instruct" \| "Llama-3.3-70B-Instruct" \| "v0-1.5-md" \| "v0-1.5-lg" \| "v0-1.0-md" | No |
| model | The language model to use for generating the list. | "o3-mini" \| "o3-2025-04-16" \| "o1" \| "o1-mini" \| "gpt-5.2-2025-12-11" \| "gpt-5.1-2025-11-13" \| "gpt-5-2025-08-07" \| "gpt-5-mini-2025-08-07" \| "gpt-5-nano-2025-08-07" \| "gpt-5-chat-latest" \| "gpt-4.1-2025-04-14" \| "gpt-4.1-mini-2025-04-14" \| "gpt-4o-mini" \| "gpt-4o" \| "gpt-4-turbo" \| "gpt-3.5-turbo" \| "claude-opus-4-1-20250805" \| "claude-opus-4-20250514" \| "claude-sonnet-4-20250514" \| "claude-opus-4-5-20251101" \| "claude-sonnet-4-5-20250929" \| "claude-haiku-4-5-20251001" \| "claude-3-7-sonnet-20250219" \| "claude-3-haiku-20240307" \| "Qwen/Qwen2.5-72B-Instruct-Turbo" \| "nvidia/llama-3.1-nemotron-70b-instruct" \| "meta-llama/Llama-3.3-70B-Instruct-Turbo" \| "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo" \| "meta-llama/Llama-3.2-3B-Instruct-Turbo" \| "llama-3.3-70b-versatile" \| "llama-3.1-8b-instant" \| "llama3.3" \| "llama3.2" \| "llama3" \| "llama3.1:405b" \| "dolphin-mistral:latest" \| "openai/gpt-oss-120b" \| "openai/gpt-oss-20b" \| "google/gemini-2.5-pro-preview-03-25" \| "google/gemini-3-pro-preview" \| "google/gemini-2.5-flash" \| "google/gemini-2.0-flash-001" \| "google/gemini-2.5-flash-lite-preview-06-17" \| "google/gemini-2.0-flash-lite-001" \| "mistralai/mistral-nemo" \| "cohere/command-r-08-2024" \| "cohere/command-r-plus-08-2024" \| "deepseek/deepseek-chat" \| "deepseek/deepseek-r1-0528" \| "perplexity/sonar" \| "perplexity/sonar-pro" \| "perplexity/sonar-deep-research" \| "nousresearch/hermes-3-llama-3.1-405b" \| "nousresearch/hermes-3-llama-3.1-70b" \| "amazon/nova-lite-v1" \| "amazon/nova-micro-v1" \| "amazon/nova-pro-v1" \| "microsoft/wizardlm-2-8x22b" \| "gryphe/mythomax-l2-13b" \| "meta-llama/llama-4-scout" \| "meta-llama/llama-4-maverick" \| "x-ai/grok-4" \| "x-ai/grok-4-fast" \| "x-ai/grok-4.1-fast" \| "x-ai/grok-code-fast-1" \| "moonshotai/kimi-k2" \| "qwen/qwen3-235b-a22b-thinking-2507" \| "qwen/qwen3-coder" \| "Llama-4-Scout-17B-16E-Instruct-FP8" \| "Llama-4-Maverick-17B-128E-Instruct-FP8" \| "Llama-3.3-8B-Instruct" \| "Llama-3.3-70B-Instruct" \| "v0-1.5-md" \| "v0-1.5-lg" \| "v0-1.0-md" | No |
| max_retries | Maximum number of retries for generating a valid list. | int | No |
| force_json_output | Whether to force the LLM to produce a JSON-only response. This can increase the block's reliability, but may also reduce the quality of the response because it prohibits the LLM from reasoning before providing its JSON response. | bool | No |
| max_tokens | The maximum number of tokens to generate in the chat completion. | int | No |
@@ -424,7 +424,7 @@ The block sends the input prompt to a chosen LLM, along with any system prompts
| prompt | The prompt to send to the language model. | str | Yes |
| expected_format | Expected format of the response. If provided, the response will be validated against this format. The keys should be the expected fields in the response, and the values should be the description of the field. | Dict[str, str] | Yes |
| list_result | Whether the response should be a list of objects in the expected format. | bool | No |
| model | The language model to use for answering the prompt. | "o3-mini" \| "o3-2025-04-16" \| "o1" \| "o1-mini" \| "gpt-5.2-2025-12-11" \| "gpt-5.1-2025-11-13" \| "gpt-5-2025-08-07" \| "gpt-5-mini-2025-08-07" \| "gpt-5-nano-2025-08-07" \| "gpt-5-chat-latest" \| "gpt-4.1-2025-04-14" \| "gpt-4.1-mini-2025-04-14" \| "gpt-4o-mini" \| "gpt-4o" \| "gpt-4-turbo" \| "gpt-3.5-turbo" \| "claude-opus-4-1-20250805" \| "claude-opus-4-20250514" \| "claude-sonnet-4-20250514" \| "claude-opus-4-5-20251101" \| "claude-sonnet-4-5-20250929" \| "claude-haiku-4-5-20251001" \| "claude-3-haiku-20240307" \| "Qwen/Qwen2.5-72B-Instruct-Turbo" \| "nvidia/llama-3.1-nemotron-70b-instruct" \| "meta-llama/Llama-3.3-70B-Instruct-Turbo" \| "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo" \| "meta-llama/Llama-3.2-3B-Instruct-Turbo" \| "llama-3.3-70b-versatile" \| "llama-3.1-8b-instant" \| "llama3.3" \| "llama3.2" \| "llama3" \| "llama3.1:405b" \| "dolphin-mistral:latest" \| "openai/gpt-oss-120b" \| "openai/gpt-oss-20b" \| "google/gemini-2.5-pro-preview-03-25" \| "google/gemini-3-pro-preview" \| "google/gemini-2.5-flash" \| "google/gemini-2.0-flash-001" \| "google/gemini-2.5-flash-lite-preview-06-17" \| "google/gemini-2.0-flash-lite-001" \| "mistralai/mistral-nemo" \| "cohere/command-r-08-2024" \| "cohere/command-r-plus-08-2024" \| "deepseek/deepseek-chat" \| "deepseek/deepseek-r1-0528" \| "perplexity/sonar" \| "perplexity/sonar-pro" \| "perplexity/sonar-deep-research" \| "nousresearch/hermes-3-llama-3.1-405b" \| "nousresearch/hermes-3-llama-3.1-70b" \| "amazon/nova-lite-v1" \| "amazon/nova-micro-v1" \| "amazon/nova-pro-v1" \| "microsoft/wizardlm-2-8x22b" \| "gryphe/mythomax-l2-13b" \| "meta-llama/llama-4-scout" \| "meta-llama/llama-4-maverick" \| "x-ai/grok-4" \| "x-ai/grok-4-fast" \| "x-ai/grok-4.1-fast" \| "x-ai/grok-code-fast-1" \| "moonshotai/kimi-k2" \| "qwen/qwen3-235b-a22b-thinking-2507" \| "qwen/qwen3-coder" \| "Llama-4-Scout-17B-16E-Instruct-FP8" \| "Llama-4-Maverick-17B-128E-Instruct-FP8" \| "Llama-3.3-8B-Instruct" \| "Llama-3.3-70B-Instruct" \| "v0-1.5-md" \| "v0-1.5-lg" \| "v0-1.0-md" | No |
| model | The language model to use for answering the prompt. | "o3-mini" \| "o3-2025-04-16" \| "o1" \| "o1-mini" \| "gpt-5.2-2025-12-11" \| "gpt-5.1-2025-11-13" \| "gpt-5-2025-08-07" \| "gpt-5-mini-2025-08-07" \| "gpt-5-nano-2025-08-07" \| "gpt-5-chat-latest" \| "gpt-4.1-2025-04-14" \| "gpt-4.1-mini-2025-04-14" \| "gpt-4o-mini" \| "gpt-4o" \| "gpt-4-turbo" \| "gpt-3.5-turbo" \| "claude-opus-4-1-20250805" \| "claude-opus-4-20250514" \| "claude-sonnet-4-20250514" \| "claude-opus-4-5-20251101" \| "claude-sonnet-4-5-20250929" \| "claude-haiku-4-5-20251001" \| "claude-3-7-sonnet-20250219" \| "claude-3-haiku-20240307" \| "Qwen/Qwen2.5-72B-Instruct-Turbo" \| "nvidia/llama-3.1-nemotron-70b-instruct" \| "meta-llama/Llama-3.3-70B-Instruct-Turbo" \| "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo" \| "meta-llama/Llama-3.2-3B-Instruct-Turbo" \| "llama-3.3-70b-versatile" \| "llama-3.1-8b-instant" \| "llama3.3" \| "llama3.2" \| "llama3" \| "llama3.1:405b" \| "dolphin-mistral:latest" \| "openai/gpt-oss-120b" \| "openai/gpt-oss-20b" \| "google/gemini-2.5-pro-preview-03-25" \| "google/gemini-3-pro-preview" \| "google/gemini-2.5-flash" \| "google/gemini-2.0-flash-001" \| "google/gemini-2.5-flash-lite-preview-06-17" \| "google/gemini-2.0-flash-lite-001" \| "mistralai/mistral-nemo" \| "cohere/command-r-08-2024" \| "cohere/command-r-plus-08-2024" \| "deepseek/deepseek-chat" \| "deepseek/deepseek-r1-0528" \| "perplexity/sonar" \| "perplexity/sonar-pro" \| "perplexity/sonar-deep-research" \| "nousresearch/hermes-3-llama-3.1-405b" \| "nousresearch/hermes-3-llama-3.1-70b" \| "amazon/nova-lite-v1" \| "amazon/nova-micro-v1" \| "amazon/nova-pro-v1" \| "microsoft/wizardlm-2-8x22b" \| "gryphe/mythomax-l2-13b" \| "meta-llama/llama-4-scout" \| "meta-llama/llama-4-maverick" \| "x-ai/grok-4" \| "x-ai/grok-4-fast" \| "x-ai/grok-4.1-fast" \| "x-ai/grok-code-fast-1" \| "moonshotai/kimi-k2" \| "qwen/qwen3-235b-a22b-thinking-2507" \| "qwen/qwen3-coder" \| "Llama-4-Scout-17B-16E-Instruct-FP8" \| "Llama-4-Maverick-17B-128E-Instruct-FP8" \| "Llama-3.3-8B-Instruct" \| "Llama-3.3-70B-Instruct" \| "v0-1.5-md" \| "v0-1.5-lg" \| "v0-1.0-md" | No |
| force_json_output | Whether to force the LLM to produce a JSON-only response. This can increase the block's reliability, but may also reduce the quality of the response because it prohibits the LLM from reasoning before providing its JSON response. | bool | No |
| sys_prompt | The system prompt to provide additional context to the model. | str | No |
| conversation_history | The conversation history to provide context for the prompt. | List[Dict[str, Any]] | No |
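
For orientation, a minimal sketch of how the inputs above might be supplied to this structured-response block; the field names inside expected_format ("sentiment", "confidence"), the prompt, and the chosen model are illustrative assumptions, not values taken from the diff:

```python
# Minimal sketch of an input payload for the structured-response block above.
# expected_format maps each required response field to its description;
# the "sentiment"/"confidence" fields and model choice are illustrative.
payload = {
    "prompt": "Classify the sentiment of: 'The release notes were excellent.'",
    "expected_format": {
        "sentiment": "One of 'positive', 'negative', or 'neutral'.",
        "confidence": "A number between 0 and 1.",
    },
    "list_result": False,        # expect a single object, not a list of objects
    "force_json_output": True,   # more reliable parsing, less room to reason
    "model": "gpt-4o",           # any value from the model enum above
}
```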
@@ -464,7 +464,7 @@ The block sends the input prompt to a chosen LLM, processes the response, and re
| Input | Description | Type | Required |
|-------|-------------|------|----------|
| prompt | The prompt to send to the language model. You can use any of the {keys} from Prompt Values to fill in the prompt with values from the prompt values dictionary by putting them in curly braces. | str | Yes |
| model | The language model to use for answering the prompt. | "o3-mini" \| "o3-2025-04-16" \| "o1" \| "o1-mini" \| "gpt-5.2-2025-12-11" \| "gpt-5.1-2025-11-13" \| "gpt-5-2025-08-07" \| "gpt-5-mini-2025-08-07" \| "gpt-5-nano-2025-08-07" \| "gpt-5-chat-latest" \| "gpt-4.1-2025-04-14" \| "gpt-4.1-mini-2025-04-14" \| "gpt-4o-mini" \| "gpt-4o" \| "gpt-4-turbo" \| "gpt-3.5-turbo" \| "claude-opus-4-1-20250805" \| "claude-opus-4-20250514" \| "claude-sonnet-4-20250514" \| "claude-opus-4-5-20251101" \| "claude-sonnet-4-5-20250929" \| "claude-haiku-4-5-20251001" \| "claude-3-haiku-20240307" \| "Qwen/Qwen2.5-72B-Instruct-Turbo" \| "nvidia/llama-3.1-nemotron-70b-instruct" \| "meta-llama/Llama-3.3-70B-Instruct-Turbo" \| "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo" \| "meta-llama/Llama-3.2-3B-Instruct-Turbo" \| "llama-3.3-70b-versatile" \| "llama-3.1-8b-instant" \| "llama3.3" \| "llama3.2" \| "llama3" \| "llama3.1:405b" \| "dolphin-mistral:latest" \| "openai/gpt-oss-120b" \| "openai/gpt-oss-20b" \| "google/gemini-2.5-pro-preview-03-25" \| "google/gemini-3-pro-preview" \| "google/gemini-2.5-flash" \| "google/gemini-2.0-flash-001" \| "google/gemini-2.5-flash-lite-preview-06-17" \| "google/gemini-2.0-flash-lite-001" \| "mistralai/mistral-nemo" \| "cohere/command-r-08-2024" \| "cohere/command-r-plus-08-2024" \| "deepseek/deepseek-chat" \| "deepseek/deepseek-r1-0528" \| "perplexity/sonar" \| "perplexity/sonar-pro" \| "perplexity/sonar-deep-research" \| "nousresearch/hermes-3-llama-3.1-405b" \| "nousresearch/hermes-3-llama-3.1-70b" \| "amazon/nova-lite-v1" \| "amazon/nova-micro-v1" \| "amazon/nova-pro-v1" \| "microsoft/wizardlm-2-8x22b" \| "gryphe/mythomax-l2-13b" \| "meta-llama/llama-4-scout" \| "meta-llama/llama-4-maverick" \| "x-ai/grok-4" \| "x-ai/grok-4-fast" \| "x-ai/grok-4.1-fast" \| "x-ai/grok-code-fast-1" \| "moonshotai/kimi-k2" \| "qwen/qwen3-235b-a22b-thinking-2507" \| "qwen/qwen3-coder" \| "Llama-4-Scout-17B-16E-Instruct-FP8" \| "Llama-4-Maverick-17B-128E-Instruct-FP8" \| "Llama-3.3-8B-Instruct" \| "Llama-3.3-70B-Instruct" \| "v0-1.5-md" \| "v0-1.5-lg" \| "v0-1.0-md" | No |
| model | The language model to use for answering the prompt. | "o3-mini" \| "o3-2025-04-16" \| "o1" \| "o1-mini" \| "gpt-5.2-2025-12-11" \| "gpt-5.1-2025-11-13" \| "gpt-5-2025-08-07" \| "gpt-5-mini-2025-08-07" \| "gpt-5-nano-2025-08-07" \| "gpt-5-chat-latest" \| "gpt-4.1-2025-04-14" \| "gpt-4.1-mini-2025-04-14" \| "gpt-4o-mini" \| "gpt-4o" \| "gpt-4-turbo" \| "gpt-3.5-turbo" \| "claude-opus-4-1-20250805" \| "claude-opus-4-20250514" \| "claude-sonnet-4-20250514" \| "claude-opus-4-5-20251101" \| "claude-sonnet-4-5-20250929" \| "claude-haiku-4-5-20251001" \| "claude-3-7-sonnet-20250219" \| "claude-3-haiku-20240307" \| "Qwen/Qwen2.5-72B-Instruct-Turbo" \| "nvidia/llama-3.1-nemotron-70b-instruct" \| "meta-llama/Llama-3.3-70B-Instruct-Turbo" \| "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo" \| "meta-llama/Llama-3.2-3B-Instruct-Turbo" \| "llama-3.3-70b-versatile" \| "llama-3.1-8b-instant" \| "llama3.3" \| "llama3.2" \| "llama3" \| "llama3.1:405b" \| "dolphin-mistral:latest" \| "openai/gpt-oss-120b" \| "openai/gpt-oss-20b" \| "google/gemini-2.5-pro-preview-03-25" \| "google/gemini-3-pro-preview" \| "google/gemini-2.5-flash" \| "google/gemini-2.0-flash-001" \| "google/gemini-2.5-flash-lite-preview-06-17" \| "google/gemini-2.0-flash-lite-001" \| "mistralai/mistral-nemo" \| "cohere/command-r-08-2024" \| "cohere/command-r-plus-08-2024" \| "deepseek/deepseek-chat" \| "deepseek/deepseek-r1-0528" \| "perplexity/sonar" \| "perplexity/sonar-pro" \| "perplexity/sonar-deep-research" \| "nousresearch/hermes-3-llama-3.1-405b" \| "nousresearch/hermes-3-llama-3.1-70b" \| "amazon/nova-lite-v1" \| "amazon/nova-micro-v1" \| "amazon/nova-pro-v1" \| "microsoft/wizardlm-2-8x22b" \| "gryphe/mythomax-l2-13b" \| "meta-llama/llama-4-scout" \| "meta-llama/llama-4-maverick" \| "x-ai/grok-4" \| "x-ai/grok-4-fast" \| "x-ai/grok-4.1-fast" \| "x-ai/grok-code-fast-1" \| "moonshotai/kimi-k2" \| "qwen/qwen3-235b-a22b-thinking-2507" \| "qwen/qwen3-coder" \| "Llama-4-Scout-17B-16E-Instruct-FP8" \| "Llama-4-Maverick-17B-128E-Instruct-FP8" \| "Llama-3.3-8B-Instruct" \| "Llama-3.3-70B-Instruct" \| "v0-1.5-md" \| "v0-1.5-lg" \| "v0-1.0-md" | No |
| sys_prompt | The system prompt to provide additional context to the model. | str | No |
| retry | Number of times to retry the LLM call if the response does not match the expected format. | int | No |
| prompt_values | Values used to fill in the prompt. The values can be used in the prompt by putting them in a double curly braces, e.g. {{variable_name}}. | Dict[str, str] | No |
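
Likewise, a small sketch of the templated-prompt inputs documented above. The table shows both {key} and {{variable_name}} placeholder styles; the single-brace form from the prompt description is used here, and all values are illustrative:

```python
# Minimal sketch: prompt templating via prompt_values, per the table above.
# Placeholders in the prompt are filled from the prompt_values dictionary.
payload = {
    "prompt": "Write a {tone} summary of {topic}.",
    "prompt_values": {"tone": "friendly", "topic": "the weekly changelog"},
    "retry": 3,  # re-ask if the response does not match the expected format
}
```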
@@ -501,7 +501,7 @@ The block splits the input text into smaller chunks, sends each chunk to an LLM
| Input | Description | Type | Required |
|-------|-------------|------|----------|
| text | The text to summarize. | str | Yes |
| model | The language model to use for summarizing the text. | "o3-mini" \| "o3-2025-04-16" \| "o1" \| "o1-mini" \| "gpt-5.2-2025-12-11" \| "gpt-5.1-2025-11-13" \| "gpt-5-2025-08-07" \| "gpt-5-mini-2025-08-07" \| "gpt-5-nano-2025-08-07" \| "gpt-5-chat-latest" \| "gpt-4.1-2025-04-14" \| "gpt-4.1-mini-2025-04-14" \| "gpt-4o-mini" \| "gpt-4o" \| "gpt-4-turbo" \| "gpt-3.5-turbo" \| "claude-opus-4-1-20250805" \| "claude-opus-4-20250514" \| "claude-sonnet-4-20250514" \| "claude-opus-4-5-20251101" \| "claude-sonnet-4-5-20250929" \| "claude-haiku-4-5-20251001" \| "claude-3-haiku-20240307" \| "Qwen/Qwen2.5-72B-Instruct-Turbo" \| "nvidia/llama-3.1-nemotron-70b-instruct" \| "meta-llama/Llama-3.3-70B-Instruct-Turbo" \| "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo" \| "meta-llama/Llama-3.2-3B-Instruct-Turbo" \| "llama-3.3-70b-versatile" \| "llama-3.1-8b-instant" \| "llama3.3" \| "llama3.2" \| "llama3" \| "llama3.1:405b" \| "dolphin-mistral:latest" \| "openai/gpt-oss-120b" \| "openai/gpt-oss-20b" \| "google/gemini-2.5-pro-preview-03-25" \| "google/gemini-3-pro-preview" \| "google/gemini-2.5-flash" \| "google/gemini-2.0-flash-001" \| "google/gemini-2.5-flash-lite-preview-06-17" \| "google/gemini-2.0-flash-lite-001" \| "mistralai/mistral-nemo" \| "cohere/command-r-08-2024" \| "cohere/command-r-plus-08-2024" \| "deepseek/deepseek-chat" \| "deepseek/deepseek-r1-0528" \| "perplexity/sonar" \| "perplexity/sonar-pro" \| "perplexity/sonar-deep-research" \| "nousresearch/hermes-3-llama-3.1-405b" \| "nousresearch/hermes-3-llama-3.1-70b" \| "amazon/nova-lite-v1" \| "amazon/nova-micro-v1" \| "amazon/nova-pro-v1" \| "microsoft/wizardlm-2-8x22b" \| "gryphe/mythomax-l2-13b" \| "meta-llama/llama-4-scout" \| "meta-llama/llama-4-maverick" \| "x-ai/grok-4" \| "x-ai/grok-4-fast" \| "x-ai/grok-4.1-fast" \| "x-ai/grok-code-fast-1" \| "moonshotai/kimi-k2" \| "qwen/qwen3-235b-a22b-thinking-2507" \| "qwen/qwen3-coder" \| "Llama-4-Scout-17B-16E-Instruct-FP8" \| "Llama-4-Maverick-17B-128E-Instruct-FP8" \| "Llama-3.3-8B-Instruct" \| "Llama-3.3-70B-Instruct" \| "v0-1.5-md" \| "v0-1.5-lg" \| "v0-1.0-md" | No |
| model | The language model to use for summarizing the text. | "o3-mini" \| "o3-2025-04-16" \| "o1" \| "o1-mini" \| "gpt-5.2-2025-12-11" \| "gpt-5.1-2025-11-13" \| "gpt-5-2025-08-07" \| "gpt-5-mini-2025-08-07" \| "gpt-5-nano-2025-08-07" \| "gpt-5-chat-latest" \| "gpt-4.1-2025-04-14" \| "gpt-4.1-mini-2025-04-14" \| "gpt-4o-mini" \| "gpt-4o" \| "gpt-4-turbo" \| "gpt-3.5-turbo" \| "claude-opus-4-1-20250805" \| "claude-opus-4-20250514" \| "claude-sonnet-4-20250514" \| "claude-opus-4-5-20251101" \| "claude-sonnet-4-5-20250929" \| "claude-haiku-4-5-20251001" \| "claude-3-7-sonnet-20250219" \| "claude-3-haiku-20240307" \| "Qwen/Qwen2.5-72B-Instruct-Turbo" \| "nvidia/llama-3.1-nemotron-70b-instruct" \| "meta-llama/Llama-3.3-70B-Instruct-Turbo" \| "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo" \| "meta-llama/Llama-3.2-3B-Instruct-Turbo" \| "llama-3.3-70b-versatile" \| "llama-3.1-8b-instant" \| "llama3.3" \| "llama3.2" \| "llama3" \| "llama3.1:405b" \| "dolphin-mistral:latest" \| "openai/gpt-oss-120b" \| "openai/gpt-oss-20b" \| "google/gemini-2.5-pro-preview-03-25" \| "google/gemini-3-pro-preview" \| "google/gemini-2.5-flash" \| "google/gemini-2.0-flash-001" \| "google/gemini-2.5-flash-lite-preview-06-17" \| "google/gemini-2.0-flash-lite-001" \| "mistralai/mistral-nemo" \| "cohere/command-r-08-2024" \| "cohere/command-r-plus-08-2024" \| "deepseek/deepseek-chat" \| "deepseek/deepseek-r1-0528" \| "perplexity/sonar" \| "perplexity/sonar-pro" \| "perplexity/sonar-deep-research" \| "nousresearch/hermes-3-llama-3.1-405b" \| "nousresearch/hermes-3-llama-3.1-70b" \| "amazon/nova-lite-v1" \| "amazon/nova-micro-v1" \| "amazon/nova-pro-v1" \| "microsoft/wizardlm-2-8x22b" \| "gryphe/mythomax-l2-13b" \| "meta-llama/llama-4-scout" \| "meta-llama/llama-4-maverick" \| "x-ai/grok-4" \| "x-ai/grok-4-fast" \| "x-ai/grok-4.1-fast" \| "x-ai/grok-code-fast-1" \| "moonshotai/kimi-k2" \| "qwen/qwen3-235b-a22b-thinking-2507" \| "qwen/qwen3-coder" \| "Llama-4-Scout-17B-16E-Instruct-FP8" \| "Llama-4-Maverick-17B-128E-Instruct-FP8" \| "Llama-3.3-8B-Instruct" \| "Llama-3.3-70B-Instruct" \| "v0-1.5-md" \| "v0-1.5-lg" \| "v0-1.0-md" | No |
| focus | The topic to focus on in the summary | str | No |
| style | The style of the summary to generate. | "concise" \| "detailed" \| "bullet points" \| "numbered list" | No |
| max_tokens | The maximum number of tokens to generate in the chat completion. | int | No |
@@ -763,7 +763,7 @@ Configure agent_mode_max_iterations to control loop behavior: 0 for single decis
| Input | Description | Type | Required |
|-------|-------------|------|----------|
| prompt | The prompt to send to the language model. | str | Yes |
| model | The language model to use for answering the prompt. | "o3-mini" \| "o3-2025-04-16" \| "o1" \| "o1-mini" \| "gpt-5.2-2025-12-11" \| "gpt-5.1-2025-11-13" \| "gpt-5-2025-08-07" \| "gpt-5-mini-2025-08-07" \| "gpt-5-nano-2025-08-07" \| "gpt-5-chat-latest" \| "gpt-4.1-2025-04-14" \| "gpt-4.1-mini-2025-04-14" \| "gpt-4o-mini" \| "gpt-4o" \| "gpt-4-turbo" \| "gpt-3.5-turbo" \| "claude-opus-4-1-20250805" \| "claude-opus-4-20250514" \| "claude-sonnet-4-20250514" \| "claude-opus-4-5-20251101" \| "claude-sonnet-4-5-20250929" \| "claude-haiku-4-5-20251001" \| "claude-3-haiku-20240307" \| "Qwen/Qwen2.5-72B-Instruct-Turbo" \| "nvidia/llama-3.1-nemotron-70b-instruct" \| "meta-llama/Llama-3.3-70B-Instruct-Turbo" \| "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo" \| "meta-llama/Llama-3.2-3B-Instruct-Turbo" \| "llama-3.3-70b-versatile" \| "llama-3.1-8b-instant" \| "llama3.3" \| "llama3.2" \| "llama3" \| "llama3.1:405b" \| "dolphin-mistral:latest" \| "openai/gpt-oss-120b" \| "openai/gpt-oss-20b" \| "google/gemini-2.5-pro-preview-03-25" \| "google/gemini-3-pro-preview" \| "google/gemini-2.5-flash" \| "google/gemini-2.0-flash-001" \| "google/gemini-2.5-flash-lite-preview-06-17" \| "google/gemini-2.0-flash-lite-001" \| "mistralai/mistral-nemo" \| "cohere/command-r-08-2024" \| "cohere/command-r-plus-08-2024" \| "deepseek/deepseek-chat" \| "deepseek/deepseek-r1-0528" \| "perplexity/sonar" \| "perplexity/sonar-pro" \| "perplexity/sonar-deep-research" \| "nousresearch/hermes-3-llama-3.1-405b" \| "nousresearch/hermes-3-llama-3.1-70b" \| "amazon/nova-lite-v1" \| "amazon/nova-micro-v1" \| "amazon/nova-pro-v1" \| "microsoft/wizardlm-2-8x22b" \| "gryphe/mythomax-l2-13b" \| "meta-llama/llama-4-scout" \| "meta-llama/llama-4-maverick" \| "x-ai/grok-4" \| "x-ai/grok-4-fast" \| "x-ai/grok-4.1-fast" \| "x-ai/grok-code-fast-1" \| "moonshotai/kimi-k2" \| "qwen/qwen3-235b-a22b-thinking-2507" \| "qwen/qwen3-coder" \| "Llama-4-Scout-17B-16E-Instruct-FP8" \| "Llama-4-Maverick-17B-128E-Instruct-FP8" \| "Llama-3.3-8B-Instruct" \| "Llama-3.3-70B-Instruct" \| "v0-1.5-md" \| "v0-1.5-lg" \| "v0-1.0-md" | No |
| model | The language model to use for answering the prompt. | "o3-mini" \| "o3-2025-04-16" \| "o1" \| "o1-mini" \| "gpt-5.2-2025-12-11" \| "gpt-5.1-2025-11-13" \| "gpt-5-2025-08-07" \| "gpt-5-mini-2025-08-07" \| "gpt-5-nano-2025-08-07" \| "gpt-5-chat-latest" \| "gpt-4.1-2025-04-14" \| "gpt-4.1-mini-2025-04-14" \| "gpt-4o-mini" \| "gpt-4o" \| "gpt-4-turbo" \| "gpt-3.5-turbo" \| "claude-opus-4-1-20250805" \| "claude-opus-4-20250514" \| "claude-sonnet-4-20250514" \| "claude-opus-4-5-20251101" \| "claude-sonnet-4-5-20250929" \| "claude-haiku-4-5-20251001" \| "claude-3-7-sonnet-20250219" \| "claude-3-haiku-20240307" \| "Qwen/Qwen2.5-72B-Instruct-Turbo" \| "nvidia/llama-3.1-nemotron-70b-instruct" \| "meta-llama/Llama-3.3-70B-Instruct-Turbo" \| "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo" \| "meta-llama/Llama-3.2-3B-Instruct-Turbo" \| "llama-3.3-70b-versatile" \| "llama-3.1-8b-instant" \| "llama3.3" \| "llama3.2" \| "llama3" \| "llama3.1:405b" \| "dolphin-mistral:latest" \| "openai/gpt-oss-120b" \| "openai/gpt-oss-20b" \| "google/gemini-2.5-pro-preview-03-25" \| "google/gemini-3-pro-preview" \| "google/gemini-2.5-flash" \| "google/gemini-2.0-flash-001" \| "google/gemini-2.5-flash-lite-preview-06-17" \| "google/gemini-2.0-flash-lite-001" \| "mistralai/mistral-nemo" \| "cohere/command-r-08-2024" \| "cohere/command-r-plus-08-2024" \| "deepseek/deepseek-chat" \| "deepseek/deepseek-r1-0528" \| "perplexity/sonar" \| "perplexity/sonar-pro" \| "perplexity/sonar-deep-research" \| "nousresearch/hermes-3-llama-3.1-405b" \| "nousresearch/hermes-3-llama-3.1-70b" \| "amazon/nova-lite-v1" \| "amazon/nova-micro-v1" \| "amazon/nova-pro-v1" \| "microsoft/wizardlm-2-8x22b" \| "gryphe/mythomax-l2-13b" \| "meta-llama/llama-4-scout" \| "meta-llama/llama-4-maverick" \| "x-ai/grok-4" \| "x-ai/grok-4-fast" \| "x-ai/grok-4.1-fast" \| "x-ai/grok-code-fast-1" \| "moonshotai/kimi-k2" \| "qwen/qwen3-235b-a22b-thinking-2507" \| "qwen/qwen3-coder" \| "Llama-4-Scout-17B-16E-Instruct-FP8" \| "Llama-4-Maverick-17B-128E-Instruct-FP8" \| "Llama-3.3-8B-Instruct" \| "Llama-3.3-70B-Instruct" \| "v0-1.5-md" \| "v0-1.5-lg" \| "v0-1.0-md" | No |
| multiple_tool_calls | Whether to allow multiple tool calls in a single response. | bool | No |
| sys_prompt | The system prompt to provide additional context to the model. | str | No |
| conversation_history | The conversation history to provide context for the prompt. | List[Dict[str, Any]] | No |
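
A rough sketch of this decision-making block's inputs, assuming the agent_mode_max_iterations semantics quoted in the hunk header above (0 for a single decision, a positive value to allow looping); the prompt and iteration count are illustrative:

```python
# Minimal sketch: tool-calling decision block inputs, per the table above.
# agent_mode_max_iterations follows the hunk context: 0 = single decision,
# N > 0 = up to N tool-call iterations (the value here is hypothetical).
payload = {
    "prompt": "Pick the next step to unblock the failing deployment.",
    "multiple_tool_calls": False,    # at most one tool call per response
    "agent_mode_max_iterations": 5,  # hypothetical; 0 would mean one-shot
    "conversation_history": [],      # prior messages, if any
}
```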
@@ -20,7 +20,7 @@ Configure timeouts for DOM settlement and page loading. Variables can be passed
| Input | Description | Type | Required |
|-------|-------------|------|----------|
| browserbase_project_id | Browserbase project ID (required if using Browserbase) | str | Yes |
| model | LLM to use for Stagehand (provider is inferred) | "gpt-4.1-2025-04-14" \| "gpt-4.1-mini-2025-04-14" \| "claude-sonnet-4-5-20250929" | No |
| model | LLM to use for Stagehand (provider is inferred) | "gpt-4.1-2025-04-14" \| "gpt-4.1-mini-2025-04-14" \| "claude-3-7-sonnet-20250219" | No |
| url | URL to navigate to. | str | Yes |
| action | Action to perform. Suggested actions are: click, fill, type, press, scroll, select from dropdown. For multi-step actions, add an entry for each step. | List[str] | Yes |
| variables | Variables to use in the action. Variables contains data you want the action to use. | Dict[str, str] | No |
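
Finally, a sketch of a multi-step Stagehand act input following the table above; the URL, step wording, and credential values are illustrative, and the exact syntax for referencing variables inside a step is Stagehand's, not shown here:

```python
# Minimal sketch: multi-step Stagehand action, one list entry per step.
# The variables dict carries data the steps can use; all values illustrative.
payload = {
    "url": "https://example.com/login",
    "action": [
        "fill the username field with the username variable",
        "fill the password field with the password variable",
        "click the sign-in button",
    ],
    "variables": {"username": "demo-user", "password": "demo-pass"},
}
```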
@@ -65,7 +65,7 @@ Supports searching within iframes and configurable timeouts for dynamic content
| Input | Description | Type | Required |
|-------|-------------|------|----------|
| browserbase_project_id | Browserbase project ID (required if using Browserbase) | str | Yes |
| model | LLM to use for Stagehand (provider is inferred) | "gpt-4.1-2025-04-14" \| "gpt-4.1-mini-2025-04-14" \| "claude-sonnet-4-5-20250929" | No |
| model | LLM to use for Stagehand (provider is inferred) | "gpt-4.1-2025-04-14" \| "gpt-4.1-mini-2025-04-14" \| "claude-3-7-sonnet-20250219" | No |
| url | URL to navigate to. | str | Yes |
| instruction | Natural language description of elements or actions to discover. | str | Yes |
| iframes | Whether to search within iframes. If True, Stagehand will search for actions within iframes. | bool | No |
@@ -106,7 +106,7 @@ Use this to explore a page's interactive elements before building automated work
| Input | Description | Type | Required |
|-------|-------------|------|----------|
| browserbase_project_id | Browserbase project ID (required if using Browserbase) | str | Yes |
| model | LLM to use for Stagehand (provider is inferred) | "gpt-4.1-2025-04-14" \| "gpt-4.1-mini-2025-04-14" \| "claude-sonnet-4-5-20250929" | No |
| model | LLM to use for Stagehand (provider is inferred) | "gpt-4.1-2025-04-14" \| "gpt-4.1-mini-2025-04-14" \| "claude-3-7-sonnet-20250219" | No |
| url | URL to navigate to. | str | Yes |
| instruction | Natural language description of elements or actions to discover. | str | Yes |
| iframes | Whether to search within iframes. If True, Stagehand will search for actions within iframes. | bool | No |