Compare commits
8 Commits
feat/agent
...
swiftyos/p
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
174e4cc63a | ||
|
|
0af5835030 | ||
|
|
0d82e635ce | ||
|
|
b049c7a756 | ||
|
|
7892590b12 | ||
|
|
76c751bd56 | ||
|
|
82d7134fc6 | ||
|
|
90466908a8 |
@@ -178,5 +178,10 @@ AYRSHARE_JWT_KEY=
|
||||
SMARTLEAD_API_KEY=
|
||||
ZEROBOUNCE_API_KEY=
|
||||
|
||||
# PostHog Analytics
|
||||
# Get API key from https://posthog.com - Project Settings > Project API Key
|
||||
POSTHOG_API_KEY=
|
||||
POSTHOG_HOST=https://eu.i.posthog.com
|
||||
|
||||
# Other Services
|
||||
AUTOMOD_API_KEY=
|
||||
|
||||
@@ -48,6 +48,7 @@ from .response_model import (
|
||||
StreamUsage,
|
||||
)
|
||||
from .tools import execute_tool, tools
|
||||
from .tracking import track_user_message
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -103,16 +104,33 @@ async def _build_system_prompt(user_id: str | None) -> tuple[str, Any]:
|
||||
return compiled, understanding
|
||||
|
||||
|
||||
async def _generate_session_title(message: str) -> str | None:
|
||||
async def _generate_session_title(
|
||||
message: str,
|
||||
user_id: str | None = None,
|
||||
session_id: str | None = None,
|
||||
) -> str | None:
|
||||
"""Generate a concise title for a chat session based on the first message.
|
||||
|
||||
Args:
|
||||
message: The first user message in the session
|
||||
user_id: User ID for OpenRouter tracing (optional)
|
||||
session_id: Session ID for OpenRouter tracing (optional)
|
||||
|
||||
Returns:
|
||||
A short title (3-6 words) or None if generation fails
|
||||
"""
|
||||
try:
|
||||
# Build extra_body for OpenRouter tracing and PostHog analytics
|
||||
extra_body: dict[str, Any] = {}
|
||||
if user_id:
|
||||
extra_body["user"] = user_id[:128] # OpenRouter limit
|
||||
extra_body["posthogDistinctId"] = user_id
|
||||
if session_id:
|
||||
extra_body["session_id"] = session_id[:128] # OpenRouter limit
|
||||
extra_body["posthogProperties"] = {
|
||||
"environment": settings.config.app_env.value,
|
||||
}
|
||||
|
||||
response = await client.chat.completions.create(
|
||||
model=config.title_model,
|
||||
messages=[
|
||||
@@ -127,6 +145,7 @@ async def _generate_session_title(message: str) -> str | None:
|
||||
{"role": "user", "content": message[:500]}, # Limit input length
|
||||
],
|
||||
max_tokens=20,
|
||||
extra_body=extra_body,
|
||||
)
|
||||
title = response.choices[0].message.content
|
||||
if title:
|
||||
@@ -237,6 +256,14 @@ async def stream_chat_completion(
|
||||
f"new message_count={len(session.messages)}"
|
||||
)
|
||||
|
||||
# Track user message in PostHog
|
||||
if is_user_message:
|
||||
track_user_message(
|
||||
user_id=user_id,
|
||||
session_id=session_id,
|
||||
message_length=len(message),
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"Upserting session: {session.session_id} with user id {session.user_id}, "
|
||||
f"message_count={len(session.messages)}"
|
||||
@@ -256,10 +283,15 @@ async def stream_chat_completion(
|
||||
# stale data issues when the main flow modifies the session
|
||||
captured_session_id = session_id
|
||||
captured_message = message
|
||||
captured_user_id = user_id
|
||||
|
||||
async def _update_title():
|
||||
try:
|
||||
title = await _generate_session_title(captured_message)
|
||||
title = await _generate_session_title(
|
||||
captured_message,
|
||||
user_id=captured_user_id,
|
||||
session_id=captured_session_id,
|
||||
)
|
||||
if title:
|
||||
# Use dedicated title update function that doesn't
|
||||
# touch messages, avoiding race conditions
|
||||
@@ -698,6 +730,20 @@ async def _stream_chat_chunks(
|
||||
f"{f' (retry {retry_count}/{MAX_RETRIES})' if retry_count > 0 else ''}"
|
||||
)
|
||||
|
||||
# Build extra_body for OpenRouter tracing and PostHog analytics
|
||||
extra_body: dict[str, Any] = {
|
||||
"posthogProperties": {
|
||||
"environment": settings.config.app_env.value,
|
||||
},
|
||||
}
|
||||
if session.user_id:
|
||||
extra_body["user"] = session.user_id[:128] # OpenRouter limit
|
||||
extra_body["posthogDistinctId"] = session.user_id
|
||||
if session.session_id:
|
||||
extra_body["session_id"] = session.session_id[
|
||||
:128
|
||||
] # OpenRouter limit
|
||||
|
||||
# Create the stream with proper types
|
||||
stream = await client.chat.completions.create(
|
||||
model=model,
|
||||
@@ -706,6 +752,7 @@ async def _stream_chat_chunks(
|
||||
tool_choice="auto",
|
||||
stream=True,
|
||||
stream_options={"include_usage": True},
|
||||
extra_body=extra_body,
|
||||
)
|
||||
|
||||
# Variables to accumulate tool calls
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from openai.types.chat import ChatCompletionToolParam
|
||||
|
||||
from backend.api.features.chat.model import ChatSession
|
||||
from backend.api.features.chat.tracking import track_tool_called
|
||||
|
||||
from .add_understanding import AddUnderstandingTool
|
||||
from .agent_output import AgentOutputTool
|
||||
@@ -20,6 +22,8 @@ from .search_docs import SearchDocsTool
|
||||
if TYPE_CHECKING:
|
||||
from backend.api.features.chat.response_model import StreamToolOutputAvailable
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Single source of truth for all tools
|
||||
TOOL_REGISTRY: dict[str, BaseTool] = {
|
||||
"add_understanding": AddUnderstandingTool(),
|
||||
@@ -56,4 +60,17 @@ async def execute_tool(
|
||||
tool = TOOL_REGISTRY.get(tool_name)
|
||||
if not tool:
|
||||
raise ValueError(f"Tool {tool_name} not found")
|
||||
|
||||
# Track tool call in PostHog
|
||||
logger.info(
|
||||
f"Tracking tool call: tool={tool_name}, user={user_id}, "
|
||||
f"session={session.session_id}, call_id={tool_call_id}"
|
||||
)
|
||||
track_tool_called(
|
||||
user_id=user_id,
|
||||
session_id=session.session_id,
|
||||
tool_name=tool_name,
|
||||
tool_call_id=tool_call_id,
|
||||
)
|
||||
|
||||
return await tool.execute(user_id, session, tool_call_id, **parameters)
|
||||
|
||||
@@ -8,6 +8,10 @@ from pydantic import BaseModel, Field, field_validator
|
||||
|
||||
from backend.api.features.chat.config import ChatConfig
|
||||
from backend.api.features.chat.model import ChatSession
|
||||
from backend.api.features.chat.tracking import (
|
||||
track_agent_run_success,
|
||||
track_agent_scheduled,
|
||||
)
|
||||
from backend.api.features.library import db as library_db
|
||||
from backend.data.graph import GraphModel
|
||||
from backend.data.model import CredentialsMetaInput
|
||||
@@ -453,6 +457,16 @@ class RunAgentTool(BaseTool):
|
||||
session.successful_agent_runs.get(library_agent.graph_id, 0) + 1
|
||||
)
|
||||
|
||||
# Track in PostHog
|
||||
track_agent_run_success(
|
||||
user_id=user_id,
|
||||
session_id=session_id,
|
||||
graph_id=library_agent.graph_id,
|
||||
graph_name=library_agent.name,
|
||||
execution_id=execution.id,
|
||||
library_agent_id=library_agent.id,
|
||||
)
|
||||
|
||||
library_agent_link = f"/library/agents/{library_agent.id}"
|
||||
return ExecutionStartedResponse(
|
||||
message=(
|
||||
@@ -534,6 +548,18 @@ class RunAgentTool(BaseTool):
|
||||
session.successful_agent_schedules.get(library_agent.graph_id, 0) + 1
|
||||
)
|
||||
|
||||
# Track in PostHog
|
||||
track_agent_scheduled(
|
||||
user_id=user_id,
|
||||
session_id=session_id,
|
||||
graph_id=library_agent.graph_id,
|
||||
graph_name=library_agent.name,
|
||||
schedule_id=result.id,
|
||||
schedule_name=schedule_name,
|
||||
cron=cron,
|
||||
library_agent_id=library_agent.id,
|
||||
)
|
||||
|
||||
library_agent_link = f"/library/agents/{library_agent.id}"
|
||||
return ExecutionStartedResponse(
|
||||
message=(
|
||||
|
||||
250
autogpt_platform/backend/backend/api/features/chat/tracking.py
Normal file
@@ -0,0 +1,250 @@
|
||||
"""PostHog analytics tracking for the chat system."""
|
||||
|
||||
import atexit
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from posthog import Posthog
|
||||
|
||||
from backend.util.settings import Settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
settings = Settings()
|
||||
|
||||
# PostHog client instance (lazily initialized)
|
||||
_posthog_client: Posthog | None = None
|
||||
|
||||
|
||||
def _shutdown_posthog() -> None:
|
||||
"""Flush and shutdown PostHog client on process exit."""
|
||||
if _posthog_client is not None:
|
||||
_posthog_client.flush()
|
||||
_posthog_client.shutdown()
|
||||
|
||||
|
||||
atexit.register(_shutdown_posthog)
|
||||
|
||||
|
||||
def _get_posthog_client() -> Posthog | None:
|
||||
"""Get or create the PostHog client instance."""
|
||||
global _posthog_client
|
||||
if _posthog_client is not None:
|
||||
return _posthog_client
|
||||
|
||||
if not settings.secrets.posthog_api_key:
|
||||
logger.debug("PostHog API key not configured, analytics disabled")
|
||||
return None
|
||||
|
||||
_posthog_client = Posthog(
|
||||
settings.secrets.posthog_api_key,
|
||||
host=settings.secrets.posthog_host,
|
||||
)
|
||||
logger.info(
|
||||
f"PostHog client initialized with host: {settings.secrets.posthog_host}"
|
||||
)
|
||||
return _posthog_client
|
||||
|
||||
|
||||
def _get_base_properties() -> dict[str, Any]:
|
||||
"""Get base properties included in all events."""
|
||||
return {
|
||||
"environment": settings.config.app_env.value,
|
||||
"source": "chat_copilot",
|
||||
}
|
||||
|
||||
|
||||
def track_user_message(
|
||||
user_id: str | None,
|
||||
session_id: str,
|
||||
message_length: int,
|
||||
) -> None:
|
||||
"""Track when a user sends a message in chat.
|
||||
|
||||
Args:
|
||||
user_id: The user's ID (or None for anonymous)
|
||||
session_id: The chat session ID
|
||||
message_length: Length of the user's message
|
||||
"""
|
||||
client = _get_posthog_client()
|
||||
if not client:
|
||||
return
|
||||
|
||||
try:
|
||||
properties = {
|
||||
**_get_base_properties(),
|
||||
"session_id": session_id,
|
||||
"message_length": message_length,
|
||||
}
|
||||
client.capture(
|
||||
distinct_id=user_id or f"anonymous_{session_id}",
|
||||
event="copilot_message_sent",
|
||||
properties=properties,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to track user message: {e}")
|
||||
|
||||
|
||||
def track_tool_called(
|
||||
user_id: str | None,
|
||||
session_id: str,
|
||||
tool_name: str,
|
||||
tool_call_id: str,
|
||||
) -> None:
|
||||
"""Track when a tool is called in chat.
|
||||
|
||||
Args:
|
||||
user_id: The user's ID (or None for anonymous)
|
||||
session_id: The chat session ID
|
||||
tool_name: Name of the tool being called
|
||||
tool_call_id: Unique ID of the tool call
|
||||
"""
|
||||
client = _get_posthog_client()
|
||||
if not client:
|
||||
logger.info("PostHog client not available for tool tracking")
|
||||
return
|
||||
|
||||
try:
|
||||
properties = {
|
||||
**_get_base_properties(),
|
||||
"session_id": session_id,
|
||||
"tool_name": tool_name,
|
||||
"tool_call_id": tool_call_id,
|
||||
}
|
||||
distinct_id = user_id or f"anonymous_{session_id}"
|
||||
logger.info(
|
||||
f"Sending copilot_tool_called event to PostHog: distinct_id={distinct_id}, "
|
||||
f"tool_name={tool_name}"
|
||||
)
|
||||
client.capture(
|
||||
distinct_id=distinct_id,
|
||||
event="copilot_tool_called",
|
||||
properties=properties,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to track tool call: {e}")
|
||||
|
||||
|
||||
def track_agent_run_success(
|
||||
user_id: str,
|
||||
session_id: str,
|
||||
graph_id: str,
|
||||
graph_name: str,
|
||||
execution_id: str,
|
||||
library_agent_id: str,
|
||||
) -> None:
|
||||
"""Track when an agent is successfully run.
|
||||
|
||||
Args:
|
||||
user_id: The user's ID
|
||||
session_id: The chat session ID
|
||||
graph_id: ID of the agent graph
|
||||
graph_name: Name of the agent
|
||||
execution_id: ID of the execution
|
||||
library_agent_id: ID of the library agent
|
||||
"""
|
||||
client = _get_posthog_client()
|
||||
if not client:
|
||||
return
|
||||
|
||||
try:
|
||||
properties = {
|
||||
**_get_base_properties(),
|
||||
"session_id": session_id,
|
||||
"graph_id": graph_id,
|
||||
"graph_name": graph_name,
|
||||
"execution_id": execution_id,
|
||||
"library_agent_id": library_agent_id,
|
||||
}
|
||||
client.capture(
|
||||
distinct_id=user_id,
|
||||
event="copilot_agent_run_success",
|
||||
properties=properties,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to track agent run: {e}")
|
||||
|
||||
|
||||
def track_agent_scheduled(
|
||||
user_id: str,
|
||||
session_id: str,
|
||||
graph_id: str,
|
||||
graph_name: str,
|
||||
schedule_id: str,
|
||||
schedule_name: str,
|
||||
cron: str,
|
||||
library_agent_id: str,
|
||||
) -> None:
|
||||
"""Track when an agent is successfully scheduled.
|
||||
|
||||
Args:
|
||||
user_id: The user's ID
|
||||
session_id: The chat session ID
|
||||
graph_id: ID of the agent graph
|
||||
graph_name: Name of the agent
|
||||
schedule_id: ID of the schedule
|
||||
schedule_name: Name of the schedule
|
||||
cron: Cron expression for the schedule
|
||||
library_agent_id: ID of the library agent
|
||||
"""
|
||||
client = _get_posthog_client()
|
||||
if not client:
|
||||
return
|
||||
|
||||
try:
|
||||
properties = {
|
||||
**_get_base_properties(),
|
||||
"session_id": session_id,
|
||||
"graph_id": graph_id,
|
||||
"graph_name": graph_name,
|
||||
"schedule_id": schedule_id,
|
||||
"schedule_name": schedule_name,
|
||||
"cron": cron,
|
||||
"library_agent_id": library_agent_id,
|
||||
}
|
||||
client.capture(
|
||||
distinct_id=user_id,
|
||||
event="copilot_agent_scheduled",
|
||||
properties=properties,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to track agent schedule: {e}")
|
||||
|
||||
|
||||
def track_trigger_setup(
|
||||
user_id: str,
|
||||
session_id: str,
|
||||
graph_id: str,
|
||||
graph_name: str,
|
||||
trigger_type: str,
|
||||
library_agent_id: str,
|
||||
) -> None:
|
||||
"""Track when a trigger is set up for an agent.
|
||||
|
||||
Args:
|
||||
user_id: The user's ID
|
||||
session_id: The chat session ID
|
||||
graph_id: ID of the agent graph
|
||||
graph_name: Name of the agent
|
||||
trigger_type: Type of trigger (e.g., 'webhook')
|
||||
library_agent_id: ID of the library agent
|
||||
"""
|
||||
client = _get_posthog_client()
|
||||
if not client:
|
||||
return
|
||||
|
||||
try:
|
||||
properties = {
|
||||
**_get_base_properties(),
|
||||
"session_id": session_id,
|
||||
"graph_id": graph_id,
|
||||
"graph_name": graph_name,
|
||||
"trigger_type": trigger_type,
|
||||
"library_agent_id": library_agent_id,
|
||||
}
|
||||
client.capture(
|
||||
distinct_id=user_id,
|
||||
event="copilot_trigger_setup",
|
||||
properties=properties,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to track trigger setup: {e}")
|
||||
659
autogpt_platform/backend/backend/blocks/claude_code.py
Normal file
@@ -0,0 +1,659 @@
|
||||
import json
|
||||
import shlex
|
||||
import uuid
|
||||
from typing import Literal, Optional
|
||||
|
||||
from e2b import AsyncSandbox as BaseAsyncSandbox
|
||||
from pydantic import BaseModel, SecretStr
|
||||
|
||||
from backend.data.block import (
|
||||
Block,
|
||||
BlockCategory,
|
||||
BlockOutput,
|
||||
BlockSchemaInput,
|
||||
BlockSchemaOutput,
|
||||
)
|
||||
from backend.data.model import (
|
||||
APIKeyCredentials,
|
||||
CredentialsField,
|
||||
CredentialsMetaInput,
|
||||
SchemaField,
|
||||
)
|
||||
from backend.integrations.providers import ProviderName
|
||||
|
||||
|
||||
class ClaudeCodeExecutionError(Exception):
|
||||
"""Exception raised when Claude Code execution fails.
|
||||
|
||||
Carries the sandbox_id so it can be returned to the user for cleanup
|
||||
when dispose_sandbox=False.
|
||||
"""
|
||||
|
||||
def __init__(self, message: str, sandbox_id: str = ""):
|
||||
super().__init__(message)
|
||||
self.sandbox_id = sandbox_id
|
||||
|
||||
|
||||
# Test credentials for E2B
|
||||
TEST_E2B_CREDENTIALS = APIKeyCredentials(
|
||||
id="01234567-89ab-cdef-0123-456789abcdef",
|
||||
provider="e2b",
|
||||
api_key=SecretStr("mock-e2b-api-key"),
|
||||
title="Mock E2B API key",
|
||||
expires_at=None,
|
||||
)
|
||||
TEST_E2B_CREDENTIALS_INPUT = {
|
||||
"provider": TEST_E2B_CREDENTIALS.provider,
|
||||
"id": TEST_E2B_CREDENTIALS.id,
|
||||
"type": TEST_E2B_CREDENTIALS.type,
|
||||
"title": TEST_E2B_CREDENTIALS.title,
|
||||
}
|
||||
|
||||
# Test credentials for Anthropic
|
||||
TEST_ANTHROPIC_CREDENTIALS = APIKeyCredentials(
|
||||
id="2e568a2b-b2ea-475a-8564-9a676bf31c56",
|
||||
provider="anthropic",
|
||||
api_key=SecretStr("mock-anthropic-api-key"),
|
||||
title="Mock Anthropic API key",
|
||||
expires_at=None,
|
||||
)
|
||||
TEST_ANTHROPIC_CREDENTIALS_INPUT = {
|
||||
"provider": TEST_ANTHROPIC_CREDENTIALS.provider,
|
||||
"id": TEST_ANTHROPIC_CREDENTIALS.id,
|
||||
"type": TEST_ANTHROPIC_CREDENTIALS.type,
|
||||
"title": TEST_ANTHROPIC_CREDENTIALS.title,
|
||||
}
|
||||
|
||||
|
||||
class ClaudeCodeBlock(Block):
|
||||
"""
|
||||
Execute tasks using Claude Code (Anthropic's AI coding assistant) in an E2B sandbox.
|
||||
|
||||
Claude Code can create files, install tools, run commands, and perform complex
|
||||
coding tasks autonomously within a secure sandbox environment.
|
||||
"""
|
||||
|
||||
# Use base template - we'll install Claude Code ourselves for latest version
|
||||
DEFAULT_TEMPLATE = "base"
|
||||
|
||||
class Input(BlockSchemaInput):
|
||||
e2b_credentials: CredentialsMetaInput[
|
||||
Literal[ProviderName.E2B], Literal["api_key"]
|
||||
] = CredentialsField(
|
||||
description=(
|
||||
"API key for the E2B platform to create the sandbox. "
|
||||
"Get one on the [e2b website](https://e2b.dev/docs)"
|
||||
),
|
||||
)
|
||||
|
||||
anthropic_credentials: CredentialsMetaInput[
|
||||
Literal[ProviderName.ANTHROPIC], Literal["api_key"]
|
||||
] = CredentialsField(
|
||||
description=(
|
||||
"API key for Anthropic to power Claude Code. "
|
||||
"Get one at [Anthropic's website](https://console.anthropic.com)"
|
||||
),
|
||||
)
|
||||
|
||||
prompt: str = SchemaField(
|
||||
description=(
|
||||
"The task or instruction for Claude Code to execute. "
|
||||
"Claude Code can create files, install packages, run commands, "
|
||||
"and perform complex coding tasks."
|
||||
),
|
||||
placeholder="Create a hello world index.html file",
|
||||
default="",
|
||||
advanced=False,
|
||||
)
|
||||
|
||||
timeout: int = SchemaField(
|
||||
description=(
|
||||
"Sandbox timeout in seconds. Claude Code tasks can take "
|
||||
"a while, so set this appropriately for your task complexity. "
|
||||
"Note: This only applies when creating a new sandbox. "
|
||||
"When reconnecting to an existing sandbox via sandbox_id, "
|
||||
"the original timeout is retained."
|
||||
),
|
||||
default=300, # 5 minutes default
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
setup_commands: list[str] = SchemaField(
|
||||
description=(
|
||||
"Optional shell commands to run before executing Claude Code. "
|
||||
"Useful for installing dependencies or setting up the environment."
|
||||
),
|
||||
default_factory=list,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
working_directory: str = SchemaField(
|
||||
description="Working directory for Claude Code to operate in.",
|
||||
default="/home/user",
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
# Session/continuation support
|
||||
session_id: str = SchemaField(
|
||||
description=(
|
||||
"Session ID to resume a previous conversation. "
|
||||
"Leave empty for a new conversation. "
|
||||
"Use the session_id from a previous run to continue that conversation."
|
||||
),
|
||||
default="",
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
sandbox_id: str = SchemaField(
|
||||
description=(
|
||||
"Sandbox ID to reconnect to an existing sandbox. "
|
||||
"Required when resuming a session (along with session_id). "
|
||||
"Use the sandbox_id from a previous run where dispose_sandbox was False."
|
||||
),
|
||||
default="",
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
conversation_history: str = SchemaField(
|
||||
description=(
|
||||
"Previous conversation history to continue from. "
|
||||
"Use this to restore context on a fresh sandbox if the previous one timed out. "
|
||||
"Pass the conversation_history output from a previous run."
|
||||
),
|
||||
default="",
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
dispose_sandbox: bool = SchemaField(
|
||||
description=(
|
||||
"Whether to dispose of the sandbox immediately after execution. "
|
||||
"Set to False if you want to continue the conversation later "
|
||||
"(you'll need both sandbox_id and session_id from the output)."
|
||||
),
|
||||
default=True,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
class FileOutput(BaseModel):
|
||||
"""A file extracted from the sandbox."""
|
||||
|
||||
path: str
|
||||
relative_path: str # Path relative to working directory (for GitHub, etc.)
|
||||
name: str
|
||||
content: str
|
||||
|
||||
class Output(BlockSchemaOutput):
|
||||
response: str = SchemaField(
|
||||
description="The output/response from Claude Code execution"
|
||||
)
|
||||
files: list["ClaudeCodeBlock.FileOutput"] = SchemaField(
|
||||
description=(
|
||||
"List of text files created/modified by Claude Code during this execution. "
|
||||
"Each file has 'path', 'relative_path', 'name', and 'content' fields."
|
||||
)
|
||||
)
|
||||
conversation_history: str = SchemaField(
|
||||
description=(
|
||||
"Full conversation history including this turn. "
|
||||
"Pass this to conversation_history input to continue on a fresh sandbox "
|
||||
"if the previous sandbox timed out."
|
||||
)
|
||||
)
|
||||
session_id: str = SchemaField(
|
||||
description=(
|
||||
"Session ID for this conversation. "
|
||||
"Pass this back along with sandbox_id to continue the conversation."
|
||||
)
|
||||
)
|
||||
sandbox_id: Optional[str] = SchemaField(
|
||||
description=(
|
||||
"ID of the sandbox instance. "
|
||||
"Pass this back along with session_id to continue the conversation. "
|
||||
"This is None if dispose_sandbox was True (sandbox was disposed)."
|
||||
),
|
||||
default=None,
|
||||
)
|
||||
error: str = SchemaField(description="Error message if execution failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="4e34f4a5-9b89-4326-ba77-2dd6750b7194",
|
||||
description=(
|
||||
"Execute tasks using Claude Code in an E2B sandbox. "
|
||||
"Claude Code can create files, install tools, run commands, "
|
||||
"and perform complex coding tasks autonomously."
|
||||
),
|
||||
categories={BlockCategory.DEVELOPER_TOOLS, BlockCategory.AI},
|
||||
input_schema=ClaudeCodeBlock.Input,
|
||||
output_schema=ClaudeCodeBlock.Output,
|
||||
test_credentials={
|
||||
"e2b_credentials": TEST_E2B_CREDENTIALS,
|
||||
"anthropic_credentials": TEST_ANTHROPIC_CREDENTIALS,
|
||||
},
|
||||
test_input={
|
||||
"e2b_credentials": TEST_E2B_CREDENTIALS_INPUT,
|
||||
"anthropic_credentials": TEST_ANTHROPIC_CREDENTIALS_INPUT,
|
||||
"prompt": "Create a hello world HTML file",
|
||||
"timeout": 300,
|
||||
"setup_commands": [],
|
||||
"working_directory": "/home/user",
|
||||
"session_id": "",
|
||||
"sandbox_id": "",
|
||||
"conversation_history": "",
|
||||
"dispose_sandbox": True,
|
||||
},
|
||||
test_output=[
|
||||
("response", "Created index.html with hello world content"),
|
||||
(
|
||||
"files",
|
||||
[
|
||||
{
|
||||
"path": "/home/user/index.html",
|
||||
"relative_path": "index.html",
|
||||
"name": "index.html",
|
||||
"content": "<html>Hello World</html>",
|
||||
}
|
||||
],
|
||||
),
|
||||
(
|
||||
"conversation_history",
|
||||
"User: Create a hello world HTML file\n"
|
||||
"Claude: Created index.html with hello world content",
|
||||
),
|
||||
("session_id", str),
|
||||
("sandbox_id", None), # None because dispose_sandbox=True in test_input
|
||||
],
|
||||
test_mock={
|
||||
"execute_claude_code": lambda *args, **kwargs: (
|
||||
"Created index.html with hello world content", # response
|
||||
[
|
||||
ClaudeCodeBlock.FileOutput(
|
||||
path="/home/user/index.html",
|
||||
relative_path="index.html",
|
||||
name="index.html",
|
||||
content="<html>Hello World</html>",
|
||||
)
|
||||
], # files
|
||||
"User: Create a hello world HTML file\n"
|
||||
"Claude: Created index.html with hello world content", # conversation_history
|
||||
"test-session-id", # session_id
|
||||
"sandbox_id", # sandbox_id
|
||||
),
|
||||
},
|
||||
)
|
||||
|
||||
async def execute_claude_code(
|
||||
self,
|
||||
e2b_api_key: str,
|
||||
anthropic_api_key: str,
|
||||
prompt: str,
|
||||
timeout: int,
|
||||
setup_commands: list[str],
|
||||
working_directory: str,
|
||||
session_id: str,
|
||||
existing_sandbox_id: str,
|
||||
conversation_history: str,
|
||||
dispose_sandbox: bool,
|
||||
) -> tuple[str, list["ClaudeCodeBlock.FileOutput"], str, str, str]:
|
||||
"""
|
||||
Execute Claude Code in an E2B sandbox.
|
||||
|
||||
Returns:
|
||||
Tuple of (response, files, conversation_history, session_id, sandbox_id)
|
||||
"""
|
||||
|
||||
# Validate that sandbox_id is provided when resuming a session
|
||||
if session_id and not existing_sandbox_id:
|
||||
raise ValueError(
|
||||
"sandbox_id is required when resuming a session with session_id. "
|
||||
"The session state is stored in the original sandbox. "
|
||||
"If the sandbox has timed out, use conversation_history instead "
|
||||
"to restore context on a fresh sandbox."
|
||||
)
|
||||
|
||||
sandbox = None
|
||||
sandbox_id = ""
|
||||
|
||||
try:
|
||||
# Either reconnect to existing sandbox or create a new one
|
||||
if existing_sandbox_id:
|
||||
# Reconnect to existing sandbox for conversation continuation
|
||||
sandbox = await BaseAsyncSandbox.connect(
|
||||
sandbox_id=existing_sandbox_id,
|
||||
api_key=e2b_api_key,
|
||||
)
|
||||
else:
|
||||
# Create new sandbox
|
||||
sandbox = await BaseAsyncSandbox.create(
|
||||
template=self.DEFAULT_TEMPLATE,
|
||||
api_key=e2b_api_key,
|
||||
timeout=timeout,
|
||||
envs={"ANTHROPIC_API_KEY": anthropic_api_key},
|
||||
)
|
||||
|
||||
# Install Claude Code from npm (ensures we get the latest version)
|
||||
install_result = await sandbox.commands.run(
|
||||
"npm install -g @anthropic-ai/claude-code@latest",
|
||||
timeout=120, # 2 min timeout for install
|
||||
)
|
||||
if install_result.exit_code != 0:
|
||||
raise Exception(
|
||||
f"Failed to install Claude Code: {install_result.stderr}"
|
||||
)
|
||||
|
||||
# Run any user-provided setup commands
|
||||
for cmd in setup_commands:
|
||||
setup_result = await sandbox.commands.run(cmd)
|
||||
if setup_result.exit_code != 0:
|
||||
raise Exception(
|
||||
f"Setup command failed: {cmd}\n"
|
||||
f"Exit code: {setup_result.exit_code}\n"
|
||||
f"Stdout: {setup_result.stdout}\n"
|
||||
f"Stderr: {setup_result.stderr}"
|
||||
)
|
||||
|
||||
# Capture sandbox_id immediately after creation/connection
|
||||
# so it's available for error recovery if dispose_sandbox=False
|
||||
sandbox_id = sandbox.sandbox_id
|
||||
|
||||
# Generate or use provided session ID
|
||||
current_session_id = session_id if session_id else str(uuid.uuid4())
|
||||
|
||||
# Build base Claude flags
|
||||
base_flags = "-p --dangerously-skip-permissions --output-format json"
|
||||
|
||||
# Add conversation history context if provided (for fresh sandbox continuation)
|
||||
history_flag = ""
|
||||
if conversation_history and not session_id:
|
||||
# Inject previous conversation as context via system prompt
|
||||
# Use consistent escaping via _escape_prompt helper
|
||||
escaped_history = self._escape_prompt(
|
||||
f"Previous conversation context: {conversation_history}"
|
||||
)
|
||||
history_flag = f" --append-system-prompt {escaped_history}"
|
||||
|
||||
# Build Claude command based on whether we're resuming or starting new
|
||||
# Use shlex.quote for working_directory and session IDs to prevent injection
|
||||
safe_working_dir = shlex.quote(working_directory)
|
||||
if session_id:
|
||||
# Resuming existing session (sandbox still alive)
|
||||
safe_session_id = shlex.quote(session_id)
|
||||
claude_command = (
|
||||
f"cd {safe_working_dir} && "
|
||||
f"echo {self._escape_prompt(prompt)} | "
|
||||
f"claude --resume {safe_session_id} {base_flags}"
|
||||
)
|
||||
else:
|
||||
# New session with specific ID
|
||||
safe_current_session_id = shlex.quote(current_session_id)
|
||||
claude_command = (
|
||||
f"cd {safe_working_dir} && "
|
||||
f"echo {self._escape_prompt(prompt)} | "
|
||||
f"claude --session-id {safe_current_session_id} {base_flags}{history_flag}"
|
||||
)
|
||||
|
||||
# Capture timestamp before running Claude Code to filter files later
|
||||
# Capture timestamp 1 second in the past to avoid race condition with file creation
|
||||
timestamp_result = await sandbox.commands.run(
|
||||
"date -u -d '1 second ago' +%Y-%m-%dT%H:%M:%S"
|
||||
)
|
||||
if timestamp_result.exit_code != 0:
|
||||
raise RuntimeError(
|
||||
f"Failed to capture timestamp: {timestamp_result.stderr}"
|
||||
)
|
||||
start_timestamp = (
|
||||
timestamp_result.stdout.strip() if timestamp_result.stdout else None
|
||||
)
|
||||
|
||||
result = await sandbox.commands.run(
|
||||
claude_command,
|
||||
timeout=0, # No command timeout - let sandbox timeout handle it
|
||||
)
|
||||
|
||||
# Check for command failure
|
||||
if result.exit_code != 0:
|
||||
error_msg = result.stderr or result.stdout or "Unknown error"
|
||||
raise Exception(
|
||||
f"Claude Code command failed with exit code {result.exit_code}:\n"
|
||||
f"{error_msg}"
|
||||
)
|
||||
|
||||
raw_output = result.stdout or ""
|
||||
|
||||
# Parse JSON output to extract response and build conversation history
|
||||
response = ""
|
||||
new_conversation_history = conversation_history or ""
|
||||
|
||||
try:
|
||||
# The JSON output contains the result
|
||||
output_data = json.loads(raw_output)
|
||||
response = output_data.get("result", raw_output)
|
||||
|
||||
# Build conversation history entry
|
||||
turn_entry = f"User: {prompt}\nClaude: {response}"
|
||||
if new_conversation_history:
|
||||
new_conversation_history = (
|
||||
f"{new_conversation_history}\n\n{turn_entry}"
|
||||
)
|
||||
else:
|
||||
new_conversation_history = turn_entry
|
||||
|
||||
except json.JSONDecodeError:
|
||||
# If not valid JSON, use raw output
|
||||
response = raw_output
|
||||
turn_entry = f"User: {prompt}\nClaude: {response}"
|
||||
if new_conversation_history:
|
||||
new_conversation_history = (
|
||||
f"{new_conversation_history}\n\n{turn_entry}"
|
||||
)
|
||||
else:
|
||||
new_conversation_history = turn_entry
|
||||
|
||||
# Extract files created/modified during this run
|
||||
files = await self._extract_files(
|
||||
sandbox, working_directory, start_timestamp
|
||||
)
|
||||
|
||||
return (
|
||||
response,
|
||||
files,
|
||||
new_conversation_history,
|
||||
current_session_id,
|
||||
sandbox_id,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
# Wrap exception with sandbox_id so caller can access/cleanup
|
||||
# the preserved sandbox when dispose_sandbox=False
|
||||
raise ClaudeCodeExecutionError(str(e), sandbox_id) from e
|
||||
|
||||
finally:
|
||||
if dispose_sandbox and sandbox:
|
||||
await sandbox.kill()
|
||||
|
||||
async def _extract_files(
|
||||
self,
|
||||
sandbox: BaseAsyncSandbox,
|
||||
working_directory: str,
|
||||
since_timestamp: str | None = None,
|
||||
) -> list["ClaudeCodeBlock.FileOutput"]:
|
||||
"""
|
||||
Extract text files created/modified during this Claude Code execution.
|
||||
|
||||
Args:
|
||||
sandbox: The E2B sandbox instance
|
||||
working_directory: Directory to search for files
|
||||
since_timestamp: ISO timestamp - only return files modified after this time
|
||||
|
||||
Returns:
|
||||
List of FileOutput objects with path, relative_path, name, and content
|
||||
"""
|
||||
files: list[ClaudeCodeBlock.FileOutput] = []
|
||||
|
||||
# Text file extensions we can safely read as text
|
||||
text_extensions = {
|
||||
".txt",
|
||||
".md",
|
||||
".html",
|
||||
".htm",
|
||||
".css",
|
||||
".js",
|
||||
".ts",
|
||||
".jsx",
|
||||
".tsx",
|
||||
".json",
|
||||
".xml",
|
||||
".yaml",
|
||||
".yml",
|
||||
".toml",
|
||||
".ini",
|
||||
".cfg",
|
||||
".conf",
|
||||
".py",
|
||||
".rb",
|
||||
".php",
|
||||
".java",
|
||||
".c",
|
||||
".cpp",
|
||||
".h",
|
||||
".hpp",
|
||||
".cs",
|
||||
".go",
|
||||
".rs",
|
||||
".swift",
|
||||
".kt",
|
||||
".scala",
|
||||
".sh",
|
||||
".bash",
|
||||
".zsh",
|
||||
".sql",
|
||||
".graphql",
|
||||
".env",
|
||||
".gitignore",
|
||||
".dockerfile",
|
||||
"Dockerfile",
|
||||
".vue",
|
||||
".svelte",
|
||||
".astro",
|
||||
".mdx",
|
||||
".rst",
|
||||
".tex",
|
||||
".csv",
|
||||
".log",
|
||||
}
|
||||
|
||||
try:
|
||||
# List files recursively using find command
|
||||
# Exclude node_modules and .git directories, but allow hidden files
|
||||
# like .env and .gitignore (they're filtered by text_extensions later)
|
||||
# Filter by timestamp to only get files created/modified during this run
|
||||
safe_working_dir = shlex.quote(working_directory)
|
||||
timestamp_filter = ""
|
||||
if since_timestamp:
|
||||
timestamp_filter = f"-newermt {shlex.quote(since_timestamp)} "
|
||||
find_result = await sandbox.commands.run(
|
||||
f"find {safe_working_dir} -type f "
|
||||
f"{timestamp_filter}"
|
||||
f"-not -path '*/node_modules/*' "
|
||||
f"-not -path '*/.git/*' "
|
||||
f"2>/dev/null"
|
||||
)
|
||||
|
||||
if find_result.stdout:
|
||||
for file_path in find_result.stdout.strip().split("\n"):
|
||||
if not file_path:
|
||||
continue
|
||||
|
||||
# Check if it's a text file we can read
|
||||
is_text = any(
|
||||
file_path.endswith(ext) for ext in text_extensions
|
||||
) or file_path.endswith("Dockerfile")
|
||||
|
||||
if is_text:
|
||||
try:
|
||||
content = await sandbox.files.read(file_path)
|
||||
# Handle bytes or string
|
||||
if isinstance(content, bytes):
|
||||
content = content.decode("utf-8", errors="replace")
|
||||
|
||||
# Extract filename from path
|
||||
file_name = file_path.split("/")[-1]
|
||||
|
||||
# Calculate relative path by stripping working directory
|
||||
relative_path = file_path
|
||||
if file_path.startswith(working_directory):
|
||||
relative_path = file_path[len(working_directory) :]
|
||||
# Remove leading slash if present
|
||||
if relative_path.startswith("/"):
|
||||
relative_path = relative_path[1:]
|
||||
|
||||
files.append(
|
||||
ClaudeCodeBlock.FileOutput(
|
||||
path=file_path,
|
||||
relative_path=relative_path,
|
||||
name=file_name,
|
||||
content=content,
|
||||
)
|
||||
)
|
||||
except Exception:
|
||||
# Skip files that can't be read
|
||||
pass
|
||||
|
||||
except Exception:
|
||||
# If file extraction fails, return empty results
|
||||
pass
|
||||
|
||||
return files
|
||||
|
||||
def _escape_prompt(self, prompt: str) -> str:
|
||||
"""Escape the prompt for safe shell execution."""
|
||||
# Use single quotes and escape any single quotes in the prompt
|
||||
escaped = prompt.replace("'", "'\"'\"'")
|
||||
return f"'{escaped}'"
|
||||
|
||||
async def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
e2b_credentials: APIKeyCredentials,
|
||||
anthropic_credentials: APIKeyCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
(
|
||||
response,
|
||||
files,
|
||||
conversation_history,
|
||||
session_id,
|
||||
sandbox_id,
|
||||
) = await self.execute_claude_code(
|
||||
e2b_api_key=e2b_credentials.api_key.get_secret_value(),
|
||||
anthropic_api_key=anthropic_credentials.api_key.get_secret_value(),
|
||||
prompt=input_data.prompt,
|
||||
timeout=input_data.timeout,
|
||||
setup_commands=input_data.setup_commands,
|
||||
working_directory=input_data.working_directory,
|
||||
session_id=input_data.session_id,
|
||||
existing_sandbox_id=input_data.sandbox_id,
|
||||
conversation_history=input_data.conversation_history,
|
||||
dispose_sandbox=input_data.dispose_sandbox,
|
||||
)
|
||||
|
||||
yield "response", response
|
||||
# Always yield files (empty list if none) to match Output schema
|
||||
yield "files", [f.model_dump() for f in files]
|
||||
# Always yield conversation_history so user can restore context on fresh sandbox
|
||||
yield "conversation_history", conversation_history
|
||||
# Always yield session_id so user can continue conversation
|
||||
yield "session_id", session_id
|
||||
# Always yield sandbox_id (None if disposed) to match Output schema
|
||||
yield "sandbox_id", sandbox_id if not input_data.dispose_sandbox else None
|
||||
|
||||
except ClaudeCodeExecutionError as e:
|
||||
yield "error", str(e)
|
||||
# If sandbox was preserved (dispose_sandbox=False), yield sandbox_id
|
||||
# so user can reconnect to or clean up the orphaned sandbox
|
||||
if not input_data.dispose_sandbox and e.sandbox_id:
|
||||
yield "sandbox_id", e.sandbox_id
|
||||
except Exception as e:
|
||||
yield "error", str(e)
|
||||
@@ -666,6 +666,12 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
|
||||
default="https://cloud.langfuse.com", description="Langfuse host URL"
|
||||
)
|
||||
|
||||
# PostHog analytics
|
||||
posthog_api_key: str = Field(default="", description="PostHog API key")
|
||||
posthog_host: str = Field(
|
||||
default="https://us.i.posthog.com", description="PostHog host URL"
|
||||
)
|
||||
|
||||
# Add more secret fields as needed
|
||||
model_config = SettingsConfigDict(
|
||||
env_file=".env",
|
||||
|
||||
12
autogpt_platform/backend/poetry.lock
generated
@@ -4204,14 +4204,14 @@ strenum = {version = ">=0.4.9,<0.5.0", markers = "python_version < \"3.11\""}
|
||||
|
||||
[[package]]
|
||||
name = "posthog"
|
||||
version = "6.1.1"
|
||||
version = "7.6.0"
|
||||
description = "Integrate PostHog into any python application."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
python-versions = ">=3.10"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "posthog-6.1.1-py3-none-any.whl", hash = "sha256:329fd3d06b4d54cec925f47235bd8e327c91403c2f9ec38f1deb849535934dba"},
|
||||
{file = "posthog-6.1.1.tar.gz", hash = "sha256:b453f54c4a2589da859fd575dd3bf86fcb40580727ec399535f268b1b9f318b8"},
|
||||
{file = "posthog-7.6.0-py3-none-any.whl", hash = "sha256:c4dd78cf77c4fecceb965f86066e5ac37886ef867d68ffe75a1db5d681d7d9ad"},
|
||||
{file = "posthog-7.6.0.tar.gz", hash = "sha256:941dfd278ee427c9b14640f09b35b5bb52a71bdf028d7dbb7307e1838fd3002e"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -4225,7 +4225,7 @@ typing-extensions = ">=4.2.0"
|
||||
[package.extras]
|
||||
dev = ["django-stubs", "lxml", "mypy", "mypy-baseline", "packaging", "pre-commit", "pydantic", "ruff", "setuptools", "tomli", "tomli_w", "twine", "types-mock", "types-python-dateutil", "types-requests", "types-setuptools", "types-six", "wheel"]
|
||||
langchain = ["langchain (>=0.2.0)"]
|
||||
test = ["anthropic", "coverage", "django", "freezegun (==1.5.1)", "google-genai", "langchain-anthropic (>=0.3.15)", "langchain-community (>=0.3.25)", "langchain-core (>=0.3.65)", "langchain-openai (>=0.3.22)", "langgraph (>=0.4.8)", "mock (>=2.0.0)", "openai", "parameterized (>=0.8.1)", "pydantic", "pytest", "pytest-asyncio", "pytest-timeout"]
|
||||
test = ["anthropic (>=0.72)", "coverage", "django", "freezegun (==1.5.1)", "google-genai", "langchain-anthropic (>=1.0)", "langchain-community (>=0.4)", "langchain-core (>=1.0)", "langchain-openai (>=1.0)", "langgraph (>=1.0)", "mock (>=2.0.0)", "openai (>=2.0)", "parameterized (>=0.8.1)", "pydantic", "pytest", "pytest-asyncio", "pytest-timeout"]
|
||||
|
||||
[[package]]
|
||||
name = "postmarker"
|
||||
@@ -7512,4 +7512,4 @@ cffi = ["cffi (>=1.11)"]
|
||||
[metadata]
|
||||
lock-version = "2.1"
|
||||
python-versions = ">=3.10,<3.14"
|
||||
content-hash = "18b92e09596298c82432e4d0a85cb6d80a40b4229bee0a0c15f0529fd6cb21a4"
|
||||
content-hash = "ee5742dc1a9df50dfc06d4b26a1682cbb2b25cab6b79ce5625ec272f93e4f4bf"
|
||||
|
||||
@@ -85,6 +85,7 @@ exa-py = "^1.14.20"
|
||||
croniter = "^6.0.0"
|
||||
stagehand = "^0.5.1"
|
||||
gravitas-md2gdocs = "^0.1.0"
|
||||
posthog = "^7.6.0"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
aiohappyeyeballs = "^2.6.1"
|
||||
|
||||
@@ -34,7 +34,10 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
# Default output directory relative to repo root
|
||||
DEFAULT_OUTPUT_DIR = (
|
||||
Path(__file__).parent.parent.parent.parent / "docs" / "integrations"
|
||||
Path(__file__).parent.parent.parent.parent
|
||||
/ "docs"
|
||||
/ "integrations"
|
||||
/ "block-integrations"
|
||||
)
|
||||
|
||||
|
||||
@@ -421,6 +424,14 @@ def generate_block_markdown(
|
||||
lines.append("<!-- END MANUAL -->")
|
||||
lines.append("")
|
||||
|
||||
# Optional per-block extras (only include if has content)
|
||||
extras = manual_content.get("extras", "")
|
||||
if extras:
|
||||
lines.append("<!-- MANUAL: extras -->")
|
||||
lines.append(extras)
|
||||
lines.append("<!-- END MANUAL -->")
|
||||
lines.append("")
|
||||
|
||||
lines.append("---")
|
||||
lines.append("")
|
||||
|
||||
@@ -456,25 +467,52 @@ def get_block_file_mapping(blocks: list[BlockDoc]) -> dict[str, list[BlockDoc]]:
|
||||
return dict(file_mapping)
|
||||
|
||||
|
||||
def generate_overview_table(blocks: list[BlockDoc]) -> str:
|
||||
"""Generate the overview table markdown (blocks.md)."""
|
||||
def generate_overview_table(blocks: list[BlockDoc], block_dir_prefix: str = "") -> str:
|
||||
"""Generate the overview table markdown (blocks.md).
|
||||
|
||||
Args:
|
||||
blocks: List of block documentation objects
|
||||
block_dir_prefix: Prefix for block file links (e.g., "block-integrations/")
|
||||
"""
|
||||
lines = []
|
||||
|
||||
# GitBook YAML frontmatter
|
||||
lines.append("---")
|
||||
lines.append("layout:")
|
||||
lines.append(" width: default")
|
||||
lines.append(" title:")
|
||||
lines.append(" visible: true")
|
||||
lines.append(" description:")
|
||||
lines.append(" visible: true")
|
||||
lines.append(" tableOfContents:")
|
||||
lines.append(" visible: false")
|
||||
lines.append(" outline:")
|
||||
lines.append(" visible: true")
|
||||
lines.append(" pagination:")
|
||||
lines.append(" visible: true")
|
||||
lines.append(" metadata:")
|
||||
lines.append(" visible: true")
|
||||
lines.append("---")
|
||||
lines.append("")
|
||||
|
||||
lines.append("# AutoGPT Blocks Overview")
|
||||
lines.append("")
|
||||
lines.append(
|
||||
'AutoGPT uses a modular approach with various "blocks" to handle different tasks. These blocks are the building blocks of AutoGPT workflows, allowing users to create complex automations by combining simple, specialized components.'
|
||||
)
|
||||
lines.append("")
|
||||
lines.append('!!! info "Creating Your Own Blocks"')
|
||||
lines.append(" Want to create your own custom blocks? Check out our guides:")
|
||||
lines.append(" ")
|
||||
lines.append('{% hint style="info" %}')
|
||||
lines.append("**Creating Your Own Blocks**")
|
||||
lines.append("")
|
||||
lines.append("Want to create your own custom blocks? Check out our guides:")
|
||||
lines.append("")
|
||||
lines.append(
|
||||
" - [Build your own Blocks](https://docs.agpt.co/platform/new_blocks/) - Step-by-step tutorial with examples"
|
||||
"* [Build your own Blocks](https://docs.agpt.co/platform/new_blocks/) - Step-by-step tutorial with examples"
|
||||
)
|
||||
lines.append(
|
||||
" - [Block SDK Guide](https://docs.agpt.co/platform/block-sdk-guide/) - Advanced SDK patterns with OAuth, webhooks, and provider configuration"
|
||||
"* [Block SDK Guide](https://docs.agpt.co/platform/block-sdk-guide/) - Advanced SDK patterns with OAuth, webhooks, and provider configuration"
|
||||
)
|
||||
lines.append("{% endhint %}")
|
||||
lines.append("")
|
||||
lines.append(
|
||||
"Below is a comprehensive list of all available blocks, categorized by their primary function. Click on any block name to view its detailed documentation."
|
||||
@@ -537,7 +575,8 @@ def generate_overview_table(blocks: list[BlockDoc]) -> str:
|
||||
else "No description"
|
||||
)
|
||||
short_desc = short_desc.replace("\n", " ").replace("|", "\\|")
|
||||
lines.append(f"| [{block.name}]({file_path}#{anchor}) | {short_desc} |")
|
||||
link_path = f"{block_dir_prefix}{file_path}"
|
||||
lines.append(f"| [{block.name}]({link_path}#{anchor}) | {short_desc} |")
|
||||
lines.append("")
|
||||
continue
|
||||
|
||||
@@ -563,13 +602,55 @@ def generate_overview_table(blocks: list[BlockDoc]) -> str:
|
||||
)
|
||||
short_desc = short_desc.replace("\n", " ").replace("|", "\\|")
|
||||
|
||||
lines.append(f"| [{block.name}]({file_path}#{anchor}) | {short_desc} |")
|
||||
link_path = f"{block_dir_prefix}{file_path}"
|
||||
lines.append(f"| [{block.name}]({link_path}#{anchor}) | {short_desc} |")
|
||||
|
||||
lines.append("")
|
||||
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
def generate_summary_md(
|
||||
blocks: list[BlockDoc], root_dir: Path, block_dir_prefix: str = ""
|
||||
) -> str:
|
||||
"""Generate SUMMARY.md for GitBook navigation.
|
||||
|
||||
Args:
|
||||
blocks: List of block documentation objects
|
||||
root_dir: The root docs directory (e.g., docs/integrations/)
|
||||
block_dir_prefix: Prefix for block file links (e.g., "block-integrations/")
|
||||
"""
|
||||
lines = []
|
||||
lines.append("# Table of contents")
|
||||
lines.append("")
|
||||
lines.append("* [AutoGPT Blocks Overview](README.md)")
|
||||
lines.append("")
|
||||
|
||||
# Check for guides/ directory at the root level (docs/integrations/guides/)
|
||||
guides_dir = root_dir / "guides"
|
||||
if guides_dir.exists():
|
||||
lines.append("## Guides")
|
||||
lines.append("")
|
||||
for guide_file in sorted(guides_dir.glob("*.md")):
|
||||
# Use just the file name for title (replace hyphens/underscores with spaces)
|
||||
title = file_path_to_title(guide_file.stem.replace("-", "_") + ".md")
|
||||
lines.append(f"* [{title}](guides/{guide_file.name})")
|
||||
lines.append("")
|
||||
|
||||
lines.append("## Block Integrations")
|
||||
lines.append("")
|
||||
|
||||
file_mapping = get_block_file_mapping(blocks)
|
||||
for file_path in sorted(file_mapping.keys()):
|
||||
title = file_path_to_title(file_path)
|
||||
link_path = f"{block_dir_prefix}{file_path}"
|
||||
lines.append(f"* [{title}]({link_path})")
|
||||
|
||||
lines.append("")
|
||||
|
||||
return "\n".join(lines)
|
||||
|
||||
|
||||
def load_all_blocks_for_docs() -> list[BlockDoc]:
|
||||
"""Load all blocks and extract documentation."""
|
||||
from backend.blocks import load_all_blocks
|
||||
@@ -653,6 +734,16 @@ def write_block_docs(
|
||||
)
|
||||
)
|
||||
|
||||
# Add file-level additional_content section if present
|
||||
file_additional = extract_manual_content(existing_content).get(
|
||||
"additional_content", ""
|
||||
)
|
||||
if file_additional:
|
||||
content_parts.append("<!-- MANUAL: additional_content -->")
|
||||
content_parts.append(file_additional)
|
||||
content_parts.append("<!-- END MANUAL -->")
|
||||
content_parts.append("")
|
||||
|
||||
full_content = file_header + "\n" + "\n".join(content_parts)
|
||||
generated_files[str(file_path)] = full_content
|
||||
|
||||
@@ -661,14 +752,28 @@ def write_block_docs(
|
||||
|
||||
full_path.write_text(full_content)
|
||||
|
||||
# Generate overview file
|
||||
overview_content = generate_overview_table(blocks)
|
||||
overview_path = output_dir / "README.md"
|
||||
# Generate overview file at the parent directory (docs/integrations/)
|
||||
# with links prefixed to point into block-integrations/
|
||||
root_dir = output_dir.parent
|
||||
block_dir_name = output_dir.name # "block-integrations"
|
||||
block_dir_prefix = f"{block_dir_name}/"
|
||||
|
||||
overview_content = generate_overview_table(blocks, block_dir_prefix)
|
||||
overview_path = root_dir / "README.md"
|
||||
generated_files["README.md"] = overview_content
|
||||
overview_path.write_text(overview_content)
|
||||
|
||||
if verbose:
|
||||
print(" Writing README.md (overview)")
|
||||
print(" Writing README.md (overview) to parent directory")
|
||||
|
||||
# Generate SUMMARY.md for GitBook navigation at the parent directory
|
||||
summary_content = generate_summary_md(blocks, root_dir, block_dir_prefix)
|
||||
summary_path = root_dir / "SUMMARY.md"
|
||||
generated_files["SUMMARY.md"] = summary_content
|
||||
summary_path.write_text(summary_content)
|
||||
|
||||
if verbose:
|
||||
print(" Writing SUMMARY.md (navigation) to parent directory")
|
||||
|
||||
return generated_files
|
||||
|
||||
@@ -748,6 +853,16 @@ def check_docs_in_sync(output_dir: Path, blocks: list[BlockDoc]) -> bool:
|
||||
elif block_match.group(1).strip() != expected_block_content.strip():
|
||||
mismatched_blocks.append(block.name)
|
||||
|
||||
# Add file-level additional_content to expected content (matches write_block_docs)
|
||||
file_additional = extract_manual_content(existing_content).get(
|
||||
"additional_content", ""
|
||||
)
|
||||
if file_additional:
|
||||
content_parts.append("<!-- MANUAL: additional_content -->")
|
||||
content_parts.append(file_additional)
|
||||
content_parts.append("<!-- END MANUAL -->")
|
||||
content_parts.append("")
|
||||
|
||||
expected_content = file_header + "\n" + "\n".join(content_parts)
|
||||
|
||||
if existing_content.strip() != expected_content.strip():
|
||||
@@ -757,11 +872,15 @@ def check_docs_in_sync(output_dir: Path, blocks: list[BlockDoc]) -> bool:
|
||||
out_of_sync_details.append((file_path, mismatched_blocks))
|
||||
all_match = False
|
||||
|
||||
# Check overview
|
||||
overview_path = output_dir / "README.md"
|
||||
# Check overview at the parent directory (docs/integrations/)
|
||||
root_dir = output_dir.parent
|
||||
block_dir_name = output_dir.name # "block-integrations"
|
||||
block_dir_prefix = f"{block_dir_name}/"
|
||||
|
||||
overview_path = root_dir / "README.md"
|
||||
if overview_path.exists():
|
||||
existing_overview = overview_path.read_text()
|
||||
expected_overview = generate_overview_table(blocks)
|
||||
expected_overview = generate_overview_table(blocks, block_dir_prefix)
|
||||
if existing_overview.strip() != expected_overview.strip():
|
||||
print("OUT OF SYNC: README.md (overview)")
|
||||
print(" The blocks overview table needs regeneration")
|
||||
@@ -772,6 +891,21 @@ def check_docs_in_sync(output_dir: Path, blocks: list[BlockDoc]) -> bool:
|
||||
out_of_sync_details.append(("README.md", ["overview table"]))
|
||||
all_match = False
|
||||
|
||||
# Check SUMMARY.md at the parent directory
|
||||
summary_path = root_dir / "SUMMARY.md"
|
||||
if summary_path.exists():
|
||||
existing_summary = summary_path.read_text()
|
||||
expected_summary = generate_summary_md(blocks, root_dir, block_dir_prefix)
|
||||
if existing_summary.strip() != expected_summary.strip():
|
||||
print("OUT OF SYNC: SUMMARY.md (navigation)")
|
||||
print(" The GitBook navigation needs regeneration")
|
||||
out_of_sync_details.append(("SUMMARY.md", ["navigation"]))
|
||||
all_match = False
|
||||
else:
|
||||
print("MISSING: SUMMARY.md (navigation)")
|
||||
out_of_sync_details.append(("SUMMARY.md", ["navigation"]))
|
||||
all_match = False
|
||||
|
||||
# Check for unfilled manual sections
|
||||
unfilled_patterns = [
|
||||
"_Add a description of this category of blocks._",
|
||||
|
||||
@@ -30,3 +30,7 @@ NEXT_PUBLIC_TURNSTILE=disabled
|
||||
|
||||
# PR previews
|
||||
NEXT_PUBLIC_PREVIEW_STEALING_DEV=
|
||||
|
||||
# PostHog Analytics
|
||||
NEXT_PUBLIC_POSTHOG_KEY=
|
||||
NEXT_PUBLIC_POSTHOG_HOST=https://eu.i.posthog.com
|
||||
|
||||
@@ -34,6 +34,7 @@
|
||||
"@hookform/resolvers": "5.2.2",
|
||||
"@next/third-parties": "15.4.6",
|
||||
"@phosphor-icons/react": "2.1.10",
|
||||
"@posthog/react": "1.7.0",
|
||||
"@radix-ui/react-accordion": "1.2.12",
|
||||
"@radix-ui/react-alert-dialog": "1.1.15",
|
||||
"@radix-ui/react-avatar": "1.1.10",
|
||||
@@ -91,6 +92,7 @@
|
||||
"next-themes": "0.4.6",
|
||||
"nuqs": "2.7.2",
|
||||
"party-js": "2.2.0",
|
||||
"posthog-js": "1.334.1",
|
||||
"react": "18.3.1",
|
||||
"react-currency-input-field": "4.0.3",
|
||||
"react-day-picker": "9.11.1",
|
||||
@@ -120,7 +122,6 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@chromatic-com/storybook": "4.1.2",
|
||||
"happy-dom": "20.3.4",
|
||||
"@opentelemetry/instrumentation": "0.209.0",
|
||||
"@playwright/test": "1.56.1",
|
||||
"@storybook/addon-a11y": "9.1.5",
|
||||
@@ -148,6 +149,7 @@
|
||||
"eslint": "8.57.1",
|
||||
"eslint-config-next": "15.5.7",
|
||||
"eslint-plugin-storybook": "9.1.5",
|
||||
"happy-dom": "20.3.4",
|
||||
"import-in-the-middle": "2.0.2",
|
||||
"msw": "2.11.6",
|
||||
"msw-storybook-addon": "2.0.6",
|
||||
|
||||
246
autogpt_platform/frontend/pnpm-lock.yaml
generated
@@ -23,6 +23,9 @@ importers:
|
||||
'@phosphor-icons/react':
|
||||
specifier: 2.1.10
|
||||
version: 2.1.10(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
|
||||
'@posthog/react':
|
||||
specifier: 1.7.0
|
||||
version: 1.7.0(@types/react@18.3.17)(posthog-js@1.334.1)(react@18.3.1)
|
||||
'@radix-ui/react-accordion':
|
||||
specifier: 1.2.12
|
||||
version: 1.2.12(@types/react-dom@18.3.5(@types/react@18.3.17))(@types/react@18.3.17)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
|
||||
@@ -194,6 +197,9 @@ importers:
|
||||
party-js:
|
||||
specifier: 2.2.0
|
||||
version: 2.2.0
|
||||
posthog-js:
|
||||
specifier: 1.334.1
|
||||
version: 1.334.1
|
||||
react:
|
||||
specifier: 18.3.1
|
||||
version: 18.3.1
|
||||
@@ -1794,6 +1800,10 @@ packages:
|
||||
'@open-draft/until@2.1.0':
|
||||
resolution: {integrity: sha512-U69T3ItWHvLwGg5eJ0n3I62nWuE6ilHlmz7zM0npLBRvPRd7e6NYmg54vvRtP5mZG7kZqZCFVdsTWo7BPtBujg==}
|
||||
|
||||
'@opentelemetry/api-logs@0.208.0':
|
||||
resolution: {integrity: sha512-CjruKY9V6NMssL/T1kAFgzosF1v9o6oeN+aX5JB/C/xPNtmgIJqcXHG7fA82Ou1zCpWGl4lROQUKwUNE1pMCyg==}
|
||||
engines: {node: '>=8.0.0'}
|
||||
|
||||
'@opentelemetry/api-logs@0.209.0':
|
||||
resolution: {integrity: sha512-xomnUNi7TiAGtOgs0tb54LyrjRZLu9shJGGwkcN7NgtiPYOpNnKLkRJtzZvTjD/w6knSZH9sFZcUSUovYOPg6A==}
|
||||
engines: {node: '>=8.0.0'}
|
||||
@@ -1814,6 +1824,12 @@ packages:
|
||||
peerDependencies:
|
||||
'@opentelemetry/api': '>=1.0.0 <1.10.0'
|
||||
|
||||
'@opentelemetry/exporter-logs-otlp-http@0.208.0':
|
||||
resolution: {integrity: sha512-jOv40Bs9jy9bZVLo/i8FwUiuCvbjWDI+ZW13wimJm4LjnlwJxGgB+N/VWOZUTpM+ah/awXeQqKdNlpLf2EjvYg==}
|
||||
engines: {node: ^18.19.0 || >=20.6.0}
|
||||
peerDependencies:
|
||||
'@opentelemetry/api': ^1.3.0
|
||||
|
||||
'@opentelemetry/instrumentation-amqplib@0.55.0':
|
||||
resolution: {integrity: sha512-5ULoU8p+tWcQw5PDYZn8rySptGSLZHNX/7srqo2TioPnAAcvTy6sQFQXsNPrAnyRRtYGMetXVyZUy5OaX1+IfA==}
|
||||
engines: {node: ^18.19.0 || >=20.6.0}
|
||||
@@ -1952,6 +1968,18 @@ packages:
|
||||
peerDependencies:
|
||||
'@opentelemetry/api': ^1.3.0
|
||||
|
||||
'@opentelemetry/otlp-exporter-base@0.208.0':
|
||||
resolution: {integrity: sha512-gMd39gIfVb2OgxldxUtOwGJYSH8P1kVFFlJLuut32L6KgUC4gl1dMhn+YC2mGn0bDOiQYSk/uHOdSjuKp58vvA==}
|
||||
engines: {node: ^18.19.0 || >=20.6.0}
|
||||
peerDependencies:
|
||||
'@opentelemetry/api': ^1.3.0
|
||||
|
||||
'@opentelemetry/otlp-transformer@0.208.0':
|
||||
resolution: {integrity: sha512-DCFPY8C6lAQHUNkzcNT9R+qYExvsk6C5Bto2pbNxgicpcSWbe2WHShLxkOxIdNcBiYPdVHv/e7vH7K6TI+C+fQ==}
|
||||
engines: {node: ^18.19.0 || >=20.6.0}
|
||||
peerDependencies:
|
||||
'@opentelemetry/api': ^1.3.0
|
||||
|
||||
'@opentelemetry/redis-common@0.38.2':
|
||||
resolution: {integrity: sha512-1BCcU93iwSRZvDAgwUxC/DV4T/406SkMfxGqu5ojc3AvNI+I9GhV7v0J1HljsczuuhcnFLYqD5VmwVXfCGHzxA==}
|
||||
engines: {node: ^18.19.0 || >=20.6.0}
|
||||
@@ -1962,6 +1990,18 @@ packages:
|
||||
peerDependencies:
|
||||
'@opentelemetry/api': '>=1.3.0 <1.10.0'
|
||||
|
||||
'@opentelemetry/sdk-logs@0.208.0':
|
||||
resolution: {integrity: sha512-QlAyL1jRpOeaqx7/leG1vJMp84g0xKP6gJmfELBpnI4O/9xPX+Hu5m1POk9Kl+veNkyth5t19hRlN6tNY1sjbA==}
|
||||
engines: {node: ^18.19.0 || >=20.6.0}
|
||||
peerDependencies:
|
||||
'@opentelemetry/api': '>=1.4.0 <1.10.0'
|
||||
|
||||
'@opentelemetry/sdk-metrics@2.2.0':
|
||||
resolution: {integrity: sha512-G5KYP6+VJMZzpGipQw7Giif48h6SGQ2PFKEYCybeXJsOCB4fp8azqMAAzE5lnnHK3ZVwYQrgmFbsUJO/zOnwGw==}
|
||||
engines: {node: ^18.19.0 || >=20.6.0}
|
||||
peerDependencies:
|
||||
'@opentelemetry/api': '>=1.9.0 <1.10.0'
|
||||
|
||||
'@opentelemetry/sdk-trace-base@2.2.0':
|
||||
resolution: {integrity: sha512-xWQgL0Bmctsalg6PaXExmzdedSp3gyKV8mQBwK/j9VGdCDu2fmXIb2gAehBKbkXCpJ4HPkgv3QfoJWRT4dHWbw==}
|
||||
engines: {node: ^18.19.0 || >=20.6.0}
|
||||
@@ -2050,11 +2090,57 @@ packages:
|
||||
webpack-plugin-serve:
|
||||
optional: true
|
||||
|
||||
'@posthog/core@1.13.0':
|
||||
resolution: {integrity: sha512-knjncrk7qRmssFRbGzBl1Tunt21GRpe0Wv+uVelyL0Rh7PdQUsgguulzXFTps8hA6wPwTU4kq85qnbAJ3eH6Wg==}
|
||||
|
||||
'@posthog/react@1.7.0':
|
||||
resolution: {integrity: sha512-pM7GL7z/rKjiIwosbRiQA3buhLI6vUo+wg+T/ZrVZC7O5bVU07TfgNZTcuOj8E9dx7vDbfNrc1kjDN7PKMM8ug==}
|
||||
peerDependencies:
|
||||
'@types/react': '>=16.8.0'
|
||||
posthog-js: '>=1.257.2'
|
||||
react: '>=16.8.0'
|
||||
peerDependenciesMeta:
|
||||
'@types/react':
|
||||
optional: true
|
||||
|
||||
'@posthog/types@1.334.1':
|
||||
resolution: {integrity: sha512-ypFnwTO7qbV7icylLbujbamPdQXbJq0a61GUUBnJAeTbBw/qYPIss5IRYICcbCj0uunQrwD7/CGxVb5TOYKWgA==}
|
||||
|
||||
'@prisma/instrumentation@6.19.0':
|
||||
resolution: {integrity: sha512-QcuYy25pkXM8BJ37wVFBO7Zh34nyRV1GOb2n3lPkkbRYfl4hWl3PTcImP41P0KrzVXfa/45p6eVCos27x3exIg==}
|
||||
peerDependencies:
|
||||
'@opentelemetry/api': ^1.8
|
||||
|
||||
'@protobufjs/aspromise@1.1.2':
|
||||
resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==}
|
||||
|
||||
'@protobufjs/base64@1.1.2':
|
||||
resolution: {integrity: sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==}
|
||||
|
||||
'@protobufjs/codegen@2.0.4':
|
||||
resolution: {integrity: sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==}
|
||||
|
||||
'@protobufjs/eventemitter@1.1.0':
|
||||
resolution: {integrity: sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==}
|
||||
|
||||
'@protobufjs/fetch@1.1.0':
|
||||
resolution: {integrity: sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==}
|
||||
|
||||
'@protobufjs/float@1.0.2':
|
||||
resolution: {integrity: sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==}
|
||||
|
||||
'@protobufjs/inquire@1.1.0':
|
||||
resolution: {integrity: sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==}
|
||||
|
||||
'@protobufjs/path@1.1.2':
|
||||
resolution: {integrity: sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==}
|
||||
|
||||
'@protobufjs/pool@1.1.0':
|
||||
resolution: {integrity: sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==}
|
||||
|
||||
'@protobufjs/utf8@1.1.0':
|
||||
resolution: {integrity: sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==}
|
||||
|
||||
'@radix-ui/number@1.1.1':
|
||||
resolution: {integrity: sha512-MkKCwxlXTgz6CFoJx3pCwn07GKp36+aZyu/u2Ln2VrA5DcdyCZkASEDBTd8x5whTQQL5CiYf4prXKLcgQdv29g==}
|
||||
|
||||
@@ -3401,6 +3487,9 @@ packages:
|
||||
'@types/tedious@4.0.14':
|
||||
resolution: {integrity: sha512-KHPsfX/FoVbUGbyYvk1q9MMQHLPeRZhRJZdO45Q4YjvFkv4hMNghCWTvy7rdKessBsmtz4euWCWAB6/tVpI1Iw==}
|
||||
|
||||
'@types/trusted-types@2.0.7':
|
||||
resolution: {integrity: sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==}
|
||||
|
||||
'@types/unist@2.0.11':
|
||||
resolution: {integrity: sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==}
|
||||
|
||||
@@ -4278,6 +4367,9 @@ packages:
|
||||
core-js-pure@3.47.0:
|
||||
resolution: {integrity: sha512-BcxeDbzUrRnXGYIVAGFtcGQVNpFcUhVjr6W7F8XktvQW2iJP9e66GP6xdKotCRFlrxBvNIBrhwKteRXqMV86Nw==}
|
||||
|
||||
core-js@3.48.0:
|
||||
resolution: {integrity: sha512-zpEHTy1fjTMZCKLHUZoVeylt9XrzaIN2rbPXEt0k+q7JE5CkCZdo6bNq55bn24a69CH7ErAVLKijxJja4fw+UQ==}
|
||||
|
||||
core-util-is@1.0.3:
|
||||
resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==}
|
||||
|
||||
@@ -4569,6 +4661,9 @@ packages:
|
||||
resolution: {integrity: sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ==}
|
||||
engines: {node: '>= 4'}
|
||||
|
||||
dompurify@3.3.1:
|
||||
resolution: {integrity: sha512-qkdCKzLNtrgPFP1Vo+98FRzJnBRGe4ffyCea9IwHB1fyxPOeNTHpLKYGd4Uk9xvNoH0ZoOjwZxNptyMwqrId1Q==}
|
||||
|
||||
domutils@2.8.0:
|
||||
resolution: {integrity: sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==}
|
||||
|
||||
@@ -4939,6 +5034,9 @@ packages:
|
||||
picomatch:
|
||||
optional: true
|
||||
|
||||
fflate@0.4.8:
|
||||
resolution: {integrity: sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA==}
|
||||
|
||||
file-entry-cache@6.0.1:
|
||||
resolution: {integrity: sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==}
|
||||
engines: {node: ^10.12.0 || >=12.0.0}
|
||||
@@ -5745,6 +5843,9 @@ packages:
|
||||
resolution: {integrity: sha512-HgMmCqIJSAKqo68l0rS2AanEWfkxaZ5wNiEFb5ggm08lDs9Xl2KxBlX3PTcaD2chBM1gXAYf491/M2Rv8Jwayg==}
|
||||
engines: {node: '>= 0.6.0'}
|
||||
|
||||
long@5.3.2:
|
||||
resolution: {integrity: sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==}
|
||||
|
||||
longest-streak@3.1.0:
|
||||
resolution: {integrity: sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==}
|
||||
|
||||
@@ -6534,6 +6635,12 @@ packages:
|
||||
resolution: {integrity: sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==}
|
||||
engines: {node: '>=0.10.0'}
|
||||
|
||||
posthog-js@1.334.1:
|
||||
resolution: {integrity: sha512-5cDzLICr2afnwX/cR9fwoLC0vN0Nb5gP5HiCigzHkgHdO+E3WsYefla3EFMQz7U4r01CBPZ+nZ9/srkzeACxtQ==}
|
||||
|
||||
preact@10.28.2:
|
||||
resolution: {integrity: sha512-lbteaWGzGHdlIuiJ0l2Jq454m6kcpI1zNje6d8MlGAFlYvP2GO4ibnat7P74Esfz4sPTdM6UxtTwh/d3pwM9JA==}
|
||||
|
||||
prelude-ls@1.2.1:
|
||||
resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==}
|
||||
engines: {node: '>= 0.8.0'}
|
||||
@@ -6622,6 +6729,10 @@ packages:
|
||||
property-information@7.1.0:
|
||||
resolution: {integrity: sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==}
|
||||
|
||||
protobufjs@7.5.4:
|
||||
resolution: {integrity: sha512-CvexbZtbov6jW2eXAvLukXjXUW1TzFaivC46BpWc/3BpcCysb5Vffu+B3XHMm8lVEuy2Mm4XGex8hBSg1yapPg==}
|
||||
engines: {node: '>=12.0.0'}
|
||||
|
||||
proxy-from-env@1.1.0:
|
||||
resolution: {integrity: sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==}
|
||||
|
||||
@@ -6643,6 +6754,9 @@ packages:
|
||||
resolution: {integrity: sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==}
|
||||
engines: {node: '>=0.6'}
|
||||
|
||||
query-selector-shadow-dom@1.0.1:
|
||||
resolution: {integrity: sha512-lT5yCqEBgfoMYpf3F2xQRK7zEr1rhIIZuceDK6+xRkJQ4NMbHTwXqk4NkwDwQMNqXgG9r9fyHnzwNVs6zV5KRw==}
|
||||
|
||||
querystring-es3@0.2.1:
|
||||
resolution: {integrity: sha512-773xhDQnZBMFobEiztv8LIl70ch5MSF/jUQVlhwFyBILqq96anmoctVIYz+ZRp0qbCKATTn6ev02M3r7Ga5vqA==}
|
||||
engines: {node: '>=0.4.x'}
|
||||
@@ -7821,6 +7935,9 @@ packages:
|
||||
web-namespaces@2.0.1:
|
||||
resolution: {integrity: sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==}
|
||||
|
||||
web-vitals@5.1.0:
|
||||
resolution: {integrity: sha512-ArI3kx5jI0atlTtmV0fWU3fjpLmq/nD3Zr1iFFlJLaqa5wLBkUSzINwBPySCX/8jRyjlmy1Volw1kz1g9XE4Jg==}
|
||||
|
||||
webidl-conversions@3.0.1:
|
||||
resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==}
|
||||
|
||||
@@ -9420,6 +9537,10 @@ snapshots:
|
||||
|
||||
'@open-draft/until@2.1.0': {}
|
||||
|
||||
'@opentelemetry/api-logs@0.208.0':
|
||||
dependencies:
|
||||
'@opentelemetry/api': 1.9.0
|
||||
|
||||
'@opentelemetry/api-logs@0.209.0':
|
||||
dependencies:
|
||||
'@opentelemetry/api': 1.9.0
|
||||
@@ -9435,6 +9556,15 @@ snapshots:
|
||||
'@opentelemetry/api': 1.9.0
|
||||
'@opentelemetry/semantic-conventions': 1.38.0
|
||||
|
||||
'@opentelemetry/exporter-logs-otlp-http@0.208.0(@opentelemetry/api@1.9.0)':
|
||||
dependencies:
|
||||
'@opentelemetry/api': 1.9.0
|
||||
'@opentelemetry/api-logs': 0.208.0
|
||||
'@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.0)
|
||||
'@opentelemetry/otlp-exporter-base': 0.208.0(@opentelemetry/api@1.9.0)
|
||||
'@opentelemetry/otlp-transformer': 0.208.0(@opentelemetry/api@1.9.0)
|
||||
'@opentelemetry/sdk-logs': 0.208.0(@opentelemetry/api@1.9.0)
|
||||
|
||||
'@opentelemetry/instrumentation-amqplib@0.55.0(@opentelemetry/api@1.9.0)':
|
||||
dependencies:
|
||||
'@opentelemetry/api': 1.9.0
|
||||
@@ -9629,6 +9759,23 @@ snapshots:
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@opentelemetry/otlp-exporter-base@0.208.0(@opentelemetry/api@1.9.0)':
|
||||
dependencies:
|
||||
'@opentelemetry/api': 1.9.0
|
||||
'@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.0)
|
||||
'@opentelemetry/otlp-transformer': 0.208.0(@opentelemetry/api@1.9.0)
|
||||
|
||||
'@opentelemetry/otlp-transformer@0.208.0(@opentelemetry/api@1.9.0)':
|
||||
dependencies:
|
||||
'@opentelemetry/api': 1.9.0
|
||||
'@opentelemetry/api-logs': 0.208.0
|
||||
'@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.0)
|
||||
'@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.0)
|
||||
'@opentelemetry/sdk-logs': 0.208.0(@opentelemetry/api@1.9.0)
|
||||
'@opentelemetry/sdk-metrics': 2.2.0(@opentelemetry/api@1.9.0)
|
||||
'@opentelemetry/sdk-trace-base': 2.2.0(@opentelemetry/api@1.9.0)
|
||||
protobufjs: 7.5.4
|
||||
|
||||
'@opentelemetry/redis-common@0.38.2': {}
|
||||
|
||||
'@opentelemetry/resources@2.2.0(@opentelemetry/api@1.9.0)':
|
||||
@@ -9637,6 +9784,19 @@ snapshots:
|
||||
'@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.0)
|
||||
'@opentelemetry/semantic-conventions': 1.38.0
|
||||
|
||||
'@opentelemetry/sdk-logs@0.208.0(@opentelemetry/api@1.9.0)':
|
||||
dependencies:
|
||||
'@opentelemetry/api': 1.9.0
|
||||
'@opentelemetry/api-logs': 0.208.0
|
||||
'@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.0)
|
||||
'@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.0)
|
||||
|
||||
'@opentelemetry/sdk-metrics@2.2.0(@opentelemetry/api@1.9.0)':
|
||||
dependencies:
|
||||
'@opentelemetry/api': 1.9.0
|
||||
'@opentelemetry/core': 2.2.0(@opentelemetry/api@1.9.0)
|
||||
'@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.0)
|
||||
|
||||
'@opentelemetry/sdk-trace-base@2.2.0(@opentelemetry/api@1.9.0)':
|
||||
dependencies:
|
||||
'@opentelemetry/api': 1.9.0
|
||||
@@ -9801,6 +9961,19 @@ snapshots:
|
||||
type-fest: 4.41.0
|
||||
webpack-hot-middleware: 2.26.1
|
||||
|
||||
'@posthog/core@1.13.0':
|
||||
dependencies:
|
||||
cross-spawn: 7.0.6
|
||||
|
||||
'@posthog/react@1.7.0(@types/react@18.3.17)(posthog-js@1.334.1)(react@18.3.1)':
|
||||
dependencies:
|
||||
posthog-js: 1.334.1
|
||||
react: 18.3.1
|
||||
optionalDependencies:
|
||||
'@types/react': 18.3.17
|
||||
|
||||
'@posthog/types@1.334.1': {}
|
||||
|
||||
'@prisma/instrumentation@6.19.0(@opentelemetry/api@1.9.0)':
|
||||
dependencies:
|
||||
'@opentelemetry/api': 1.9.0
|
||||
@@ -9808,6 +9981,29 @@ snapshots:
|
||||
transitivePeerDependencies:
|
||||
- supports-color
|
||||
|
||||
'@protobufjs/aspromise@1.1.2': {}
|
||||
|
||||
'@protobufjs/base64@1.1.2': {}
|
||||
|
||||
'@protobufjs/codegen@2.0.4': {}
|
||||
|
||||
'@protobufjs/eventemitter@1.1.0': {}
|
||||
|
||||
'@protobufjs/fetch@1.1.0':
|
||||
dependencies:
|
||||
'@protobufjs/aspromise': 1.1.2
|
||||
'@protobufjs/inquire': 1.1.0
|
||||
|
||||
'@protobufjs/float@1.0.2': {}
|
||||
|
||||
'@protobufjs/inquire@1.1.0': {}
|
||||
|
||||
'@protobufjs/path@1.1.2': {}
|
||||
|
||||
'@protobufjs/pool@1.1.0': {}
|
||||
|
||||
'@protobufjs/utf8@1.1.0': {}
|
||||
|
||||
'@radix-ui/number@1.1.1': {}
|
||||
|
||||
'@radix-ui/primitive@1.1.3': {}
|
||||
@@ -11426,6 +11622,9 @@ snapshots:
|
||||
dependencies:
|
||||
'@types/node': 24.10.0
|
||||
|
||||
'@types/trusted-types@2.0.7':
|
||||
optional: true
|
||||
|
||||
'@types/unist@2.0.11': {}
|
||||
|
||||
'@types/unist@3.0.3': {}
|
||||
@@ -12327,6 +12526,8 @@ snapshots:
|
||||
|
||||
core-js-pure@3.47.0: {}
|
||||
|
||||
core-js@3.48.0: {}
|
||||
|
||||
core-util-is@1.0.3: {}
|
||||
|
||||
cosmiconfig@7.1.0:
|
||||
@@ -12636,6 +12837,10 @@ snapshots:
|
||||
dependencies:
|
||||
domelementtype: 2.3.0
|
||||
|
||||
dompurify@3.3.1:
|
||||
optionalDependencies:
|
||||
'@types/trusted-types': 2.0.7
|
||||
|
||||
domutils@2.8.0:
|
||||
dependencies:
|
||||
dom-serializer: 1.4.1
|
||||
@@ -13205,6 +13410,8 @@ snapshots:
|
||||
optionalDependencies:
|
||||
picomatch: 4.0.3
|
||||
|
||||
fflate@0.4.8: {}
|
||||
|
||||
file-entry-cache@6.0.1:
|
||||
dependencies:
|
||||
flat-cache: 3.2.0
|
||||
@@ -14092,6 +14299,8 @@ snapshots:
|
||||
|
||||
loglevel@1.9.2: {}
|
||||
|
||||
long@5.3.2: {}
|
||||
|
||||
longest-streak@3.1.0: {}
|
||||
|
||||
loose-envify@1.4.0:
|
||||
@@ -15154,6 +15363,24 @@ snapshots:
|
||||
dependencies:
|
||||
xtend: 4.0.2
|
||||
|
||||
posthog-js@1.334.1:
|
||||
dependencies:
|
||||
'@opentelemetry/api': 1.9.0
|
||||
'@opentelemetry/api-logs': 0.208.0
|
||||
'@opentelemetry/exporter-logs-otlp-http': 0.208.0(@opentelemetry/api@1.9.0)
|
||||
'@opentelemetry/resources': 2.2.0(@opentelemetry/api@1.9.0)
|
||||
'@opentelemetry/sdk-logs': 0.208.0(@opentelemetry/api@1.9.0)
|
||||
'@posthog/core': 1.13.0
|
||||
'@posthog/types': 1.334.1
|
||||
core-js: 3.48.0
|
||||
dompurify: 3.3.1
|
||||
fflate: 0.4.8
|
||||
preact: 10.28.2
|
||||
query-selector-shadow-dom: 1.0.1
|
||||
web-vitals: 5.1.0
|
||||
|
||||
preact@10.28.2: {}
|
||||
|
||||
prelude-ls@1.2.1: {}
|
||||
|
||||
prettier-plugin-tailwindcss@0.7.1(prettier@3.6.2):
|
||||
@@ -15187,6 +15414,21 @@ snapshots:
|
||||
|
||||
property-information@7.1.0: {}
|
||||
|
||||
protobufjs@7.5.4:
|
||||
dependencies:
|
||||
'@protobufjs/aspromise': 1.1.2
|
||||
'@protobufjs/base64': 1.1.2
|
||||
'@protobufjs/codegen': 2.0.4
|
||||
'@protobufjs/eventemitter': 1.1.0
|
||||
'@protobufjs/fetch': 1.1.0
|
||||
'@protobufjs/float': 1.0.2
|
||||
'@protobufjs/inquire': 1.1.0
|
||||
'@protobufjs/path': 1.1.2
|
||||
'@protobufjs/pool': 1.1.0
|
||||
'@protobufjs/utf8': 1.1.0
|
||||
'@types/node': 24.10.0
|
||||
long: 5.3.2
|
||||
|
||||
proxy-from-env@1.1.0: {}
|
||||
|
||||
public-encrypt@4.0.3:
|
||||
@@ -15208,6 +15450,8 @@ snapshots:
|
||||
dependencies:
|
||||
side-channel: 1.1.0
|
||||
|
||||
query-selector-shadow-dom@1.0.1: {}
|
||||
|
||||
querystring-es3@0.2.1: {}
|
||||
|
||||
queue-microtask@1.2.3: {}
|
||||
@@ -16619,6 +16863,8 @@ snapshots:
|
||||
|
||||
web-namespaces@2.0.1: {}
|
||||
|
||||
web-vitals@5.1.0: {}
|
||||
|
||||
webidl-conversions@3.0.1: {}
|
||||
|
||||
webidl-conversions@8.0.1:
|
||||
|
||||
@@ -0,0 +1,41 @@
"use client";

import { createContext, useContext, useRef, type ReactNode } from "react";

interface NewChatContextValue {
  onNewChatClick: () => void;
  setOnNewChatClick: (handler?: () => void) => void;
  performNewChat?: () => void;
  setPerformNewChat: (handler?: () => void) => void;
}

const NewChatContext = createContext<NewChatContextValue | null>(null);

export function NewChatProvider({ children }: { children: ReactNode }) {
  const onNewChatRef = useRef<(() => void) | undefined>();
  const performNewChatRef = useRef<(() => void) | undefined>();
  const contextValueRef = useRef<NewChatContextValue>({
    onNewChatClick() {
      onNewChatRef.current?.();
    },
    setOnNewChatClick(handler?: () => void) {
      onNewChatRef.current = handler;
    },
    performNewChat() {
      performNewChatRef.current?.();
    },
    setPerformNewChat(handler?: () => void) {
      performNewChatRef.current = handler;
    },
  });

  return (
    <NewChatContext.Provider value={contextValueRef.current}>
      {children}
    </NewChatContext.Provider>
  );
}

export function useNewChat() {
  return useContext(NewChatContext);
}

@@ -1,8 +1,10 @@
"use client";

import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
import { ChatLoader } from "@/components/contextual/Chat/components/ChatLoader/ChatLoader";
import { NAVBAR_HEIGHT_PX } from "@/lib/constants";
import type { ReactNode } from "react";
import { useEffect } from "react";
import { useNewChat } from "../../NewChatContext";
import { DesktopSidebar } from "./components/DesktopSidebar/DesktopSidebar";
import { LoadingState } from "./components/LoadingState/LoadingState";
import { MobileDrawer } from "./components/MobileDrawer/MobileDrawer";
@@ -33,10 +35,25 @@ export function CopilotShell({ children }: Props) {
    isReadyToShowContent,
  } = useCopilotShell();

  const newChatContext = useNewChat();
  const handleNewChatClickWrapper =
    newChatContext?.onNewChatClick || handleNewChat;

  useEffect(
    function registerNewChatHandler() {
      if (!newChatContext) return;
      newChatContext.setPerformNewChat(handleNewChat);
      return function cleanup() {
        newChatContext.setPerformNewChat(undefined);
      };
    },
    [newChatContext, handleNewChat],
  );

  if (!isLoggedIn) {
    return (
      <div className="flex h-full items-center justify-center">
        <LoadingSpinner size="large" />
        <ChatLoader />
      </div>
    );
  }
@@ -55,7 +72,7 @@ export function CopilotShell({ children }: Props) {
            isFetchingNextPage={isFetchingNextPage}
            onSelectSession={handleSelectSession}
            onFetchNextPage={fetchNextPage}
            onNewChat={handleNewChat}
            onNewChat={handleNewChatClickWrapper}
            hasActiveSession={Boolean(hasActiveSession)}
          />
        )}
@@ -77,7 +94,7 @@ export function CopilotShell({ children }: Props) {
            isFetchingNextPage={isFetchingNextPage}
            onSelectSession={handleSelectSession}
            onFetchNextPage={fetchNextPage}
            onNewChat={handleNewChat}
            onNewChat={handleNewChatClickWrapper}
            onClose={handleCloseDrawer}
            onOpenChange={handleDrawerOpenChange}
            hasActiveSession={Boolean(hasActiveSession)}

@@ -148,13 +148,15 @@ export function useCopilotShell() {
    setHasAutoSelectedSession(false);
  }

  const isLoading = isSessionsLoading && accumulatedSessions.length === 0;

  return {
    isMobile,
    isDrawerOpen,
    isLoggedIn,
    hasActiveSession:
      Boolean(currentSessionId) && (!isOnHomepage || Boolean(paramSessionId)),
    isLoading: isSessionsLoading || !areAllSessionsLoaded,
    isLoading,
    sessions: visibleSessions,
    currentSessionId: sidebarSelectedSessionId,
    handleSelectSession,

@@ -1,5 +1,28 @@
import type { User } from "@supabase/supabase-js";

export type PageState =
  | { type: "welcome" }
  | { type: "newChat" }
  | { type: "creating"; prompt: string }
  | { type: "chat"; sessionId: string; initialPrompt?: string };

export function getInitialPromptFromState(
  pageState: PageState,
  storedInitialPrompt: string | undefined,
) {
  if (storedInitialPrompt) return storedInitialPrompt;
  if (pageState.type === "creating") return pageState.prompt;
  if (pageState.type === "chat") return pageState.initialPrompt;
}

export function shouldResetToWelcome(pageState: PageState) {
  return (
    pageState.type !== "newChat" &&
    pageState.type !== "creating" &&
    pageState.type !== "welcome"
  );
}

export function getGreetingName(user?: User | null): string {
  if (!user) return "there";
  const metadata = user.user_metadata as Record<string, unknown> | undefined;

@@ -1,6 +1,11 @@
import type { ReactNode } from "react";
import { NewChatProvider } from "./NewChatContext";
import { CopilotShell } from "./components/CopilotShell/CopilotShell";

export default function CopilotLayout({ children }: { children: ReactNode }) {
  return <CopilotShell>{children}</CopilotShell>;
  return (
    <NewChatProvider>
      <CopilotShell>{children}</CopilotShell>
    </NewChatProvider>
  );
}

@@ -1,142 +1,35 @@
|
||||
"use client";
|
||||
|
||||
import { postV2CreateSession } from "@/app/api/__generated__/endpoints/chat/chat";
|
||||
import { Skeleton } from "@/components/__legacy__/ui/skeleton";
|
||||
import { Button } from "@/components/atoms/Button/Button";
|
||||
import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
|
||||
import { Text } from "@/components/atoms/Text/Text";
|
||||
import { Chat } from "@/components/contextual/Chat/Chat";
|
||||
import { ChatInput } from "@/components/contextual/Chat/components/ChatInput/ChatInput";
|
||||
import { getHomepageRoute } from "@/lib/constants";
|
||||
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
|
||||
import {
|
||||
Flag,
|
||||
type FlagValues,
|
||||
useGetFlag,
|
||||
} from "@/services/feature-flags/use-get-flag";
|
||||
import { useFlags } from "launchdarkly-react-client-sdk";
|
||||
import { useRouter, useSearchParams } from "next/navigation";
|
||||
import { useEffect, useMemo, useRef, useState } from "react";
|
||||
import { getGreetingName, getQuickActions } from "./helpers";
|
||||
|
||||
type PageState =
|
||||
| { type: "welcome" }
|
||||
| { type: "creating"; prompt: string }
|
||||
| { type: "chat"; sessionId: string; initialPrompt?: string };
|
||||
import { ChatLoader } from "@/components/contextual/Chat/components/ChatLoader/ChatLoader";
|
||||
import { Dialog } from "@/components/molecules/Dialog/Dialog";
|
||||
import { useCopilotPage } from "./useCopilotPage";
|
||||
|
||||
export default function CopilotPage() {
|
||||
const router = useRouter();
|
||||
const searchParams = useSearchParams();
|
||||
const { user, isLoggedIn, isUserLoading } = useSupabase();
|
||||
const { state, handlers } = useCopilotPage();
|
||||
const {
|
||||
greetingName,
|
||||
quickActions,
|
||||
isLoading,
|
||||
pageState,
|
||||
isNewChatModalOpen,
|
||||
isReady,
|
||||
} = state;
|
||||
const {
|
||||
handleQuickAction,
|
||||
startChatWithPrompt,
|
||||
handleSessionNotFound,
|
||||
handleStreamingChange,
|
||||
handleCancelNewChat,
|
||||
proceedWithNewChat,
|
||||
handleNewChatModalOpen,
|
||||
} = handlers;
|
||||
|
||||
const isChatEnabled = useGetFlag(Flag.CHAT);
|
||||
const flags = useFlags<FlagValues>();
|
||||
const homepageRoute = getHomepageRoute(isChatEnabled);
|
||||
const envEnabled = process.env.NEXT_PUBLIC_LAUNCHDARKLY_ENABLED === "true";
|
||||
const clientId = process.env.NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID;
|
||||
const isLaunchDarklyConfigured = envEnabled && Boolean(clientId);
|
||||
const isFlagReady =
|
||||
!isLaunchDarklyConfigured || flags[Flag.CHAT] !== undefined;
|
||||
|
||||
const [pageState, setPageState] = useState<PageState>({ type: "welcome" });
|
||||
const initialPromptRef = useRef<Map<string, string>>(new Map());
|
||||
|
||||
const urlSessionId = searchParams.get("sessionId");
|
||||
|
||||
// Sync with URL sessionId (preserve initialPrompt from ref)
|
||||
useEffect(
|
||||
function syncSessionFromUrl() {
|
||||
if (urlSessionId) {
|
||||
// If we're already in chat state with this sessionId, don't overwrite
|
||||
if (pageState.type === "chat" && pageState.sessionId === urlSessionId) {
|
||||
return;
|
||||
}
|
||||
// Get initialPrompt from ref or current state
|
||||
const storedInitialPrompt = initialPromptRef.current.get(urlSessionId);
|
||||
const currentInitialPrompt =
|
||||
storedInitialPrompt ||
|
||||
(pageState.type === "creating"
|
||||
? pageState.prompt
|
||||
: pageState.type === "chat"
|
||||
? pageState.initialPrompt
|
||||
: undefined);
|
||||
if (currentInitialPrompt) {
|
||||
initialPromptRef.current.set(urlSessionId, currentInitialPrompt);
|
||||
}
|
||||
setPageState({
|
||||
type: "chat",
|
||||
sessionId: urlSessionId,
|
||||
initialPrompt: currentInitialPrompt,
|
||||
});
|
||||
} else if (pageState.type === "chat") {
|
||||
setPageState({ type: "welcome" });
|
||||
}
|
||||
},
|
||||
[urlSessionId],
|
||||
);
|
||||
|
||||
useEffect(
|
||||
function ensureAccess() {
|
||||
if (!isFlagReady) return;
|
||||
if (isChatEnabled === false) {
|
||||
router.replace(homepageRoute);
|
||||
}
|
||||
},
|
||||
[homepageRoute, isChatEnabled, isFlagReady, router],
|
||||
);
|
||||
|
||||
const greetingName = useMemo(
|
||||
function getName() {
|
||||
return getGreetingName(user);
|
||||
},
|
||||
[user],
|
||||
);
|
||||
|
||||
const quickActions = useMemo(function getActions() {
|
||||
return getQuickActions();
|
||||
}, []);
|
||||
|
||||
async function startChatWithPrompt(prompt: string) {
|
||||
if (!prompt?.trim()) return;
|
||||
if (pageState.type === "creating") return;
|
||||
|
||||
const trimmedPrompt = prompt.trim();
|
||||
setPageState({ type: "creating", prompt: trimmedPrompt });
|
||||
|
||||
try {
|
||||
// Create session
|
||||
const sessionResponse = await postV2CreateSession({
|
||||
body: JSON.stringify({}),
|
||||
});
|
||||
|
||||
if (sessionResponse.status !== 200 || !sessionResponse.data?.id) {
|
||||
throw new Error("Failed to create session");
|
||||
}
|
||||
|
||||
const sessionId = sessionResponse.data.id;
|
||||
|
||||
// Store initialPrompt in ref so it persists across re-renders
|
||||
initialPromptRef.current.set(sessionId, trimmedPrompt);
|
||||
|
||||
// Update URL and show Chat with initial prompt
|
||||
// Chat will handle sending the message and streaming
|
||||
window.history.replaceState(null, "", `/copilot?sessionId=${sessionId}`);
|
||||
setPageState({ type: "chat", sessionId, initialPrompt: trimmedPrompt });
|
||||
} catch (error) {
|
||||
console.error("[CopilotPage] Failed to start chat:", error);
|
||||
setPageState({ type: "welcome" });
|
||||
}
|
||||
}
|
||||
|
||||
function handleQuickAction(action: string) {
|
||||
startChatWithPrompt(action);
|
||||
}
|
||||
|
||||
function handleSessionNotFound() {
|
||||
router.replace("/copilot");
|
||||
}
|
||||
|
||||
if (!isFlagReady || isChatEnabled === false || !isLoggedIn) {
|
||||
if (!isReady) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -150,7 +43,55 @@ export default function CopilotPage() {
|
||||
urlSessionId={pageState.sessionId}
|
||||
initialPrompt={pageState.initialPrompt}
|
||||
onSessionNotFound={handleSessionNotFound}
|
||||
onStreamingChange={handleStreamingChange}
|
||||
/>
|
||||
<Dialog
|
||||
title="Interrupt current chat?"
|
||||
styling={{ maxWidth: 300, width: "100%" }}
|
||||
controlled={{
|
||||
isOpen: isNewChatModalOpen,
|
||||
set: handleNewChatModalOpen,
|
||||
}}
|
||||
onClose={handleCancelNewChat}
|
||||
>
|
||||
<Dialog.Content>
|
||||
<div className="flex flex-col gap-4">
|
||||
<Text variant="body">
|
||||
The current chat response will be interrupted. Are you sure you
|
||||
want to start a new chat?
|
||||
</Text>
|
||||
<Dialog.Footer>
|
||||
<Button
|
||||
type="button"
|
||||
variant="outline"
|
||||
onClick={handleCancelNewChat}
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
type="button"
|
||||
variant="primary"
|
||||
onClick={proceedWithNewChat}
|
||||
>
|
||||
Start new chat
|
||||
</Button>
|
||||
</Dialog.Footer>
|
||||
</div>
|
||||
</Dialog.Content>
|
||||
</Dialog>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
if (pageState.type === "newChat") {
|
||||
return (
|
||||
<div className="flex h-full flex-1 flex-col items-center justify-center bg-[#f8f8f9]">
|
||||
<div className="flex flex-col items-center gap-4">
|
||||
<ChatLoader />
|
||||
<Text variant="body" className="text-zinc-500">
|
||||
Loading your chats...
|
||||
</Text>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -158,18 +99,18 @@ export default function CopilotPage() {
|
||||
// Show loading state while creating session and sending first message
|
||||
if (pageState.type === "creating") {
|
||||
return (
|
||||
<div className="flex h-full flex-1 flex-col items-center justify-center bg-[#f8f8f9] px-6 py-10">
|
||||
<LoadingSpinner size="large" />
|
||||
<Text variant="body" className="mt-4 text-zinc-500">
|
||||
Starting your chat...
|
||||
</Text>
|
||||
<div className="flex h-full flex-1 flex-col items-center justify-center bg-[#f8f8f9]">
|
||||
<div className="flex flex-col items-center gap-4">
|
||||
<ChatLoader />
|
||||
<Text variant="body" className="text-zinc-500">
|
||||
Loading your chats...
|
||||
</Text>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Show Welcome screen
|
||||
const isLoading = isUserLoading;
|
||||
|
||||
return (
|
||||
<div className="flex h-full flex-1 items-center justify-center overflow-y-auto bg-[#f8f8f9] px-6 py-10">
|
||||
<div className="w-full text-center">
|
||||
|
||||
@@ -0,0 +1,266 @@
|
||||
import { postV2CreateSession } from "@/app/api/__generated__/endpoints/chat/chat";
|
||||
import { useToast } from "@/components/molecules/Toast/use-toast";
|
||||
import { getHomepageRoute } from "@/lib/constants";
|
||||
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
|
||||
import {
|
||||
Flag,
|
||||
type FlagValues,
|
||||
useGetFlag,
|
||||
} from "@/services/feature-flags/use-get-flag";
|
||||
import * as Sentry from "@sentry/nextjs";
|
||||
import { useFlags } from "launchdarkly-react-client-sdk";
|
||||
import { useRouter } from "next/navigation";
|
||||
import { useEffect, useReducer } from "react";
|
||||
import { useNewChat } from "./NewChatContext";
|
||||
import { getGreetingName, getQuickActions, type PageState } from "./helpers";
|
||||
import { useCopilotURLState } from "./useCopilotURLState";
|
||||
|
||||
type CopilotState = {
|
||||
pageState: PageState;
|
||||
isStreaming: boolean;
|
||||
isNewChatModalOpen: boolean;
|
||||
initialPrompts: Record<string, string>;
|
||||
previousSessionId: string | null;
|
||||
};
|
||||
|
||||
type CopilotAction =
|
||||
| { type: "setPageState"; pageState: PageState }
|
||||
| { type: "setStreaming"; isStreaming: boolean }
|
||||
| { type: "setNewChatModalOpen"; isOpen: boolean }
|
||||
| { type: "setInitialPrompt"; sessionId: string; prompt: string }
|
||||
| { type: "setPreviousSessionId"; sessionId: string | null };
|
||||
|
||||
function isSamePageState(next: PageState, current: PageState) {
|
||||
if (next.type !== current.type) return false;
|
||||
if (next.type === "creating" && current.type === "creating") {
|
||||
return next.prompt === current.prompt;
|
||||
}
|
||||
if (next.type === "chat" && current.type === "chat") {
|
||||
return (
|
||||
next.sessionId === current.sessionId &&
|
||||
next.initialPrompt === current.initialPrompt
|
||||
);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
function copilotReducer(
|
||||
state: CopilotState,
|
||||
action: CopilotAction,
|
||||
): CopilotState {
|
||||
if (action.type === "setPageState") {
|
||||
if (isSamePageState(action.pageState, state.pageState)) return state;
|
||||
return { ...state, pageState: action.pageState };
|
||||
}
|
||||
if (action.type === "setStreaming") {
|
||||
if (action.isStreaming === state.isStreaming) return state;
|
||||
return { ...state, isStreaming: action.isStreaming };
|
||||
}
|
||||
if (action.type === "setNewChatModalOpen") {
|
||||
if (action.isOpen === state.isNewChatModalOpen) return state;
|
||||
return { ...state, isNewChatModalOpen: action.isOpen };
|
||||
}
|
||||
if (action.type === "setInitialPrompt") {
|
||||
if (state.initialPrompts[action.sessionId] === action.prompt) return state;
|
||||
return {
|
||||
...state,
|
||||
initialPrompts: {
|
||||
...state.initialPrompts,
|
||||
[action.sessionId]: action.prompt,
|
||||
},
|
||||
};
|
||||
}
|
||||
if (action.type === "setPreviousSessionId") {
|
||||
if (state.previousSessionId === action.sessionId) return state;
|
||||
return { ...state, previousSessionId: action.sessionId };
|
||||
}
|
||||
return state;
|
||||
}
|
||||
|
||||
export function useCopilotPage() {
|
||||
const router = useRouter();
|
||||
const { user, isLoggedIn, isUserLoading } = useSupabase();
|
||||
const { toast } = useToast();
|
||||
|
||||
const isChatEnabled = useGetFlag(Flag.CHAT);
|
||||
const flags = useFlags<FlagValues>();
|
||||
const homepageRoute = getHomepageRoute(isChatEnabled);
|
||||
const envEnabled = process.env.NEXT_PUBLIC_LAUNCHDARKLY_ENABLED === "true";
|
||||
const clientId = process.env.NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID;
|
||||
const isLaunchDarklyConfigured = envEnabled && Boolean(clientId);
|
||||
const isFlagReady =
|
||||
!isLaunchDarklyConfigured || flags[Flag.CHAT] !== undefined;
|
||||
|
||||
const [state, dispatch] = useReducer(copilotReducer, {
|
||||
pageState: { type: "welcome" },
|
||||
isStreaming: false,
|
||||
isNewChatModalOpen: false,
|
||||
initialPrompts: {},
|
||||
previousSessionId: null,
|
||||
});
|
||||
|
||||
const newChatContext = useNewChat();
|
||||
const greetingName = getGreetingName(user);
|
||||
const quickActions = getQuickActions();
|
||||
|
||||
function setPageState(pageState: PageState) {
|
||||
dispatch({ type: "setPageState", pageState });
|
||||
}
|
||||
|
||||
function setInitialPrompt(sessionId: string, prompt: string) {
|
||||
dispatch({ type: "setInitialPrompt", sessionId, prompt });
|
||||
}
|
||||
|
||||
function setPreviousSessionId(sessionId: string | null) {
|
||||
dispatch({ type: "setPreviousSessionId", sessionId });
|
||||
}
|
||||
|
||||
const { setUrlSessionId } = useCopilotURLState({
|
||||
pageState: state.pageState,
|
||||
initialPrompts: state.initialPrompts,
|
||||
previousSessionId: state.previousSessionId,
|
||||
setPageState,
|
||||
setInitialPrompt,
|
||||
setPreviousSessionId,
|
||||
});
|
||||
|
||||
useEffect(
|
||||
function registerNewChatHandler() {
|
||||
if (!newChatContext) return;
|
||||
newChatContext.setOnNewChatClick(handleNewChatClick);
|
||||
return function cleanup() {
|
||||
newChatContext.setOnNewChatClick(undefined);
|
||||
};
|
||||
},
|
||||
[newChatContext, handleNewChatClick],
|
||||
);
|
||||
|
||||
useEffect(
|
||||
function transitionNewChatToWelcome() {
|
||||
if (state.pageState.type === "newChat") {
|
||||
function setWelcomeState() {
|
||||
dispatch({ type: "setPageState", pageState: { type: "welcome" } });
|
||||
}
|
||||
|
||||
const timer = setTimeout(setWelcomeState, 300);
|
||||
|
||||
return function cleanup() {
|
||||
clearTimeout(timer);
|
||||
};
|
||||
}
|
||||
},
|
||||
[state.pageState.type],
|
||||
);
|
||||
|
||||
useEffect(
|
||||
function ensureAccess() {
|
||||
if (!isFlagReady) return;
|
||||
if (isChatEnabled === false) {
|
||||
router.replace(homepageRoute);
|
||||
}
|
||||
},
|
||||
[homepageRoute, isChatEnabled, isFlagReady, router],
|
||||
);
|
||||
|
||||
async function startChatWithPrompt(prompt: string) {
|
||||
if (!prompt?.trim()) return;
|
||||
if (state.pageState.type === "creating") return;
|
||||
|
||||
const trimmedPrompt = prompt.trim();
|
||||
dispatch({
|
||||
type: "setPageState",
|
||||
pageState: { type: "creating", prompt: trimmedPrompt },
|
||||
});
|
||||
|
||||
try {
|
||||
const sessionResponse = await postV2CreateSession({
|
||||
body: JSON.stringify({}),
|
||||
});
|
||||
|
||||
if (sessionResponse.status !== 200 || !sessionResponse.data?.id) {
|
||||
throw new Error("Failed to create session");
|
||||
}
|
||||
|
||||
const sessionId = sessionResponse.data.id;
|
||||
|
||||
dispatch({
|
||||
type: "setInitialPrompt",
|
||||
sessionId,
|
||||
prompt: trimmedPrompt,
|
||||
});
|
||||
|
||||
await setUrlSessionId(sessionId, { shallow: false });
|
||||
dispatch({
|
||||
type: "setPageState",
|
||||
pageState: { type: "chat", sessionId, initialPrompt: trimmedPrompt },
|
||||
});
|
||||
} catch (error) {
|
||||
console.error("[CopilotPage] Failed to start chat:", error);
|
||||
toast({ title: "Failed to start chat", variant: "destructive" });
|
||||
Sentry.captureException(error);
|
||||
dispatch({ type: "setPageState", pageState: { type: "welcome" } });
|
||||
}
|
||||
}
|
||||
|
||||
function handleQuickAction(action: string) {
|
||||
startChatWithPrompt(action);
|
||||
}
|
||||
|
||||
function handleSessionNotFound() {
|
||||
router.replace("/copilot");
|
||||
}
|
||||
|
||||
function handleStreamingChange(isStreamingValue: boolean) {
|
||||
dispatch({ type: "setStreaming", isStreaming: isStreamingValue });
|
||||
}
|
||||
|
||||
async function proceedWithNewChat() {
|
||||
dispatch({ type: "setNewChatModalOpen", isOpen: false });
|
||||
if (newChatContext?.performNewChat) {
|
||||
newChatContext.performNewChat();
|
||||
return;
|
||||
}
|
||||
try {
|
||||
await setUrlSessionId(null, { shallow: false });
|
||||
} catch (error) {
|
||||
console.error("[CopilotPage] Failed to clear session:", error);
|
||||
}
|
||||
router.replace("/copilot");
|
||||
}
|
||||
|
||||
function handleCancelNewChat() {
|
||||
dispatch({ type: "setNewChatModalOpen", isOpen: false });
|
||||
}
|
||||
|
||||
function handleNewChatModalOpen(isOpen: boolean) {
|
||||
dispatch({ type: "setNewChatModalOpen", isOpen });
|
||||
}
|
||||
|
||||
function handleNewChatClick() {
|
||||
if (state.isStreaming) {
|
||||
dispatch({ type: "setNewChatModalOpen", isOpen: true });
|
||||
} else {
|
||||
proceedWithNewChat();
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
state: {
|
||||
greetingName,
|
||||
quickActions,
|
||||
isLoading: isUserLoading,
|
||||
pageState: state.pageState,
|
||||
isNewChatModalOpen: state.isNewChatModalOpen,
|
||||
isReady: isFlagReady && isChatEnabled !== false && isLoggedIn,
|
||||
},
|
||||
handlers: {
|
||||
handleQuickAction,
|
||||
startChatWithPrompt,
|
||||
handleSessionNotFound,
|
||||
handleStreamingChange,
|
||||
handleCancelNewChat,
|
||||
proceedWithNewChat,
|
||||
handleNewChatModalOpen,
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,80 @@
import { parseAsString, useQueryState } from "nuqs";
import { useLayoutEffect } from "react";
import {
  getInitialPromptFromState,
  type PageState,
  shouldResetToWelcome,
} from "./helpers";

interface UseCopilotUrlStateArgs {
  pageState: PageState;
  initialPrompts: Record<string, string>;
  previousSessionId: string | null;
  setPageState: (pageState: PageState) => void;
  setInitialPrompt: (sessionId: string, prompt: string) => void;
  setPreviousSessionId: (sessionId: string | null) => void;
}

export function useCopilotURLState({
  pageState,
  initialPrompts,
  previousSessionId,
  setPageState,
  setInitialPrompt,
  setPreviousSessionId,
}: UseCopilotUrlStateArgs) {
  const [urlSessionId, setUrlSessionId] = useQueryState(
    "sessionId",
    parseAsString,
  );

  function syncSessionFromUrl() {
    if (urlSessionId) {
      if (pageState.type === "chat" && pageState.sessionId === urlSessionId) {
        setPreviousSessionId(urlSessionId);
        return;
      }

      const storedInitialPrompt = initialPrompts[urlSessionId];
      const currentInitialPrompt = getInitialPromptFromState(
        pageState,
        storedInitialPrompt,
      );

      if (currentInitialPrompt) {
        setInitialPrompt(urlSessionId, currentInitialPrompt);
      }

      setPageState({
        type: "chat",
        sessionId: urlSessionId,
        initialPrompt: currentInitialPrompt,
      });
      setPreviousSessionId(urlSessionId);
      return;
    }

    const wasInChat = previousSessionId !== null && pageState.type === "chat";
    setPreviousSessionId(null);
    if (wasInChat) {
      setPageState({ type: "newChat" });
      return;
    }

    if (shouldResetToWelcome(pageState)) {
      setPageState({ type: "welcome" });
    }
  }

  useLayoutEffect(syncSessionFromUrl, [
    urlSessionId,
    pageState.type,
    previousSessionId,
    initialPrompts,
  ]);

  return {
    urlSessionId,
    setUrlSessionId,
  };
}

@@ -1,33 +1,62 @@
"use client";

import { TooltipProvider } from "@/components/atoms/Tooltip/BaseTooltip";
import {
  PostHogPageViewTracker,
  PostHogUserTracker,
} from "@/components/monitor/PostHogUserTracker";
import { SentryUserTracker } from "@/components/monitor/SentryUserTracker";
import { BackendAPIProvider } from "@/lib/autogpt-server-api/context";
import { getQueryClient } from "@/lib/react-query/queryClient";
import CredentialsProvider from "@/providers/agent-credentials/credentials-provider";
import OnboardingProvider from "@/providers/onboarding/onboarding-provider";
import { LaunchDarklyProvider } from "@/services/feature-flags/feature-flag-provider";
import { PostHogProvider as PHProvider } from "@posthog/react";
import { QueryClientProvider } from "@tanstack/react-query";
import { ThemeProvider, ThemeProviderProps } from "next-themes";
import { NuqsAdapter } from "nuqs/adapters/next/app";
import posthog from "posthog-js";
import { Suspense, useEffect } from "react";

function PostHogProvider({ children }: { children: React.ReactNode }) {
  useEffect(() => {
    if (process.env.NEXT_PUBLIC_POSTHOG_KEY) {
      posthog.init(process.env.NEXT_PUBLIC_POSTHOG_KEY, {
        api_host: process.env.NEXT_PUBLIC_POSTHOG_HOST,
        defaults: "2025-11-30",
        capture_pageview: false,
        capture_pageleave: true,
        autocapture: true,
      });
    }
  }, []);

  return <PHProvider client={posthog}>{children}</PHProvider>;
}

export function Providers({ children, ...props }: ThemeProviderProps) {
  const queryClient = getQueryClient();
  return (
    <QueryClientProvider client={queryClient}>
      <NuqsAdapter>
        <BackendAPIProvider>
          <SentryUserTracker />
          <CredentialsProvider>
            <LaunchDarklyProvider>
              <OnboardingProvider>
                <ThemeProvider forcedTheme="light" {...props}>
                  <TooltipProvider>{children}</TooltipProvider>
                </ThemeProvider>
              </OnboardingProvider>
            </LaunchDarklyProvider>
          </CredentialsProvider>
        </BackendAPIProvider>
        <PostHogProvider>
          <BackendAPIProvider>
            <SentryUserTracker />
            <PostHogUserTracker />
            <Suspense fallback={null}>
              <PostHogPageViewTracker />
            </Suspense>
            <CredentialsProvider>
              <LaunchDarklyProvider>
                <OnboardingProvider>
                  <ThemeProvider forcedTheme="light" {...props}>
                    <TooltipProvider>{children}</TooltipProvider>
                  </ThemeProvider>
                </OnboardingProvider>
              </LaunchDarklyProvider>
            </CredentialsProvider>
          </BackendAPIProvider>
        </PostHogProvider>
      </NuqsAdapter>
    </QueryClientProvider>
  );

@@ -13,6 +13,7 @@ export interface ChatProps {
  urlSessionId?: string | null;
  initialPrompt?: string;
  onSessionNotFound?: () => void;
  onStreamingChange?: (isStreaming: boolean) => void;
}

export function Chat({
@@ -20,6 +21,7 @@ export function Chat({
  urlSessionId,
  initialPrompt,
  onSessionNotFound,
  onStreamingChange,
}: ChatProps) {
  const hasHandledNotFoundRef = useRef(false);
  const {
@@ -73,6 +75,7 @@ export function Chat({
          initialMessages={messages}
          initialPrompt={initialPrompt}
          className="flex-1"
          onStreamingChange={onStreamingChange}
        />
      )}
    </main>

@@ -4,6 +4,7 @@ import { Text } from "@/components/atoms/Text/Text";
|
||||
import { Dialog } from "@/components/molecules/Dialog/Dialog";
|
||||
import { useBreakpoint } from "@/lib/hooks/useBreakpoint";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { useEffect } from "react";
|
||||
import { ChatInput } from "../ChatInput/ChatInput";
|
||||
import { MessageList } from "../MessageList/MessageList";
|
||||
import { useChatContainer } from "./useChatContainer";
|
||||
@@ -13,6 +14,7 @@ export interface ChatContainerProps {
|
||||
initialMessages: SessionDetailResponse["messages"];
|
||||
initialPrompt?: string;
|
||||
className?: string;
|
||||
onStreamingChange?: (isStreaming: boolean) => void;
|
||||
}
|
||||
|
||||
export function ChatContainer({
|
||||
@@ -20,6 +22,7 @@ export function ChatContainer({
|
||||
initialMessages,
|
||||
initialPrompt,
|
||||
className,
|
||||
onStreamingChange,
|
||||
}: ChatContainerProps) {
|
||||
const {
|
||||
messages,
|
||||
@@ -36,6 +39,10 @@ export function ChatContainer({
|
||||
initialPrompt,
|
||||
});
|
||||
|
||||
useEffect(() => {
|
||||
onStreamingChange?.(isStreaming);
|
||||
}, [isStreaming, onStreamingChange]);
|
||||
|
||||
const breakpoint = useBreakpoint();
|
||||
const isMobile =
|
||||
breakpoint === "base" || breakpoint === "sm" || breakpoint === "md";
|
||||
|
||||
@@ -1,12 +1,7 @@
import { Text } from "@/components/atoms/Text/Text";

export function ChatLoader() {
  return (
    <Text
      variant="small"
      className="bg-gradient-to-r from-neutral-600 via-neutral-500 to-neutral-600 bg-[length:200%_100%] bg-clip-text text-xs text-transparent [animation:shimmer_2s_ease-in-out_infinite]"
    >
      Taking a bit more time...
    </Text>
    <div className="flex items-center gap-2">
      <div className="h-5 w-5 animate-loader rounded-full bg-black" />
    </div>
  );
}

@@ -7,7 +7,6 @@ import {
  ArrowsClockwiseIcon,
  CheckCircleIcon,
  CheckIcon,
  CopyIcon,
} from "@phosphor-icons/react";
import { useRouter } from "next/navigation";
import { useCallback, useState } from "react";
@@ -340,11 +339,26 @@ export function ChatMessage({
            size="icon"
            onClick={handleCopy}
            aria-label="Copy message"
            className="p-1"
          >
            {copied ? (
              <CheckIcon className="size-4 text-green-600" />
            ) : (
              <CopyIcon className="size-4 text-zinc-600" />
              <svg
                xmlns="http://www.w3.org/2000/svg"
                width="24"
                height="24"
                viewBox="0 0 24 24"
                fill="none"
                stroke="currentColor"
                strokeWidth="2"
                strokeLinecap="round"
                strokeLinejoin="round"
                className="size-3 text-zinc-600"
              >
                <rect width="14" height="14" x="8" y="8" rx="2" ry="2" />
                <path d="M4 16c-1.1 0-2-.9-2-2V4c0-1.1.9-2 2-2h10c1.1 0 2 .9 2 2" />
              </svg>
            )}
          </Button>
        )}

@@ -1,7 +1,6 @@
import { cn } from "@/lib/utils";
import { useEffect, useRef, useState } from "react";
import { AIChatBubble } from "../AIChatBubble/AIChatBubble";
import { ChatLoader } from "../ChatLoader/ChatLoader";

export interface ThinkingMessageProps {
  className?: string;
@@ -9,7 +8,9 @@ export interface ThinkingMessageProps {

export function ThinkingMessage({ className }: ThinkingMessageProps) {
  const [showSlowLoader, setShowSlowLoader] = useState(false);
  const [showCoffeeMessage, setShowCoffeeMessage] = useState(false);
  const timerRef = useRef<NodeJS.Timeout | null>(null);
  const coffeeTimerRef = useRef<NodeJS.Timeout | null>(null);

  useEffect(() => {
    if (timerRef.current === null) {
@@ -18,11 +19,21 @@ export function ThinkingMessage({ className }: ThinkingMessageProps) {
      }, 8000);
    }

    if (coffeeTimerRef.current === null) {
      coffeeTimerRef.current = setTimeout(() => {
        setShowCoffeeMessage(true);
      }, 10000);
    }

    return () => {
      if (timerRef.current) {
        clearTimeout(timerRef.current);
        timerRef.current = null;
      }
      if (coffeeTimerRef.current) {
        clearTimeout(coffeeTimerRef.current);
        coffeeTimerRef.current = null;
      }
    };
  }, []);

@@ -37,16 +48,16 @@ export function ThinkingMessage({ className }: ThinkingMessageProps) {
      <div className="flex min-w-0 flex-1 flex-col">
        <AIChatBubble>
          <div className="transition-all duration-500 ease-in-out">
            {showSlowLoader ? (
              <ChatLoader />
            {showCoffeeMessage ? (
              <span className="inline-block animate-shimmer bg-gradient-to-r from-neutral-400 via-neutral-600 to-neutral-400 bg-[length:200%_100%] bg-clip-text text-transparent">
                This could take a few minutes, grab a coffee ☕️
              </span>
            ) : showSlowLoader ? (
              <span className="inline-block animate-shimmer bg-gradient-to-r from-neutral-400 via-neutral-600 to-neutral-400 bg-[length:200%_100%] bg-clip-text text-transparent">
                Taking a bit more time...
              </span>
            ) : (
              <span
                className="inline-block bg-gradient-to-r from-neutral-400 via-neutral-600 to-neutral-400 bg-clip-text text-transparent"
                style={{
                  backgroundSize: "200% 100%",
                  animation: "shimmer 2s ease-in-out infinite",
                }}
              >
              <span className="inline-block animate-shimmer bg-gradient-to-r from-neutral-400 via-neutral-600 to-neutral-400 bg-[length:200%_100%] bg-clip-text text-transparent">
                Thinking...
              </span>
            )}

@@ -0,0 +1,64 @@
"use client";

import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
import { usePathname, useSearchParams } from "next/navigation";
import posthog from "posthog-js";
import { useEffect, useRef } from "react";

/**
 * PostHogUserTracker component identifies users in PostHog for analytics.
 * This component should be placed high in the component tree to ensure user
 * identification happens as soon as the user logs in.
 *
 * It automatically:
 * - Identifies the user when they log in (linking anonymous to authenticated)
 * - Resets PostHog when a user logs out
 * - Updates identification when user data changes
 */
export function PostHogUserTracker() {
  const { user, isUserLoading } = useSupabase();
  const previousUserIdRef = useRef<string | null>(null);

  useEffect(() => {
    if (isUserLoading) return;

    if (user) {
      // Only identify if we haven't already identified this user
      if (previousUserIdRef.current !== user.id) {
        posthog.identify(user.id, {
          email: user.email,
          ...(user.user_metadata?.name && { name: user.user_metadata.name }),
        });
        previousUserIdRef.current = user.id;
      }
    } else if (previousUserIdRef.current !== null) {
      // User logged out - reset PostHog to generate new anonymous ID
      posthog.reset();
      previousUserIdRef.current = null;
    }
  }, [user, isUserLoading]);

  return null;
}

/**
 * PostHogPageViewTracker captures page views on route changes in Next.js App Router.
 * The default PostHog capture_pageview only works for initial page loads.
 * This component ensures soft navigations (client-side route changes) are also tracked.
 */
export function PostHogPageViewTracker() {
  const pathname = usePathname();
  const searchParams = useSearchParams();

  useEffect(() => {
    if (pathname) {
      let url = window.origin + pathname;
      if (searchParams && searchParams.toString()) {
        url = url + `?${searchParams.toString()}`;
      }
      posthog.capture("$pageview", { $current_url: url });
    }
  }, [pathname, searchParams]);

  return null;
}

@@ -157,12 +157,21 @@ const config = {
          backgroundPosition: "-200% 0",
        },
      },
      loader: {
        "0%": {
          boxShadow: "0 0 0 0 rgba(0, 0, 0, 0.25)",
        },
        "100%": {
          boxShadow: "0 0 0 30px rgba(0, 0, 0, 0)",
        },
      },
    },
    animation: {
      "accordion-down": "accordion-down 0.2s ease-out",
      "accordion-up": "accordion-up 0.2s ease-out",
      "fade-in": "fade-in 0.2s ease-out",
      shimmer: "shimmer 2s ease-in-out infinite",
      loader: "loader 1s infinite",
    },
    transitionDuration: {
      "2000": "2000ms",

BIN  docs/integrations/.gitbook/assets/Ollama-Add-Prompts.png  (Normal file, 115 KiB)
BIN  docs/integrations/.gitbook/assets/Ollama-Output.png  (Normal file, 29 KiB)
BIN  docs/integrations/.gitbook/assets/Ollama-Remote-Host.png  (Normal file, 6.0 KiB)
BIN  docs/integrations/.gitbook/assets/Ollama-Select-Llama32.png  (Normal file, 81 KiB)
BIN  docs/integrations/.gitbook/assets/Select-AI-block.png  (Normal file, 116 KiB)
BIN  docs/integrations/.gitbook/assets/e2b-dashboard.png  (Normal file, 504 KiB)
BIN  docs/integrations/.gitbook/assets/e2b-log-url.png  (Normal file, 43 KiB)
BIN  docs/integrations/.gitbook/assets/e2b-new-tag.png  (Normal file, 47 KiB)
BIN  docs/integrations/.gitbook/assets/e2b-tag-button.png  (Normal file, 20 KiB)
BIN  docs/integrations/.gitbook/assets/get-repo-dialog.png  (Normal file, 68 KiB)
docs/integrations/SUMMARY.md  (Normal file, 133 lines)
@@ -0,0 +1,133 @@
# Table of contents

* [AutoGPT Blocks Overview](README.md)

## Guides

* [LLM Providers](guides/llm-providers.md)
* [Voice Providers](guides/voice-providers.md)

## Block Integrations

* [Airtable Bases](block-integrations/airtable/bases.md)
* [Airtable Records](block-integrations/airtable/records.md)
* [Airtable Schema](block-integrations/airtable/schema.md)
* [Airtable Triggers](block-integrations/airtable/triggers.md)
* [Apollo Organization](block-integrations/apollo/organization.md)
* [Apollo People](block-integrations/apollo/people.md)
* [Apollo Person](block-integrations/apollo/person.md)
* [Ayrshare Post To Bluesky](block-integrations/ayrshare/post_to_bluesky.md)
* [Ayrshare Post To Facebook](block-integrations/ayrshare/post_to_facebook.md)
* [Ayrshare Post To GMB](block-integrations/ayrshare/post_to_gmb.md)
* [Ayrshare Post To Instagram](block-integrations/ayrshare/post_to_instagram.md)
* [Ayrshare Post To LinkedIn](block-integrations/ayrshare/post_to_linkedin.md)
* [Ayrshare Post To Pinterest](block-integrations/ayrshare/post_to_pinterest.md)
* [Ayrshare Post To Reddit](block-integrations/ayrshare/post_to_reddit.md)
* [Ayrshare Post To Snapchat](block-integrations/ayrshare/post_to_snapchat.md)
* [Ayrshare Post To Telegram](block-integrations/ayrshare/post_to_telegram.md)
* [Ayrshare Post To Threads](block-integrations/ayrshare/post_to_threads.md)
* [Ayrshare Post To TikTok](block-integrations/ayrshare/post_to_tiktok.md)
* [Ayrshare Post To X](block-integrations/ayrshare/post_to_x.md)
* [Ayrshare Post To YouTube](block-integrations/ayrshare/post_to_youtube.md)
* [Baas Bots](block-integrations/baas/bots.md)
* [Bannerbear Text Overlay](block-integrations/bannerbear/text_overlay.md)
* [Basic](block-integrations/basic.md)
* [Compass Triggers](block-integrations/compass/triggers.md)
* [Data](block-integrations/data.md)
* [Dataforseo Keyword Suggestions](block-integrations/dataforseo/keyword_suggestions.md)
* [Dataforseo Related Keywords](block-integrations/dataforseo/related_keywords.md)
* [Discord Bot Blocks](block-integrations/discord/bot_blocks.md)
* [Discord OAuth Blocks](block-integrations/discord/oauth_blocks.md)
* [Enrichlayer LinkedIn](block-integrations/enrichlayer/linkedin.md)
* [Exa Answers](block-integrations/exa/answers.md)
* [Exa Code Context](block-integrations/exa/code_context.md)
* [Exa Contents](block-integrations/exa/contents.md)
* [Exa Research](block-integrations/exa/research.md)
* [Exa Search](block-integrations/exa/search.md)
* [Exa Similar](block-integrations/exa/similar.md)
* [Exa Webhook Blocks](block-integrations/exa/webhook_blocks.md)
* [Exa Websets](block-integrations/exa/websets.md)
* [Exa Websets Enrichment](block-integrations/exa/websets_enrichment.md)
* [Exa Websets Import Export](block-integrations/exa/websets_import_export.md)
* [Exa Websets Items](block-integrations/exa/websets_items.md)
* [Exa Websets Monitor](block-integrations/exa/websets_monitor.md)
* [Exa Websets Polling](block-integrations/exa/websets_polling.md)
* [Exa Websets Search](block-integrations/exa/websets_search.md)
* [Fal AI Video Generator](block-integrations/fal/ai_video_generator.md)
* [Firecrawl Crawl](block-integrations/firecrawl/crawl.md)
* [Firecrawl Extract](block-integrations/firecrawl/extract.md)
* [Firecrawl Map](block-integrations/firecrawl/map.md)
* [Firecrawl Scrape](block-integrations/firecrawl/scrape.md)
* [Firecrawl Search](block-integrations/firecrawl/search.md)
* [Generic Webhook Triggers](block-integrations/generic_webhook/triggers.md)
* [GitHub Checks](block-integrations/github/checks.md)
* [GitHub CI](block-integrations/github/ci.md)
* [GitHub Issues](block-integrations/github/issues.md)
* [GitHub Pull Requests](block-integrations/github/pull_requests.md)
* [GitHub Repo](block-integrations/github/repo.md)
* [GitHub Reviews](block-integrations/github/reviews.md)
* [GitHub Statuses](block-integrations/github/statuses.md)
* [GitHub Triggers](block-integrations/github/triggers.md)
* [Google Calendar](block-integrations/google/calendar.md)
* [Google Docs](block-integrations/google/docs.md)
* [Google Gmail](block-integrations/google/gmail.md)
* [Google Sheets](block-integrations/google/sheets.md)
* [HubSpot Company](block-integrations/hubspot/company.md)
* [HubSpot Contact](block-integrations/hubspot/contact.md)
* [HubSpot Engagement](block-integrations/hubspot/engagement.md)
* [Jina Chunking](block-integrations/jina/chunking.md)
* [Jina Embeddings](block-integrations/jina/embeddings.md)
* [Jina Fact Checker](block-integrations/jina/fact_checker.md)
* [Jina Search](block-integrations/jina/search.md)
* [Linear Comment](block-integrations/linear/comment.md)
* [Linear Issues](block-integrations/linear/issues.md)
* [Linear Projects](block-integrations/linear/projects.md)
* [LLM](block-integrations/llm.md)
* [Logic](block-integrations/logic.md)
* [Misc](block-integrations/misc.md)
* [Multimedia](block-integrations/multimedia.md)
* [Notion Create Page](block-integrations/notion/create_page.md)
* [Notion Read Database](block-integrations/notion/read_database.md)
* [Notion Read Page](block-integrations/notion/read_page.md)
* [Notion Read Page Markdown](block-integrations/notion/read_page_markdown.md)
* [Notion Search](block-integrations/notion/search.md)
* [Nvidia Deepfake](block-integrations/nvidia/deepfake.md)
* [Replicate Flux Advanced](block-integrations/replicate/flux_advanced.md)
* [Replicate Replicate Block](block-integrations/replicate/replicate_block.md)
* [Search](block-integrations/search.md)
* [Slant3D Filament](block-integrations/slant3d/filament.md)
* [Slant3D Order](block-integrations/slant3d/order.md)
* [Slant3D Slicing](block-integrations/slant3d/slicing.md)
* [Slant3D Webhook](block-integrations/slant3d/webhook.md)
* [Smartlead Campaign](block-integrations/smartlead/campaign.md)
* [Stagehand Blocks](block-integrations/stagehand/blocks.md)
* [System Library Operations](block-integrations/system/library_operations.md)
* [System Store Operations](block-integrations/system/store_operations.md)
* [Text](block-integrations/text.md)
* [Todoist Comments](block-integrations/todoist/comments.md)
* [Todoist Labels](block-integrations/todoist/labels.md)
* [Todoist Projects](block-integrations/todoist/projects.md)
* [Todoist Sections](block-integrations/todoist/sections.md)
* [Todoist Tasks](block-integrations/todoist/tasks.md)
* [Twitter Blocks](block-integrations/twitter/blocks.md)
* [Twitter Bookmark](block-integrations/twitter/bookmark.md)
* [Twitter Follows](block-integrations/twitter/follows.md)
* [Twitter Hide](block-integrations/twitter/hide.md)
* [Twitter Like](block-integrations/twitter/like.md)
* [Twitter List Follows](block-integrations/twitter/list_follows.md)
* [Twitter List Lookup](block-integrations/twitter/list_lookup.md)
* [Twitter List Members](block-integrations/twitter/list_members.md)
* [Twitter List Tweets Lookup](block-integrations/twitter/list_tweets_lookup.md)
* [Twitter Manage](block-integrations/twitter/manage.md)
* [Twitter Manage Lists](block-integrations/twitter/manage_lists.md)
* [Twitter Mutes](block-integrations/twitter/mutes.md)
* [Twitter Pinned Lists](block-integrations/twitter/pinned_lists.md)
* [Twitter Quote](block-integrations/twitter/quote.md)
* [Twitter Retweet](block-integrations/twitter/retweet.md)
* [Twitter Search Spaces](block-integrations/twitter/search_spaces.md)
* [Twitter Spaces Lookup](block-integrations/twitter/spaces_lookup.md)
* [Twitter Timeline](block-integrations/twitter/timeline.md)
* [Twitter Tweet Lookup](block-integrations/twitter/tweet_lookup.md)
* [Twitter User Lookup](block-integrations/twitter/user_lookup.md)
* [Wolfram LLM API](block-integrations/wolfram/llm_api.md)
* [Zerobounce Validate Emails](block-integrations/zerobounce/validate_emails.md)
67
docs/integrations/block-integrations/claude_code.md
Normal file
@@ -0,0 +1,67 @@
# Claude Code Execution

## What it is

The Claude Code block executes complex coding tasks using Anthropic's Claude Code AI assistant in a secure E2B sandbox environment.

## What it does

This block allows you to delegate coding tasks to Claude Code, which can autonomously create files, install packages, run commands, and build complete applications within a sandboxed environment. Claude Code can handle multi-step development tasks and maintain conversation context across multiple turns.

## How it works

When activated, the block:

1. Creates or connects to an E2B sandbox (a secure, isolated Linux environment)
2. Installs the latest version of Claude Code in the sandbox
3. Optionally runs setup commands to prepare the environment
4. Executes your prompt using Claude Code, which can:
   - Create and edit files
   - Install dependencies (npm, pip, etc.)
   - Run terminal commands
   - Build and test applications
5. Extracts all text files created/modified during execution
6. Returns the response and files, optionally keeping the sandbox alive for follow-up tasks

The block supports conversation continuation through three mechanisms (sketched in code after this list):

- **Same sandbox continuation** (via `session_id` + `sandbox_id`): Resume on the same live sandbox
- **Fresh sandbox continuation** (via `conversation_history`): Restore context on a new sandbox if the previous one timed out
- **Dispose control** (`dispose_sandbox` flag): Keep sandbox alive for multi-turn conversations
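
Below is a minimal, illustrative sketch of same-sandbox continuation. The `run_claude_code_block` helper is hypothetical — it merely stands in for however your agent invokes this block — and its arguments and return shape mirror the inputs and outputs documented in the next sections; none of these names come from the block's actual API.

```python
# Hypothetical stand-in for invoking the Claude Code block from an agent.
# The argument names follow the inputs documented below; the returned dict
# mirrors the documented outputs. Real invocation happens inside the
# AutoGPT platform, not through this function.
def run_claude_code_block(prompt, session_id=None, sandbox_id=None,
                          conversation_history=None, dispose_sandbox=True,
                          timeout=300):
    return {
        "response": "",
        "files": [],
        "conversation_history": [],
        "session_id": "example-session",
        "sandbox_id": None if dispose_sandbox else "example-sandbox",
        "error": None,
    }


# Turn 1: start a new conversation and keep the sandbox alive for follow-ups.
first = run_claude_code_block(
    prompt="Create a Python FastAPI project with user authentication",
    dispose_sandbox=False,   # keep the sandbox so later turns can reuse it
    timeout=600,             # allow extra time for a larger task
)

# Turn 2: same-sandbox continuation via session_id + sandbox_id.
second = run_claude_code_block(
    prompt="Add rate limiting middleware",
    session_id=first["session_id"],
    sandbox_id=first["sandbox_id"],
    dispose_sandbox=True,    # dispose once the work is finished
)
```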
## Inputs

| Input | Description |
|-------|-------------|
| E2B Credentials | API key for the E2B platform to create the sandbox. Get one at [e2b.dev](https://e2b.dev/docs) |
| Anthropic Credentials | API key for Anthropic to power Claude Code. Get one at [Anthropic's website](https://console.anthropic.com) |
| Prompt | The task or instruction for Claude Code to execute. Claude Code can create files, install packages, run commands, and perform complex coding tasks |
| Timeout | Sandbox timeout in seconds (default: 300). Set higher for complex tasks. Note: Only applies when creating a new sandbox |
| Setup Commands | Optional shell commands to run before executing Claude Code (e.g., installing dependencies) |
| Working Directory | Working directory for Claude Code to operate in (default: /home/user) |
| Session ID | Session ID to resume a previous conversation. Leave empty for new conversations |
| Sandbox ID | Sandbox ID to reconnect to an existing sandbox. Required when resuming a session |
| Conversation History | Previous conversation history to restore context on a fresh sandbox if the previous one timed out |
| Dispose Sandbox | Whether to dispose of the sandbox after execution (default: true). Set to false to continue conversations later |

## Outputs

| Output | Description |
|--------|-------------|
| Response | The output/response from Claude Code execution |
| Files | List of text files created/modified during execution. Each file includes path, relative_path, name, and content fields |
| Conversation History | Full conversation history including this turn. Use to restore context on a fresh sandbox |
| Session ID | Session ID for this conversation. Pass back with sandbox_id to continue the conversation |
| Sandbox ID | ID of the sandbox instance (null if disposed). Pass back with session_id to continue the conversation |
| Error | Error message if execution failed |
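
As a rough example of consuming the Files output, the sketch below writes each returned file to a local directory using the documented path, relative_path, name, and content fields. The `result` dict is a stand-in for the block's outputs; how your agent actually surfaces them may differ.

```python
from pathlib import Path

# Stand-in for the block's outputs; only the documented file fields are used.
result = {
    "files": [
        {
            "path": "/home/user/app/main.py",
            "relative_path": "app/main.py",
            "name": "main.py",
            "content": "print('hello from the sandbox')\n",
        },
    ],
}

out_dir = Path("generated")
for file in result["files"]:
    target = out_dir / file["relative_path"]   # preserve the sandbox layout
    target.parent.mkdir(parents=True, exist_ok=True)
    target.write_text(file["content"])
    print(f"wrote {target} ({len(file['content'])} bytes)")
```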
## Possible use case

**API Documentation to Full Application:**

A product team wants to quickly prototype applications based on API documentation. They create an agent that:

1. Uses Firecrawl to fetch API documentation from a URL
2. Passes the docs to Claude Code with a prompt like "Create a web app that demonstrates all the key features of this API"
3. Claude Code builds a complete application with HTML/CSS/JS frontend, proper error handling, and example API calls
4. The Files output is used with GitHub blocks to push the generated code to a new repository

The team can then iterate on the application by passing the sandbox_id and session_id back to Claude Code with refinement requests like "Add authentication" or "Improve the UI", and Claude Code will modify the existing files in the same sandbox.

**Multi-turn Development:**

A developer uses Claude Code to scaffold a new project:

- Turn 1: "Create a Python FastAPI project with user authentication" (dispose_sandbox=false)
- Turn 2: Uses the returned session_id + sandbox_id to ask "Add rate limiting middleware"
- Turn 3: Continues with "Add comprehensive tests"

Each turn builds on the previous work in the same sandbox environment.
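
If the sandbox times out between turns, the sandbox_id output comes back as null and the same-sandbox path is no longer available. The sketch below, reusing the hypothetical `run_claude_code_block` placeholder from the earlier example, shows one way an agent might fall back to fresh-sandbox continuation via conversation_history; it is an assumption about orchestration, not part of the block itself.

```python
# Placeholder with the same shape as in the earlier sketch (illustrative only).
def run_claude_code_block(prompt, session_id=None, sandbox_id=None,
                          conversation_history=None, dispose_sandbox=True):
    return {"response": "", "files": [], "conversation_history": [],
            "session_id": "example-session",
            "sandbox_id": None if dispose_sandbox else "example-sandbox",
            "error": None}


previous = run_claude_code_block(
    prompt="Create a Python FastAPI project with user authentication",
    dispose_sandbox=False,
)

if previous["sandbox_id"] is not None:
    # Sandbox is still alive: continue in place with session_id + sandbox_id.
    follow_up = run_claude_code_block(
        prompt="Add comprehensive tests",
        session_id=previous["session_id"],
        sandbox_id=previous["sandbox_id"],
    )
else:
    # Sandbox timed out: replay the conversation history on a fresh sandbox
    # so Claude Code regains context before making the requested changes.
    follow_up = run_claude_code_block(
        prompt="Add comprehensive tests",
        conversation_history=previous["conversation_history"],
    )
```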