Mirror of https://github.com/Significant-Gravitas/AutoGPT.git (synced 2026-01-11 16:18:07 -05:00)

Compare commits: 46 commits, fix/sentry ... fix/accoun
| Author | SHA1 | Date |
|---|---|---|
| | cc85a37305 | |
| | 8daec53230 | |
| | ec6f593edc | |
| | e6ed83462d | |
| | 1851264a6a | |
| | 8f25d43089 | |
| | 0c435c4afa | |
| | 18002cb8f0 | |
| | 240a65e7b3 | |
| | 07368468a4 | |
| | 52aac09577 | |
| | 64a775dfa7 | |
| | 5d97706bb8 | |
| | 244f3c7c71 | |
| | 355219acbd | |
| | 1ab66eaed4 | |
| | 126d5838a0 | |
| | 643aea849b | |
| | 3b092f34d8 | |
| | 0921d23628 | |
| | 0edc669874 | |
| | e64d3d9b99 | |
| | 41dc39b97d | |
| | 80e573f33b | |
| | 06d20e7e4c | |
| | 07b5fe859a | |
| | 746dbbac84 | |
| | 901bb31e14 | |
| | 9438817702 | |
| | 184a73de7d | |
| | 1154f86a5c | |
| | 73c93cf554 | |
| | 02757d68f3 | |
| | 2569576d78 | |
| | 3b34c04a7a | |
| | 34c9ecf6bc | |
| | a66219fc1f | |
| | 8b3a741f60 | |
| | 7c48598f44 | |
| | 804e3b403a | |
| | 9c3f679f30 | |
| | 9977144b3d | |
| | 81d61a0c94 | |
| | e1e0fb7b25 | |
| | a054740aac | |
| | f78a6df96c | |
.github/workflows/claude-dependabot.yml (vendored, 2 changed lines)

@@ -80,7 +80,7 @@ jobs:
       - name: Set up Node.js
         uses: actions/setup-node@v4
         with:
-          node-version: "21"
+          node-version: "22"

       - name: Enable corepack
         run: corepack enable
.github/workflows/claude.yml (vendored, 2 changed lines)

@@ -90,7 +90,7 @@ jobs:
       - name: Set up Node.js
         uses: actions/setup-node@v4
         with:
-          node-version: "21"
+          node-version: "22"

       - name: Enable corepack
         run: corepack enable
.github/workflows/copilot-setup-steps.yml (vendored, 4 changed lines)

@@ -78,7 +78,7 @@ jobs:
       - name: Set up Node.js
         uses: actions/setup-node@v4
         with:
-          node-version: "21"
+          node-version: "22"

       - name: Enable corepack
         run: corepack enable
@@ -299,4 +299,4 @@ jobs:
           echo "✅ AutoGPT Platform development environment setup complete!"
           echo "🚀 Ready for development with Docker services running"
           echo "📝 Backend server: poetry run serve (port 8000)"
-          echo "🌐 Frontend server: pnpm dev (port 3000)"
+          echo "🌐 Frontend server: pnpm dev (port 3000)"
@@ -134,13 +134,6 @@ POSTMARK_WEBHOOK_TOKEN=
 # Error Tracking
 SENTRY_DSN=
-
-# Cloudflare Turnstile (CAPTCHA) Configuration
-# Get these from the Cloudflare Turnstile dashboard: https://dash.cloudflare.com/?to=/:account/turnstile
-# This is the backend secret key
-TURNSTILE_SECRET_KEY=
-# This is the verify URL
-TURNSTILE_VERIFY_URL=https://challenges.cloudflare.com/turnstile/v0/siteverify

 # Feature Flags
 LAUNCH_DARKLY_SDK_KEY=
@@ -24,6 +24,7 @@ from backend.util.file import MediaFileType


 class GeminiImageModel(str, Enum):
     NANO_BANANA = "google/nano-banana"
+    NANO_BANANA_PRO = "google/nano-banana-pro"


 class OutputFormat(str, Enum):
@@ -60,6 +60,14 @@ SIZE_TO_RECRAFT_DIMENSIONS = {
     ImageSize.TALL: "1024x1536",
 }

+SIZE_TO_NANO_BANANA_RATIO = {
+    ImageSize.SQUARE: "1:1",
+    ImageSize.LANDSCAPE: "4:3",
+    ImageSize.PORTRAIT: "3:4",
+    ImageSize.WIDE: "16:9",
+    ImageSize.TALL: "9:16",
+}
+

 class ImageStyle(str, Enum):
     """
@@ -98,6 +106,7 @@ class ImageGenModel(str, Enum):
     FLUX_ULTRA = "Flux 1.1 Pro Ultra"
     RECRAFT = "Recraft v3"
     SD3_5 = "Stable Diffusion 3.5 Medium"
+    NANO_BANANA_PRO = "Nano Banana Pro"


 class AIImageGeneratorBlock(Block):
@@ -261,6 +270,20 @@ class AIImageGeneratorBlock(Block):
             )
             return output

+            elif input_data.model == ImageGenModel.NANO_BANANA_PRO:
+                # Use Nano Banana Pro (Google Gemini 3 Pro Image)
+                input_params = {
+                    "prompt": modified_prompt,
+                    "aspect_ratio": SIZE_TO_NANO_BANANA_RATIO[input_data.size],
+                    "resolution": "2K",  # Default to 2K for good quality/cost balance
+                    "output_format": "jpg",
+                    "safety_filter_level": "block_only_high",  # Most permissive
+                }
+                output = await self._run_client(
+                    credentials, "google/nano-banana-pro", input_params
+                )
+                return output
+
         except Exception as e:
             raise RuntimeError(f"Failed to generate image: {str(e)}")
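The new Nano Banana Pro branch builds its Replicate input from the size-to-aspect-ratio map above and sends it through the block's own `_run_client` helper, which is not part of this compare. As a rough illustration only, a similar payload could be passed straight to the `replicate` client; the prompt value is a placeholder and a `REPLICATE_API_TOKEN` is assumed to be set in the environment.

```python
# Illustrative direct call, not the block's actual code path (_run_client is not shown here).
import replicate

input_params = {
    "prompt": "A lighthouse at dusk, oil painting",
    "aspect_ratio": "16:9",  # SIZE_TO_NANO_BANANA_RATIO[ImageSize.WIDE]
    "resolution": "2K",
    "output_format": "jpg",
    "safety_filter_level": "block_only_high",
}

# replicate.run blocks until the prediction finishes and returns the model output
output = replicate.run("google/nano-banana-pro", input=input_params)
print(output)
```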
autogpt_platform/backend/backend/blocks/codex.py (new file, 224 lines)

@@ -0,0 +1,224 @@
from dataclasses import dataclass
from enum import Enum
from typing import Any, Literal

from openai import AsyncOpenAI
from openai.types.responses import Response as OpenAIResponse
from pydantic import SecretStr

from backend.data.block import (
    Block,
    BlockCategory,
    BlockOutput,
    BlockSchemaInput,
    BlockSchemaOutput,
)
from backend.data.model import (
    APIKeyCredentials,
    CredentialsField,
    CredentialsMetaInput,
    NodeExecutionStats,
    SchemaField,
)
from backend.integrations.providers import ProviderName


@dataclass
class CodexCallResult:
    """Structured response returned by Codex invocations."""

    response: str
    reasoning: str
    response_id: str


class CodexModel(str, Enum):
    """Codex-capable OpenAI models."""

    GPT5_1_CODEX = "gpt-5.1-codex"


class CodexReasoningEffort(str, Enum):
    """Configuration for the Responses API reasoning effort."""

    NONE = "none"
    LOW = "low"
    MEDIUM = "medium"
    HIGH = "high"


CodexCredentials = CredentialsMetaInput[
    Literal[ProviderName.OPENAI], Literal["api_key"]
]

TEST_CREDENTIALS = APIKeyCredentials(
    id="e2fcb203-3f2d-4ad4-a344-8df3bc7db36b",
    provider="openai",
    api_key=SecretStr("mock-openai-api-key"),
    title="Mock OpenAI API key",
    expires_at=None,
)
TEST_CREDENTIALS_INPUT = {
    "provider": TEST_CREDENTIALS.provider,
    "id": TEST_CREDENTIALS.id,
    "type": TEST_CREDENTIALS.type,
    "title": TEST_CREDENTIALS.title,
}


def CodexCredentialsField() -> CodexCredentials:
    return CredentialsField(
        description="OpenAI API key with access to Codex models (Responses API).",
    )


class CodeGenerationBlock(Block):
    """Block that talks to Codex models via the OpenAI Responses API."""

    class Input(BlockSchemaInput):
        prompt: str = SchemaField(
            description="Primary coding request passed to the Codex model.",
            placeholder="Generate a Python function that reverses a list.",
        )
        system_prompt: str = SchemaField(
            title="System Prompt",
            default=(
                "You are Codex, an elite software engineer. "
                "Favor concise, working code and highlight important caveats."
            ),
            description="Optional instructions injected via the Responses API instructions field.",
            advanced=True,
        )
        model: CodexModel = SchemaField(
            title="Codex Model",
            default=CodexModel.GPT5_1_CODEX,
            description="Codex-optimized model served via the Responses API.",
            advanced=False,
        )
        reasoning_effort: CodexReasoningEffort = SchemaField(
            title="Reasoning Effort",
            default=CodexReasoningEffort.MEDIUM,
            description="Controls the Responses API reasoning budget. Select 'none' to skip reasoning configs.",
            advanced=True,
        )
        max_output_tokens: int | None = SchemaField(
            title="Max Output Tokens",
            default=2048,
            description="Upper bound for generated tokens (hard limit 128,000). Leave blank to let OpenAI decide.",
            advanced=True,
        )
        credentials: CodexCredentials = CodexCredentialsField()

    class Output(BlockSchemaOutput):
        response: str = SchemaField(
            description="Code-focused response returned by the Codex model."
        )
        reasoning: str = SchemaField(
            description="Reasoning summary returned by the model, if available.",
            default="",
        )
        response_id: str = SchemaField(
            description="ID of the Responses API call for auditing/debugging.",
            default="",
        )

    def __init__(self):
        super().__init__(
            id="86a2a099-30df-47b4-b7e4-34ae5f83e0d5",
            description="Generate or refactor code using OpenAI's Codex (Responses API).",
            categories={BlockCategory.AI, BlockCategory.DEVELOPER_TOOLS},
            input_schema=CodeGenerationBlock.Input,
            output_schema=CodeGenerationBlock.Output,
            test_input=[
                {
                    "prompt": "Write a TypeScript function that deduplicates an array.",
                    "credentials": TEST_CREDENTIALS_INPUT,
                }
            ],
            test_output=[
                ("response", str),
                ("reasoning", str),
                ("response_id", str),
            ],
            test_mock={
                "call_codex": lambda *_args, **_kwargs: CodexCallResult(
                    response="function dedupe<T>(items: T[]): T[] { return [...new Set(items)]; }",
                    reasoning="Used Set to remove duplicates in O(n).",
                    response_id="resp_test",
                )
            },
            test_credentials=TEST_CREDENTIALS,
        )
        self.execution_stats = NodeExecutionStats()

    async def call_codex(
        self,
        *,
        credentials: APIKeyCredentials,
        model: CodexModel,
        prompt: str,
        system_prompt: str,
        max_output_tokens: int | None,
        reasoning_effort: CodexReasoningEffort,
    ) -> CodexCallResult:
        """Invoke the OpenAI Responses API."""
        client = AsyncOpenAI(api_key=credentials.api_key.get_secret_value())

        request_payload: dict[str, Any] = {
            "model": model.value,
            "input": prompt,
        }
        if system_prompt:
            request_payload["instructions"] = system_prompt
        if max_output_tokens is not None:
            request_payload["max_output_tokens"] = max_output_tokens
        if reasoning_effort != CodexReasoningEffort.NONE:
            request_payload["reasoning"] = {"effort": reasoning_effort.value}

        response = await client.responses.create(**request_payload)
        if not isinstance(response, OpenAIResponse):
            raise TypeError(f"Expected OpenAIResponse, got {type(response).__name__}")

        # Extract data directly from typed response
        text_output = response.output_text or ""
        reasoning_summary = (
            str(response.reasoning.summary)
            if response.reasoning and response.reasoning.summary
            else ""
        )
        response_id = response.id or ""

        # Update usage stats
        self.execution_stats.input_token_count = (
            response.usage.input_tokens if response.usage else 0
        )
        self.execution_stats.output_token_count = (
            response.usage.output_tokens if response.usage else 0
        )
        self.execution_stats.llm_call_count += 1

        return CodexCallResult(
            response=text_output,
            reasoning=reasoning_summary,
            response_id=response_id,
        )

    async def run(
        self,
        input_data: Input,
        *,
        credentials: APIKeyCredentials,
        **_kwargs,
    ) -> BlockOutput:
        result = await self.call_codex(
            credentials=credentials,
            model=input_data.model,
            prompt=input_data.prompt,
            system_prompt=input_data.system_prompt,
            max_output_tokens=input_data.max_output_tokens,
            reasoning_effort=input_data.reasoning_effort,
        )

        yield "response", result.response
        yield "reasoning", result.reasoning
        yield "response_id", result.response_id
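For orientation, the payload that `call_codex` assembles corresponds to a plain Responses API call. A minimal sketch outside the block framework, assuming `OPENAI_API_KEY` is set and the `gpt-5.1-codex` model is available to the account:

```python
import asyncio

from openai import AsyncOpenAI


async def main() -> None:
    client = AsyncOpenAI()  # reads OPENAI_API_KEY from the environment
    response = await client.responses.create(
        model="gpt-5.1-codex",
        input="Write a Python function that reverses a list.",
        instructions="You are Codex, an elite software engineer.",
        max_output_tokens=2048,
        reasoning={"effort": "medium"},
    )
    # output_text concatenates the text parts of the response output
    print(response.output_text)


asyncio.run(main())
```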
@@ -1,4 +1,6 @@
 import smtplib
+import socket
+import ssl
 from email.mime.multipart import MIMEMultipart
 from email.mime.text import MIMEText
 from typing import Literal
@@ -48,9 +50,7 @@ def SMTPCredentialsField() -> SMTPCredentialsInput:


 class SMTPConfig(BaseModel):
-    smtp_server: str = SchemaField(
-        default="smtp.example.com", description="SMTP server address"
-    )
+    smtp_server: str = SchemaField(description="SMTP server address")
     smtp_port: int = SchemaField(default=25, description="SMTP port number")

     model_config = ConfigDict(title="SMTP Config")
@@ -67,10 +67,7 @@ class SendEmailBlock(Block):
         body: str = SchemaField(
             description="Body of the email", placeholder="Enter the email body"
         )
-        config: SMTPConfig = SchemaField(
-            description="SMTP Config",
-            default=SMTPConfig(),
-        )
+        config: SMTPConfig = SchemaField(description="SMTP Config")
         credentials: SMTPCredentialsInput = SMTPCredentialsField()

     class Output(BlockSchemaOutput):
@@ -120,7 +117,7 @@ class SendEmailBlock(Block):
         msg["Subject"] = subject
         msg.attach(MIMEText(body, "plain"))

-        with smtplib.SMTP(smtp_server, smtp_port) as server:
+        with smtplib.SMTP(smtp_server, smtp_port, timeout=30) as server:
             server.starttls()
             server.login(smtp_username, smtp_password)
             server.sendmail(smtp_username, to_email, msg.as_string())
@@ -130,10 +127,59 @@ class SendEmailBlock(Block):
     async def run(
         self, input_data: Input, *, credentials: SMTPCredentials, **kwargs
     ) -> BlockOutput:
-        yield "status", self.send_email(
-            config=input_data.config,
-            to_email=input_data.to_email,
-            subject=input_data.subject,
-            body=input_data.body,
-            credentials=credentials,
-        )
+        try:
+            status = self.send_email(
+                config=input_data.config,
+                to_email=input_data.to_email,
+                subject=input_data.subject,
+                body=input_data.body,
+                credentials=credentials,
+            )
+            yield "status", status
+        except socket.gaierror:
+            yield "error", (
+                f"Cannot connect to SMTP server '{input_data.config.smtp_server}'. "
+                "Please verify the server address is correct."
+            )
+        except socket.timeout:
+            yield "error", (
+                f"Connection timeout to '{input_data.config.smtp_server}' "
+                f"on port {input_data.config.smtp_port}. "
+                "The server may be down or unreachable."
+            )
+        except ConnectionRefusedError:
+            yield "error", (
+                f"Connection refused to '{input_data.config.smtp_server}' "
+                f"on port {input_data.config.smtp_port}. "
+                "Common SMTP ports are: 587 (TLS), 465 (SSL), 25 (plain). "
+                "Please verify the port is correct."
+            )
+        except smtplib.SMTPNotSupportedError:
+            yield "error", (
+                f"STARTTLS not supported by server '{input_data.config.smtp_server}'. "
+                "Try using port 465 for SSL or port 25 for unencrypted connection."
+            )
+        except ssl.SSLError as e:
+            yield "error", (
+                f"SSL/TLS error when connecting to '{input_data.config.smtp_server}': {str(e)}. "
+                "The server may require a different security protocol."
+            )
+        except smtplib.SMTPAuthenticationError:
+            yield "error", (
+                "Authentication failed. Please verify your username and password are correct."
+            )
+        except smtplib.SMTPRecipientsRefused:
+            yield "error", (
+                f"Recipient email address '{input_data.to_email}' was rejected by the server. "
+                "Please verify the email address is valid."
+            )
+        except smtplib.SMTPSenderRefused:
+            yield "error", (
+                "Sender email address defined in the credentials that where used"
+                "was rejected by the server. "
+                "Please verify your account is authorized to send emails."
+            )
+        except smtplib.SMTPDataError as e:
+            yield "error", f"Email data rejected by server: {str(e)}"
+        except Exception as e:
+            raise e
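The rewritten `run()` maps common connection and SMTP failures to a friendly "error" output instead of letting the node crash. A stand-alone sketch of the same mapping pattern, using only the standard library; host, port, and credentials are placeholders:

```python
import smtplib
import socket


def send_with_friendly_errors(host: str, port: int, user: str, password: str) -> str:
    try:
        with smtplib.SMTP(host, port, timeout=30) as server:
            server.starttls()
            server.login(user, password)
        return "Connected and authenticated"
    except socket.gaierror:
        return f"Cannot resolve SMTP server '{host}'."
    except (socket.timeout, TimeoutError):
        return f"Connection to '{host}:{port}' timed out."
    except ConnectionRefusedError:
        return f"Connection refused on port {port}; common SMTP ports are 587, 465, 25."
    except smtplib.SMTPAuthenticationError:
        return "Authentication failed; check username and password."


print(send_with_friendly_errors("smtp.example.com", 587, "user", "secret"))
```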
@@ -265,3 +265,68 @@ class LinearClient:
            return [Issue(**issue) for issue in issues["searchIssues"]["nodes"]]
        except LinearAPIException as e:
            raise e

    async def try_get_issues(
        self, project: str, status: str, is_assigned: bool, include_comments: bool
    ) -> list[Issue]:
        try:
            query = """
            query IssuesByProjectStatusAndAssignee(
                $projectName: String!
                $statusName: String!
                $isAssigned: Boolean!
                $includeComments: Boolean! = false
            ) {
                issues(
                    filter: {
                        project: { name: { eq: $projectName } }
                        state: { name: { eq: $statusName } }
                        assignee: { null: $isAssigned }
                    }
                ) {
                    nodes {
                        id
                        title
                        identifier
                        description
                        createdAt
                        priority
                        assignee {
                            id
                            name
                        }
                        project {
                            id
                            name
                        }
                        state {
                            id
                            name
                        }
                        comments @include(if: $includeComments) {
                            nodes {
                                id
                                body
                                createdAt
                                user {
                                    id
                                    name
                                }
                            }
                        }
                    }
                }
            }
            """

            variables: dict[str, Any] = {
                "projectName": project,
                "statusName": status,
                "isAssigned": not is_assigned,
                "includeComments": include_comments,
            }

            issues = await self.query(query, variables)
            return [Issue(**issue) for issue in issues["issues"]["nodes"]]
        except LinearAPIException as e:
            raise e
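Note the inversion in the variables: the GraphQL filter is `assignee: { null: $isAssigned }`, a null-check, so asking for assigned issues means passing `False`. A tiny sketch of the resulting payload with placeholder project and status names:

```python
# Hypothetical variables payload for the query above; names are placeholders.
is_assigned = True
variables = {
    "projectName": "Platform",
    "statusName": "In Progress",
    "isAssigned": not is_assigned,  # null-check flag, not an "is assigned" flag
    "includeComments": False,
}
print(variables)  # {'projectName': 'Platform', ..., 'isAssigned': False, ...}
```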
@@ -203,3 +203,106 @@ class LinearSearchIssuesBlock(Block):
            yield "error", str(e)
        except Exception as e:
            yield "error", f"Unexpected error: {str(e)}"


class LinearGetProjectIssuesBlock(Block):
    """Block for getting issues from a Linear project filtered by status and assignee"""

    class Input(BlockSchemaInput):
        credentials: CredentialsMetaInput = linear.credentials_field(
            description="Linear credentials with read permissions",
            required_scopes={LinearScope.READ},
        )
        project: str = SchemaField(description="Name of the project to get issues from")
        status: str = SchemaField(
            description="Status/state name to filter issues by (e.g., 'In Progress', 'Done')"
        )
        is_assigned: bool = SchemaField(
            description="Filter by assignee status - True to get assigned issues, False to get unassigned issues",
            default=False,
        )
        include_comments: bool = SchemaField(
            description="Whether to include comments in the response",
            default=False,
        )

    class Output(BlockSchemaOutput):
        issues: list[Issue] = SchemaField(
            description="List of issues matching the criteria"
        )

    def __init__(self):
        super().__init__(
            id="c7d3f1e8-45a9-4b2c-9f81-3e6a8d7c5b1a",
            description="Gets issues from a Linear project filtered by status and assignee",
            input_schema=self.Input,
            output_schema=self.Output,
            categories={BlockCategory.PRODUCTIVITY, BlockCategory.ISSUE_TRACKING},
            test_input={
                "project": "Test Project",
                "status": "In Progress",
                "is_assigned": False,
                "include_comments": False,
                "credentials": TEST_CREDENTIALS_INPUT_OAUTH,
            },
            test_credentials=TEST_CREDENTIALS_OAUTH,
            test_output=[
                (
                    "issues",
                    [
                        Issue(
                            id="abc123",
                            identifier="TST-123",
                            title="Test issue",
                            description="Test description",
                            priority=1,
                        )
                    ],
                ),
            ],
            test_mock={
                "get_project_issues": lambda *args, **kwargs: [
                    Issue(
                        id="abc123",
                        identifier="TST-123",
                        title="Test issue",
                        description="Test description",
                        priority=1,
                    )
                ]
            },
        )

    @staticmethod
    async def get_project_issues(
        credentials: OAuth2Credentials | APIKeyCredentials,
        project: str,
        status: str,
        is_assigned: bool,
        include_comments: bool,
    ) -> list[Issue]:
        client = LinearClient(credentials=credentials)
        response: list[Issue] = await client.try_get_issues(
            project=project,
            status=status,
            is_assigned=is_assigned,
            include_comments=include_comments,
        )
        return response

    async def run(
        self,
        input_data: Input,
        *,
        credentials: OAuth2Credentials | APIKeyCredentials,
        **kwargs,
    ) -> BlockOutput:
        """Execute getting project issues"""
        issues = await self.get_project_issues(
            credentials=credentials,
            project=input_data.project,
            status=input_data.status,
            is_assigned=input_data.is_assigned,
            include_comments=input_data.include_comments,
        )
        yield "issues", issues
@@ -1,9 +1,16 @@
 from backend.sdk import BaseModel


+class User(BaseModel):
+    id: str
+    name: str
+
+
 class Comment(BaseModel):
     id: str
     body: str
+    createdAt: str | None = None
+    user: User | None = None


 class CreateCommentInput(BaseModel):
@@ -20,22 +27,26 @@ class CreateCommentResponseWrapper(BaseModel):
     commentCreate: CreateCommentResponse


+class Project(BaseModel):
+    id: str
+    name: str
+    description: str | None = None
+    priority: int | None = None
+    progress: float | None = None
+    content: str | None = None
+
+
 class Issue(BaseModel):
     id: str
     identifier: str
     title: str
     description: str | None
     priority: int
+    project: Project | None = None
+    createdAt: str | None = None
+    comments: list[Comment] | None = None
+    assignee: User | None = None


 class CreateIssueResponse(BaseModel):
     issue: Issue
-
-
-class Project(BaseModel):
-    id: str
-    name: str
-    description: str
-    priority: int
-    progress: float
-    content: str | None
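A quick sanity check of the reshaped models: an Issue can now carry its project, comments, and assignee inline. This sketch assumes the models are importable from the Linear models module (path assumed) and that the SDK `BaseModel` is a pydantic v2 model; field values are illustrative only.

```python
from backend.blocks.linear.models import Comment, Issue, Project, User  # assumed path

issue = Issue(
    id="abc123",
    identifier="TST-123",
    title="Test issue",
    description="Test description",
    priority=1,
    project=Project(id="p1", name="Platform"),
    assignee=User(id="u1", name="Ada"),
    comments=[Comment(id="c1", body="Looks good", user=User(id="u2", name="Bob"))],
)
print(issue.model_dump(exclude_none=True))
```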
@@ -93,6 +93,7 @@ class LlmModel(str, Enum, metaclass=LlmModelMeta):
     O1_MINI = "o1-mini"
     # GPT-5 models
     GPT5 = "gpt-5-2025-08-07"
+    GPT5_1 = "gpt-5.1-2025-11-13"
     GPT5_MINI = "gpt-5-mini-2025-08-07"
     GPT5_NANO = "gpt-5-nano-2025-08-07"
     GPT5_CHAT = "gpt-5-chat-latest"
@@ -106,6 +107,7 @@ class LlmModel(str, Enum, metaclass=LlmModelMeta):
     CLAUDE_4_1_OPUS = "claude-opus-4-1-20250805"
     CLAUDE_4_OPUS = "claude-opus-4-20250514"
     CLAUDE_4_SONNET = "claude-sonnet-4-20250514"
+    CLAUDE_4_5_OPUS = "claude-opus-4-5-20251101"
     CLAUDE_4_5_SONNET = "claude-sonnet-4-5-20250929"
     CLAUDE_4_5_HAIKU = "claude-haiku-4-5-20251001"
     CLAUDE_3_7_SONNET = "claude-3-7-sonnet-20250219"
@@ -129,6 +131,7 @@ class LlmModel(str, Enum, metaclass=LlmModelMeta):
     OPENAI_GPT_OSS_120B = "openai/gpt-oss-120b"
     OPENAI_GPT_OSS_20B = "openai/gpt-oss-20b"
     GEMINI_2_5_PRO = "google/gemini-2.5-pro-preview-03-25"
+    GEMINI_3_PRO_PREVIEW = "google/gemini-3-pro-preview"
     GEMINI_2_5_FLASH = "google/gemini-2.5-flash"
     GEMINI_2_0_FLASH = "google/gemini-2.0-flash-001"
     GEMINI_2_5_FLASH_LITE_PREVIEW = "google/gemini-2.5-flash-lite-preview-06-17"
@@ -151,6 +154,9 @@ class LlmModel(str, Enum, metaclass=LlmModelMeta):
     META_LLAMA_4_SCOUT = "meta-llama/llama-4-scout"
     META_LLAMA_4_MAVERICK = "meta-llama/llama-4-maverick"
     GROK_4 = "x-ai/grok-4"
+    GROK_4_FAST = "x-ai/grok-4-fast"
+    GROK_4_1_FAST = "x-ai/grok-4.1-fast"
+    GROK_CODE_FAST_1 = "x-ai/grok-code-fast-1"
     KIMI_K2 = "moonshotai/kimi-k2"
     QWEN3_235B_A22B_THINKING = "qwen/qwen3-235b-a22b-thinking-2507"
     QWEN3_CODER = "qwen/qwen3-coder"
@@ -189,6 +195,7 @@ MODEL_METADATA = {
     LlmModel.O1_MINI: ModelMetadata("openai", 128000, 65536),  # o1-mini-2024-09-12
     # GPT-5 models
     LlmModel.GPT5: ModelMetadata("openai", 400000, 128000),
+    LlmModel.GPT5_1: ModelMetadata("openai", 400000, 128000),
     LlmModel.GPT5_MINI: ModelMetadata("openai", 400000, 128000),
     LlmModel.GPT5_NANO: ModelMetadata("openai", 400000, 128000),
     LlmModel.GPT5_CHAT: ModelMetadata("openai", 400000, 16384),
@@ -212,6 +219,9 @@ MODEL_METADATA = {
     LlmModel.CLAUDE_4_SONNET: ModelMetadata(
         "anthropic", 200000, 64000
     ),  # claude-4-sonnet-20250514
+    LlmModel.CLAUDE_4_5_OPUS: ModelMetadata(
+        "anthropic", 200000, 64000
+    ),  # claude-opus-4-5-20251101
     LlmModel.CLAUDE_4_5_SONNET: ModelMetadata(
         "anthropic", 200000, 64000
     ),  # claude-sonnet-4-5-20250929
@@ -241,6 +251,7 @@ MODEL_METADATA = {
     LlmModel.OLLAMA_DOLPHIN: ModelMetadata("ollama", 32768, None),
     # https://openrouter.ai/models
     LlmModel.GEMINI_2_5_PRO: ModelMetadata("open_router", 1050000, 8192),
+    LlmModel.GEMINI_3_PRO_PREVIEW: ModelMetadata("open_router", 1048576, 65535),
     LlmModel.GEMINI_2_5_FLASH: ModelMetadata("open_router", 1048576, 65535),
     LlmModel.GEMINI_2_0_FLASH: ModelMetadata("open_router", 1048576, 8192),
     LlmModel.GEMINI_2_5_FLASH_LITE_PREVIEW: ModelMetadata(
@@ -252,12 +263,12 @@ MODEL_METADATA = {
     LlmModel.COHERE_COMMAND_R_PLUS_08_2024: ModelMetadata("open_router", 128000, 4096),
     LlmModel.DEEPSEEK_CHAT: ModelMetadata("open_router", 64000, 2048),
     LlmModel.DEEPSEEK_R1_0528: ModelMetadata("open_router", 163840, 163840),
-    LlmModel.PERPLEXITY_SONAR: ModelMetadata("open_router", 127000, 127000),
+    LlmModel.PERPLEXITY_SONAR: ModelMetadata("open_router", 127000, 8000),
     LlmModel.PERPLEXITY_SONAR_PRO: ModelMetadata("open_router", 200000, 8000),
     LlmModel.PERPLEXITY_SONAR_DEEP_RESEARCH: ModelMetadata(
         "open_router",
         128000,
-        128000,
+        16000,
     ),
     LlmModel.NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B: ModelMetadata(
         "open_router", 131000, 4096
@@ -275,6 +286,9 @@ MODEL_METADATA = {
     LlmModel.META_LLAMA_4_SCOUT: ModelMetadata("open_router", 131072, 131072),
     LlmModel.META_LLAMA_4_MAVERICK: ModelMetadata("open_router", 1048576, 1000000),
     LlmModel.GROK_4: ModelMetadata("open_router", 256000, 256000),
+    LlmModel.GROK_4_FAST: ModelMetadata("open_router", 2000000, 30000),
+    LlmModel.GROK_4_1_FAST: ModelMetadata("open_router", 2000000, 30000),
+    LlmModel.GROK_CODE_FAST_1: ModelMetadata("open_router", 256000, 10000),
     LlmModel.KIMI_K2: ModelMetadata("open_router", 131000, 131000),
     LlmModel.QWEN3_235B_A22B_THINKING: ModelMetadata("open_router", 262144, 262144),
     LlmModel.QWEN3_CODER: ModelMetadata("open_router", 262144, 262144),
@@ -797,7 +811,7 @@ class AIStructuredResponseGeneratorBlock(AIBlockBase):
             default="",
             description="The system prompt to provide additional context to the model.",
         )
-        conversation_history: list[dict] = SchemaField(
+        conversation_history: list[dict] | None = SchemaField(
             default_factory=list,
             description="The conversation history to provide context for the prompt.",
         )
@@ -904,7 +918,7 @@ class AIStructuredResponseGeneratorBlock(AIBlockBase):
         self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
     ) -> BlockOutput:
         logger.debug(f"Calling LLM with input data: {input_data}")
-        prompt = [json.to_dict(p) for p in input_data.conversation_history]
+        prompt = [json.to_dict(p) for p in input_data.conversation_history or [] if p]

         values = input_data.prompt_values
         if values:
@@ -121,13 +121,16 @@ def _convert_raw_response_to_dict(raw_response: Any) -> dict[str, Any]:
     return json.to_dict(raw_response)


-def get_pending_tool_calls(conversation_history: list[Any]) -> dict[str, int]:
+def get_pending_tool_calls(conversation_history: list[Any] | None) -> dict[str, int]:
     """
     All the tool calls entry in the conversation history requires a response.
     This function returns the pending tool calls that has not generated an output yet.

     Return: dict[str, int] - A dictionary of pending tool call IDs with their count.
     """
+    if not conversation_history:
+        return {}
+
     pending_calls = Counter()
     for history in conversation_history:
         for call_id in _get_tool_requests(history):
@@ -173,7 +176,7 @@ class SmartDecisionMakerBlock(Block):
             "Function parameters that has no default value and not optional typed has to be provided. ",
             description="The system prompt to provide additional context to the model.",
         )
-        conversation_history: list[dict] = SchemaField(
+        conversation_history: list[dict] | None = SchemaField(
             default_factory=list,
             description="The conversation history to provide context for the prompt.",
         )
@@ -605,10 +608,10 @@ class SmartDecisionMakerBlock(Block):
         tool_functions = await self._create_tool_node_signatures(node_id)
         yield "tool_functions", json.dumps(tool_functions)

-        input_data.conversation_history = input_data.conversation_history or []
-        prompt = [json.to_dict(p) for p in input_data.conversation_history if p]
+        conversation_history = input_data.conversation_history or []
+        prompt = [json.to_dict(p) for p in conversation_history if p]

-        pending_tool_calls = get_pending_tool_calls(input_data.conversation_history)
+        pending_tool_calls = get_pending_tool_calls(conversation_history)
         if pending_tool_calls and input_data.last_tool_output is None:
             raise ValueError(f"Tool call requires an output for {pending_tool_calls}")
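For context, `get_pending_tool_calls` counts tool-call requests that have no matching response yet; with the new `None` guard it simply returns an empty dict for a missing history. A minimal sketch of the same Counter-based bookkeeping, using plain dicts in place of the block's message objects and private helpers (which are not shown in this compare):

```python
from collections import Counter

history = [
    {"tool_calls": [{"id": "call_1"}, {"id": "call_2"}]},  # assistant requested two calls
    {"tool_call_id": "call_1"},                            # only call_1 has a response
]

pending: Counter[str] = Counter()
for message in history:
    for call in message.get("tool_calls", []):
        pending[call["id"]] += 1          # a request opens a pending entry
    if call_id := message.get("tool_call_id"):
        pending[call_id] -= 1             # a tool response closes it

print({k: v for k, v in pending.items() if v > 0})  # {'call_2': 1}
```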
@@ -1,5 +1,7 @@
 from typing import Type

+from backend.blocks.ai_image_customizer import AIImageCustomizerBlock, GeminiImageModel
+from backend.blocks.ai_image_generator_block import AIImageGeneratorBlock, ImageGenModel
 from backend.blocks.ai_music_generator import AIMusicGeneratorBlock
 from backend.blocks.ai_shortform_video_block import (
     AIAdMakerVideoCreatorBlock,
@@ -9,6 +11,7 @@ from backend.blocks.ai_shortform_video_block import (
 from backend.blocks.apollo.organization import SearchOrganizationsBlock
 from backend.blocks.apollo.people import SearchPeopleBlock
 from backend.blocks.apollo.person import GetPersonDetailBlock
+from backend.blocks.codex import CodeGenerationBlock, CodexModel
 from backend.blocks.enrichlayer.linkedin import (
     GetLinkedinProfileBlock,
     GetLinkedinProfilePictureBlock,
@@ -61,9 +64,10 @@ MODEL_COST: dict[LlmModel, int] = {
     LlmModel.O1_MINI: 4,
     # GPT-5 models
     LlmModel.GPT5: 2,
+    LlmModel.GPT5_1: 5,
     LlmModel.GPT5_MINI: 1,
     LlmModel.GPT5_NANO: 1,
-    LlmModel.GPT5_CHAT: 2,
+    LlmModel.GPT5_CHAT: 5,
     LlmModel.GPT41: 2,
     LlmModel.GPT41_MINI: 1,
     LlmModel.GPT4O_MINI: 1,
@@ -74,6 +78,7 @@ MODEL_COST: dict[LlmModel, int] = {
     LlmModel.CLAUDE_4_OPUS: 21,
     LlmModel.CLAUDE_4_SONNET: 5,
     LlmModel.CLAUDE_4_5_HAIKU: 4,
+    LlmModel.CLAUDE_4_5_OPUS: 14,
     LlmModel.CLAUDE_4_5_SONNET: 9,
     LlmModel.CLAUDE_3_7_SONNET: 5,
     LlmModel.CLAUDE_3_HAIKU: 1,
@@ -92,6 +97,7 @@ MODEL_COST: dict[LlmModel, int] = {
     LlmModel.OPENAI_GPT_OSS_120B: 1,
     LlmModel.OPENAI_GPT_OSS_20B: 1,
     LlmModel.GEMINI_2_5_PRO: 4,
+    LlmModel.GEMINI_3_PRO_PREVIEW: 5,
     LlmModel.MISTRAL_NEMO: 1,
     LlmModel.COHERE_COMMAND_R_08_2024: 1,
     LlmModel.COHERE_COMMAND_R_PLUS_08_2024: 3,
@@ -113,6 +119,9 @@ MODEL_COST: dict[LlmModel, int] = {
     LlmModel.LLAMA_API_LLAMA3_3_8B: 1,
     LlmModel.LLAMA_API_LLAMA3_3_70B: 1,
     LlmModel.GROK_4: 9,
+    LlmModel.GROK_4_FAST: 1,
+    LlmModel.GROK_4_1_FAST: 1,
+    LlmModel.GROK_CODE_FAST_1: 1,
     LlmModel.KIMI_K2: 1,
     LlmModel.QWEN3_235B_A22B_THINKING: 1,
     LlmModel.QWEN3_CODER: 9,
@@ -258,6 +267,20 @@ BLOCK_COSTS: dict[Type[Block], list[BlockCost]] = {
     AIStructuredResponseGeneratorBlock: LLM_COST,
     AITextSummarizerBlock: LLM_COST,
     AIListGeneratorBlock: LLM_COST,
+    CodeGenerationBlock: [
+        BlockCost(
+            cost_type=BlockCostType.RUN,
+            cost_filter={
+                "model": CodexModel.GPT5_1_CODEX,
+                "credentials": {
+                    "id": openai_credentials.id,
+                    "provider": openai_credentials.provider,
+                    "type": openai_credentials.type,
+                },
+            },
+            cost_amount=5,
+        )
+    ],
     CreateTalkingAvatarVideoBlock: [
         BlockCost(
             cost_amount=15,
@@ -535,4 +558,85 @@ BLOCK_COSTS: dict[Type[Block], list[BlockCost]] = {
             },
         )
     ],
+    AIImageGeneratorBlock: [
+        BlockCost(
+            cost_amount=5,  # SD3.5 Medium: ~$0.035 per image
+            cost_filter={
+                "model": ImageGenModel.SD3_5,
+                "credentials": {
+                    "id": replicate_credentials.id,
+                    "provider": replicate_credentials.provider,
+                    "type": replicate_credentials.type,
+                },
+            },
+        ),
+        BlockCost(
+            cost_amount=6,  # Flux 1.1 Pro: ~$0.04 per image
+            cost_filter={
+                "model": ImageGenModel.FLUX,
+                "credentials": {
+                    "id": replicate_credentials.id,
+                    "provider": replicate_credentials.provider,
+                    "type": replicate_credentials.type,
+                },
+            },
+        ),
+        BlockCost(
+            cost_amount=10,  # Flux 1.1 Pro Ultra: ~$0.08 per image
+            cost_filter={
+                "model": ImageGenModel.FLUX_ULTRA,
+                "credentials": {
+                    "id": replicate_credentials.id,
+                    "provider": replicate_credentials.provider,
+                    "type": replicate_credentials.type,
+                },
+            },
+        ),
+        BlockCost(
+            cost_amount=7,  # Recraft v3: ~$0.05 per image
+            cost_filter={
+                "model": ImageGenModel.RECRAFT,
+                "credentials": {
+                    "id": replicate_credentials.id,
+                    "provider": replicate_credentials.provider,
+                    "type": replicate_credentials.type,
+                },
+            },
+        ),
+        BlockCost(
+            cost_amount=14,  # Nano Banana Pro: $0.14 per image at 2K
+            cost_filter={
+                "model": ImageGenModel.NANO_BANANA_PRO,
+                "credentials": {
+                    "id": replicate_credentials.id,
+                    "provider": replicate_credentials.provider,
+                    "type": replicate_credentials.type,
+                },
+            },
+        ),
+    ],
+    AIImageCustomizerBlock: [
+        BlockCost(
+            cost_amount=10,  # Nano Banana (original)
+            cost_filter={
+                "model": GeminiImageModel.NANO_BANANA,
+                "credentials": {
+                    "id": replicate_credentials.id,
+                    "provider": replicate_credentials.provider,
+                    "type": replicate_credentials.type,
+                },
+            },
+        ),
+        BlockCost(
+            cost_amount=14,  # Nano Banana Pro: $0.14 per image at 2K
+            cost_filter={
+                "model": GeminiImageModel.NANO_BANANA_PRO,
+                "credentials": {
+                    "id": replicate_credentials.id,
+                    "provider": replicate_credentials.provider,
+                    "type": replicate_credentials.type,
+                },
+            },
+        ),
+    ],
 }
@@ -460,6 +460,7 @@ class NodeExecutionResult(BaseModel):
 async def get_graph_executions(
     graph_exec_id: Optional[str] = None,
     graph_id: Optional[str] = None,
+    graph_version: Optional[int] = None,
     user_id: Optional[str] = None,
     statuses: Optional[list[ExecutionStatus]] = None,
     created_time_gte: Optional[datetime] = None,
@@ -476,6 +477,8 @@ async def get_graph_executions(
         where_filter["userId"] = user_id
     if graph_id:
         where_filter["agentGraphId"] = graph_id
+    if graph_version is not None:
+        where_filter["agentGraphVersion"] = graph_version
     if created_time_gte or created_time_lte:
         where_filter["createdAt"] = {
             "gte": created_time_gte or datetime.min.replace(tzinfo=timezone.utc),
@@ -18,6 +18,7 @@ from prisma.types import (
     AgentGraphWhereInput,
     AgentNodeCreateInput,
     AgentNodeLinkCreateInput,
+    StoreListingVersionWhereInput,
 )
 from pydantic import BaseModel, Field, create_model
 from pydantic.fields import computed_field
@@ -884,9 +885,9 @@ async def get_graph_metadata(graph_id: str, version: int | None = None) -> Graph


 async def get_graph(
     graph_id: str,
-    version: int | None = None,
+    version: int | None,
+    user_id: str | None,
     *,
-    user_id: str | None = None,
     for_export: bool = False,
     include_subgraphs: bool = False,
     skip_access_check: bool = False,
@@ -897,26 +898,44 @@ async def get_graph(

     Returns `None` if the record is not found.
     """
-    where_clause: AgentGraphWhereInput = {
-        "id": graph_id,
-    }
+    graph = None

-    if version is not None:
-        where_clause["version"] = version
+    # Only search graph directly on owned graph (or access check is skipped)
+    if skip_access_check or user_id is not None:
+        graph_where_clause: AgentGraphWhereInput = {
+            "id": graph_id,
+        }
+        if version is not None:
+            graph_where_clause["version"] = version
+        if not skip_access_check and user_id is not None:
+            graph_where_clause["userId"] = user_id
+
+        graph = await AgentGraph.prisma().find_first(
+            where=graph_where_clause,
+            include=AGENT_GRAPH_INCLUDE,
+            order={"version": "desc"},
+        )
+
+    # Use store listed graph to find not owned graph
+    if graph is None:
+        store_where_clause: StoreListingVersionWhereInput = {
+            "agentGraphId": graph_id,
+            "submissionStatus": SubmissionStatus.APPROVED,
+            "isDeleted": False,
+        }
+        if version is not None:
+            store_where_clause["agentGraphVersion"] = version
+
+        if store_listing := await StoreListingVersion.prisma().find_first(
+            where=store_where_clause,
+            order={"agentGraphVersion": "desc"},
+            include={"AgentGraph": {"include": AGENT_GRAPH_INCLUDE}},
+        ):
+            graph = store_listing.AgentGraph

-    graph = await AgentGraph.prisma().find_first(
-        where=where_clause,
-        include=AGENT_GRAPH_INCLUDE,
-        order={"version": "desc"},
-    )
     if graph is None:
         return None

-    if not skip_access_check and graph.userId != user_id:
-        # For access, the graph must be owned by the user or listed in the store
-        if not await is_graph_published_in_marketplace(graph_id, graph.version):
-            return None
-
     if include_subgraphs or for_export:
         sub_graphs = await get_sub_graphs(graph)
         return GraphModel.from_db(
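With the signature change, `version` and `user_id` lose their defaults, so callers now pass them explicitly, as the activity-status caller later in this compare does. A hedged sketch of an updated caller; the import path and ID value are placeholders, and `skip_access_check=True` mirrors that internal caller:

```python
import asyncio

from backend.data.graph import get_graph  # assumed module path


async def load_latest(graph_id: str, user_id: str | None) -> None:
    graph = await get_graph(
        graph_id=graph_id,
        version=None,          # None -> latest version
        user_id=user_id,       # ownership filter unless the access check is skipped
        skip_access_check=True,
    )
    print(graph.name if graph else "not found")


asyncio.run(load_latest("00000000-0000-0000-0000-000000000000", None))
```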
@@ -27,6 +27,101 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__)


# Default system prompt template for activity status generation
DEFAULT_SYSTEM_PROMPT = """You are an AI assistant analyzing what an agent execution accomplished and whether it worked correctly.
You need to provide both a user-friendly summary AND a correctness assessment.

FOR THE ACTIVITY STATUS:
- Write from the user's perspective about what they accomplished, NOT about technical execution details
- Focus on the ACTUAL TASK the user wanted done, not the internal workflow steps
- Avoid technical terms like 'workflow', 'execution', 'components', 'nodes', 'processing', etc.
- Keep it to 3 sentences maximum. Be conversational and human-friendly

FOR THE CORRECTNESS SCORE:
- Provide a score from 0.0 to 1.0 indicating how well the execution achieved its intended purpose
- Use this scoring guide:
  0.0-0.2: Failure - The result clearly did not meet the task requirements
  0.2-0.4: Poor - Major issues; only small parts of the goal were achieved
  0.4-0.6: Partial Success - Some objectives met, but with noticeable gaps or inaccuracies
  0.6-0.8: Mostly Successful - Largely achieved the intended outcome, with minor flaws
  0.8-1.0: Success - Fully met or exceeded the task requirements
- Base the score on actual outputs produced, not just technical completion

UNDERSTAND THE INTENDED PURPOSE:
- FIRST: Read the graph description carefully to understand what the user wanted to accomplish
- The graph name and description tell you the main goal/intention of this automation
- Use this intended purpose as your PRIMARY criteria for success/failure evaluation
- Ask yourself: 'Did this execution actually accomplish what the graph was designed to do?'

CRITICAL OUTPUT ANALYSIS:
- Check if blocks that should produce user-facing results actually produced outputs
- Blocks with names containing 'Output', 'Post', 'Create', 'Send', 'Publish', 'Generate' are usually meant to produce final results
- If these critical blocks have NO outputs (empty recent_outputs), the task likely FAILED even if status shows 'completed'
- Sub-agents (AgentExecutorBlock) that produce no outputs usually indicate failed sub-tasks
- Most importantly: Does the execution result match what the graph description promised to deliver?

SUCCESS EVALUATION BASED ON INTENTION:
- If the graph is meant to 'create blog posts' → check if blog content was actually created
- If the graph is meant to 'send emails' → check if emails were actually sent
- If the graph is meant to 'analyze data' → check if analysis results were produced
- If the graph is meant to 'generate reports' → check if reports were generated
- Technical completion ≠ goal achievement. Focus on whether the USER'S INTENDED OUTCOME was delivered

IMPORTANT: Be HONEST about what actually happened:
- If the input was invalid/nonsensical, say so directly
- If the task failed, explain what went wrong in simple terms
- If errors occurred, focus on what the user needs to know
- Only claim success if the INTENDED PURPOSE was genuinely accomplished AND produced expected outputs
- Don't sugar-coat failures or present them as helpful feedback
- ESPECIALLY: If the graph's main purpose wasn't achieved, this is a failure regardless of 'completed' status

Understanding Errors:
- Node errors: Individual steps may fail but the overall task might still complete (e.g., one data source fails but others work)
- Graph error (in overall_status.graph_error): This means the entire execution failed and nothing was accomplished
- Missing outputs from critical blocks: Even if no errors, this means the task failed to produce expected results
- Focus on whether the graph's intended purpose was fulfilled, not whether technical steps completed"""

# Default user prompt template for activity status generation
DEFAULT_USER_PROMPT = """A user ran '{{GRAPH_NAME}}' to accomplish something. Based on this execution data,
provide both an activity summary and correctness assessment:

{{EXECUTION_DATA}}

ANALYSIS CHECKLIST:
1. READ graph_info.description FIRST - this tells you what the user intended to accomplish
2. Check overall_status.graph_error - if present, the entire execution failed
3. Look for nodes with 'Output', 'Post', 'Create', 'Send', 'Publish', 'Generate' in their block_name
4. Check if these critical blocks have empty recent_outputs arrays - this indicates failure
5. Look for AgentExecutorBlock (sub-agents) with no outputs - this suggests sub-task failures
6. Count how many nodes produced outputs vs total nodes - low ratio suggests problems
7. MOST IMPORTANT: Does the execution outcome match what graph_info.description promised?

INTENTION-BASED EVALUATION:
- If description mentions 'blog writing' → did it create blog content?
- If description mentions 'email automation' → were emails actually sent?
- If description mentions 'data analysis' → were analysis results produced?
- If description mentions 'content generation' → was content actually generated?
- If description mentions 'social media posting' → were posts actually made?
- Match the outputs to the stated intention, not just technical completion

PROVIDE:
activity_status: 1-3 sentences about what the user accomplished, such as:
- 'I analyzed your resume and provided detailed feedback for the IT industry.'
- 'I couldn't complete the task because critical steps failed to produce any results.'
- 'I failed to generate the content you requested due to missing API access.'
- 'I extracted key information from your documents and organized it into a summary.'
- 'The task failed because the blog post creation step didn't produce any output.'

correctness_score: A float score from 0.0 to 1.0 based on how well the intended purpose was achieved:
- 0.0-0.2: Failure (didn't meet requirements)
- 0.2-0.4: Poor (major issues, minimal achievement)
- 0.4-0.6: Partial Success (some objectives met with gaps)
- 0.6-0.8: Mostly Successful (largely achieved with minor flaws)
- 0.8-1.0: Success (fully met or exceeded requirements)

BE CRITICAL: If the graph's intended purpose (from description) wasn't achieved, use a low score (0.0-0.4) even if status is 'completed'."""


class ErrorInfo(TypedDict):
    """Type definition for error information."""
@@ -93,6 +188,9 @@ async def generate_activity_status_for_execution(
     execution_status: ExecutionStatus | None = None,
     model_name: str = "gpt-4o-mini",
     skip_feature_flag: bool = False,
+    system_prompt: str = DEFAULT_SYSTEM_PROMPT,
+    user_prompt: str = DEFAULT_USER_PROMPT,
+    skip_existing: bool = True,
 ) -> ActivityStatusResponse | None:
     """
     Generate an AI-based activity status summary and correctness assessment for a graph execution.
@@ -108,10 +206,15 @@ async def generate_activity_status_for_execution(
         db_client: Database client for fetching data
         user_id: User ID for LaunchDarkly feature flag evaluation
         execution_status: The overall execution status (COMPLETED, FAILED, TERMINATED)
         model_name: AI model to use for generation (default: gpt-4o-mini)
         skip_feature_flag: Whether to skip LaunchDarkly feature flag check
+        system_prompt: Custom system prompt template (default: DEFAULT_SYSTEM_PROMPT)
+        user_prompt: Custom user prompt template with placeholders (default: DEFAULT_USER_PROMPT)
+        skip_existing: Whether to skip if activity_status and correctness_score already exist

     Returns:
         AI-generated activity status response with activity_status and correctness_status,
-        or None if feature is disabled
+        or None if feature is disabled or skipped
     """
     # Check LaunchDarkly feature flag for AI activity status generation with full context support
     if not skip_feature_flag and not await is_feature_enabled(
@@ -120,6 +223,20 @@ async def generate_activity_status_for_execution(
         logger.debug("AI activity status generation is disabled via LaunchDarkly")
         return None

+    # Check if we should skip existing data (for admin regeneration option)
+    if (
+        skip_existing
+        and execution_stats.activity_status
+        and execution_stats.correctness_score is not None
+    ):
+        logger.debug(
+            f"Skipping activity status generation for {graph_exec_id}: already exists"
+        )
+        return {
+            "activity_status": execution_stats.activity_status,
+            "correctness_score": execution_stats.correctness_score,
+        }
+
     # Check if we have OpenAI API key
     try:
         settings = Settings()
@@ -136,7 +253,12 @@ async def generate_activity_status_for_execution(

     # Get graph metadata and full graph structure for name, description, and links
     graph_metadata = await db_client.get_graph_metadata(graph_id, graph_version)
-    graph = await db_client.get_graph(graph_id, graph_version)
+    graph = await db_client.get_graph(
+        graph_id=graph_id,
+        version=graph_version,
+        user_id=user_id,
+        skip_access_check=True,
+    )

     graph_name = graph_metadata.name if graph_metadata else f"Graph {graph_id}"
     graph_description = graph_metadata.description if graph_metadata else ""
@@ -152,94 +274,23 @@ async def generate_activity_status_for_execution(
         execution_status,
     )

+    # Prepare execution data as JSON for template substitution
+    execution_data_json = json.dumps(execution_data, indent=2)
+
+    # Perform template substitution for user prompt
+    user_prompt_content = user_prompt.replace("{{GRAPH_NAME}}", graph_name).replace(
+        "{{EXECUTION_DATA}}", execution_data_json
+    )
+
     # Prepare prompt for AI with structured output requirements
     prompt = [
         {
             "role": "system",
-            "content": (
-                "You are an AI assistant analyzing what an agent execution accomplished and whether it worked correctly. "
-                "You need to provide both a user-friendly summary AND a correctness assessment.\n\n"
-                ... (the remainder of the former inline system prompt, moved verbatim into DEFAULT_SYSTEM_PROMPT above) ...
-                "- Focus on whether the graph's intended purpose was fulfilled, not whether technical steps completed"
-            ),
+            "content": system_prompt,
         },
         {
             "role": "user",
-            "content": (
-                f"A user ran '{graph_name}' to accomplish something. Based on this execution data, "
-                f"provide both an activity summary and correctness assessment:\n\n"
-                f"{json.dumps(execution_data, indent=2)}\n\n"
-                ... (the remainder of the former inline user prompt, moved verbatim into DEFAULT_USER_PROMPT above) ...
-                "BE CRITICAL: If the graph's intended purpose (from description) wasn't achieved, use a low score (0.0-0.4) even if status is 'completed'."
-            ),
+            "content": user_prompt_content,
         },
     ]
@@ -252,9 +252,9 @@ async def execute_node(
output_size += len(json.dumps(output_data))
log_metadata.debug("Node produced output", **{output_name: output_data})
yield output_name, output_data
except Exception:
except Exception as ex:
# Capture exception WITH context still set before restoring scope
sentry_sdk.capture_exception(scope=scope)
sentry_sdk.capture_exception(error=ex, scope=scope)
sentry_sdk.flush() # Ensure it's sent before we restore scope
# Re-raise to maintain normal error flow
raise
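A hedged, standalone sketch of the pattern this hunk applies (capture while the node's Sentry scope is still active, flush, then re-raise). The wrapper function and its name are illustrative; only the sentry_sdk calls mirror the diff.

import sentry_sdk

def capture_in_scope(scope: sentry_sdk.Scope, fn):  # illustrative helper, not in the PR
    try:
        return fn()
    except Exception as ex:
        # Passing the scope explicitly keeps node/user context attached to the event
        sentry_sdk.capture_exception(error=ex, scope=scope)
        sentry_sdk.flush()  # ensure the event is sent before the scope is restored
        raise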
|
||||
|
||||
@@ -2,6 +2,7 @@ import asyncio
|
||||
import logging
|
||||
import os
|
||||
import threading
|
||||
import uuid
|
||||
from enum import Enum
|
||||
from typing import Optional
|
||||
from urllib.parse import parse_qs, urlencode, urlparse, urlunparse
|
||||
@@ -36,7 +37,9 @@ from backend.monitoring import (
|
||||
from backend.util.clients import get_scheduler_client
|
||||
from backend.util.cloud_storage import cleanup_expired_files_async
|
||||
from backend.util.exceptions import (
|
||||
GraphNotFoundError,
|
||||
GraphNotInLibraryError,
|
||||
GraphValidationError,
|
||||
NotAuthorizedError,
|
||||
NotFoundError,
|
||||
)
|
||||
@@ -160,14 +163,12 @@ async def _execute_graph(**kwargs):
|
||||
f"Graph execution {graph_exec.id} took {elapsed:.2f}s to create/publish - "
|
||||
f"this is unusually slow and may indicate resource contention"
|
||||
)
|
||||
except GraphNotFoundError as e:
|
||||
await _handle_graph_not_available(e, args, start_time)
|
||||
except GraphNotInLibraryError as e:
|
||||
elapsed = asyncio.get_event_loop().time() - start_time
|
||||
logger.warning(
|
||||
f"Scheduled execution blocked for deleted/archived graph {args.graph_id} "
|
||||
f"(user {args.user_id}) after {elapsed:.2f}s: {e}"
|
||||
)
|
||||
# Clean up orphaned schedules for this graph
|
||||
await _cleanup_orphaned_schedules_for_graph(args.graph_id, args.user_id)
|
||||
await _handle_graph_not_available(e, args, start_time)
|
||||
except GraphValidationError:
|
||||
await _handle_graph_validation_error(args)
|
||||
except Exception as e:
|
||||
elapsed = asyncio.get_event_loop().time() - start_time
|
||||
logger.error(
|
||||
@@ -176,6 +177,34 @@ async def _execute_graph(**kwargs):
|
||||
)
|
||||
|
||||
|
||||
async def _handle_graph_validation_error(args: "GraphExecutionJobArgs") -> None:
|
||||
logger.error(
|
||||
f"Scheduled Graph {args.graph_id} failed validation. Unscheduling graph"
|
||||
)
|
||||
if args.schedule_id:
|
||||
scheduler_client = get_scheduler_client()
|
||||
await scheduler_client.delete_schedule(
|
||||
schedule_id=args.schedule_id,
|
||||
user_id=args.user_id,
|
||||
)
|
||||
else:
|
||||
logger.error(
|
||||
f"Unable to unschedule graph: {args.graph_id} as this is an old job with no associated schedule_id please remove manually"
|
||||
)
|
||||
|
||||
|
||||
async def _handle_graph_not_available(
|
||||
e: Exception, args: "GraphExecutionJobArgs", start_time: float
|
||||
) -> None:
|
||||
elapsed = asyncio.get_event_loop().time() - start_time
|
||||
logger.warning(
|
||||
f"Scheduled execution blocked for deleted/archived graph {args.graph_id} "
|
||||
f"(user {args.user_id}) after {elapsed:.2f}s: {e}"
|
||||
)
|
||||
# Clean up orphaned schedules for this graph
|
||||
await _cleanup_orphaned_schedules_for_graph(args.graph_id, args.user_id)
|
||||
|
||||
|
||||
async def _cleanup_orphaned_schedules_for_graph(graph_id: str, user_id: str) -> None:
|
||||
"""
|
||||
Clean up orphaned schedules for a specific graph when execution fails with GraphNotAccessibleError.
|
||||
@@ -220,9 +249,11 @@ class Jobstores(Enum):
|
||||
|
||||
|
||||
class GraphExecutionJobArgs(BaseModel):
|
||||
schedule_id: str | None = None
|
||||
user_id: str
|
||||
graph_id: str
|
||||
graph_version: int
|
||||
agent_name: str | None = None
|
||||
cron: str
|
||||
input_data: BlockInput
|
||||
input_credentials: dict[str, CredentialsMetaInput] = Field(default_factory=dict)
|
||||
@@ -468,11 +499,14 @@ class Scheduler(AppService):
|
||||
logger.info(
|
||||
f"Scheduling job for user {user_id} with timezone {user_timezone} (cron: {cron})"
|
||||
)
|
||||
schedule_id = str(uuid.uuid4())
|
||||
|
||||
job_args = GraphExecutionJobArgs(
|
||||
schedule_id=schedule_id,
|
||||
user_id=user_id,
|
||||
graph_id=graph_id,
|
||||
graph_version=graph_version,
|
||||
agent_name=name,
|
||||
cron=cron,
|
||||
input_data=input_data,
|
||||
input_credentials=input_credentials,
|
||||
@@ -484,6 +518,7 @@ class Scheduler(AppService):
|
||||
trigger=CronTrigger.from_crontab(cron, timezone=user_timezone),
|
||||
jobstore=Jobstores.EXECUTION.value,
|
||||
replace_existing=True,
|
||||
id=schedule_id,
|
||||
)
|
||||
logger.info(
|
||||
f"Added job {job.id} with cron schedule '{cron}' in timezone {user_timezone}, input data: {input_data}"
|
||||
|
||||
@@ -42,7 +42,11 @@ from backend.util.clients import (
|
||||
get_database_manager_async_client,
|
||||
get_integration_credentials_store,
|
||||
)
|
||||
from backend.util.exceptions import GraphValidationError, NotFoundError
|
||||
from backend.util.exceptions import (
|
||||
GraphNotFoundError,
|
||||
GraphValidationError,
|
||||
NotFoundError,
|
||||
)
|
||||
from backend.util.logging import TruncatedLogger, is_structured_logging_enabled
|
||||
from backend.util.settings import Config
|
||||
from backend.util.type import convert
|
||||
@@ -516,7 +520,7 @@ async def validate_and_construct_node_execution_input(
|
||||
skip_access_check=True,
|
||||
)
|
||||
if not graph:
|
||||
raise NotFoundError(f"Graph #{graph_id} not found.")
|
||||
raise GraphNotFoundError(f"Graph #{graph_id} not found.")
|
||||
|
||||
# Validate that the user has permission to execute this graph
|
||||
# This checks both library membership and execution permissions,
|
||||
|
||||
@@ -106,10 +106,6 @@ async def get_graph_execution_results(
|
||||
graph_exec_id: str,
|
||||
api_key: APIKeyInfo = Security(require_permission(APIKeyPermission.READ_GRAPH)),
|
||||
) -> GraphExecutionResult:
|
||||
graph = await graph_db.get_graph(graph_id, user_id=api_key.user_id)
|
||||
if not graph:
|
||||
raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
|
||||
|
||||
graph_exec = await execution_db.get_graph_execution(
|
||||
user_id=api_key.user_id,
|
||||
execution_id=graph_exec_id,
|
||||
@@ -120,6 +116,13 @@ async def get_graph_execution_results(
|
||||
status_code=404, detail=f"Graph execution #{graph_exec_id} not found."
|
||||
)
|
||||
|
||||
if not await graph_db.get_graph(
|
||||
graph_id=graph_exec.graph_id,
|
||||
version=graph_exec.graph_version,
|
||||
user_id=api_key.user_id,
|
||||
):
|
||||
raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
|
||||
|
||||
return GraphExecutionResult(
|
||||
execution_id=graph_exec_id,
|
||||
status=graph_exec.status.value,
|
||||
|
||||
@@ -35,7 +35,6 @@ import backend.server.v2.library.routes
|
||||
import backend.server.v2.otto.routes
|
||||
import backend.server.v2.store.model
|
||||
import backend.server.v2.store.routes
|
||||
import backend.server.v2.turnstile.routes
|
||||
import backend.util.service
|
||||
import backend.util.settings
|
||||
from backend.blocks.llm import LlmModel
|
||||
@@ -281,11 +280,6 @@ app.include_router(
|
||||
app.include_router(
|
||||
backend.server.v2.otto.routes.router, tags=["v2", "otto"], prefix="/api/otto"
|
||||
)
|
||||
app.include_router(
|
||||
backend.server.v2.turnstile.routes.router,
|
||||
tags=["v2", "turnstile"],
|
||||
prefix="/api/turnstile",
|
||||
)
|
||||
|
||||
app.include_router(
|
||||
backend.server.routers.postmark.postmark.router,
|
||||
|
||||
@@ -803,7 +803,9 @@ async def create_new_graph(
|
||||
async def delete_graph(
|
||||
graph_id: str, user_id: Annotated[str, Security(get_user_id)]
|
||||
) -> DeleteGraphResponse:
|
||||
if active_version := await graph_db.get_graph(graph_id, user_id=user_id):
|
||||
if active_version := await graph_db.get_graph(
|
||||
graph_id=graph_id, version=None, user_id=user_id
|
||||
):
|
||||
await on_graph_deactivate(active_version, user_id=user_id)
|
||||
|
||||
return {"version_counts": await graph_db.delete_graph(graph_id, user_id=user_id)}
|
||||
@@ -883,7 +885,11 @@ async def set_graph_active_version(
|
||||
if not new_active_graph:
|
||||
raise HTTPException(404, f"Graph #{graph_id} v{new_active_version} not found")
|
||||
|
||||
current_active_graph = await graph_db.get_graph(graph_id, user_id=user_id)
|
||||
current_active_graph = await graph_db.get_graph(
|
||||
graph_id=graph_id,
|
||||
version=None,
|
||||
user_id=user_id,
|
||||
)
|
||||
|
||||
# Handle activation of the new graph first to ensure continuity
|
||||
await on_graph_activate(new_active_graph, user_id=user_id)
|
||||
@@ -1069,22 +1075,25 @@ async def get_graph_execution(
|
||||
graph_exec_id: str,
|
||||
user_id: Annotated[str, Security(get_user_id)],
|
||||
) -> execution_db.GraphExecution | execution_db.GraphExecutionWithNodes:
|
||||
graph = await graph_db.get_graph(graph_id=graph_id, user_id=user_id)
|
||||
if not graph:
|
||||
raise HTTPException(
|
||||
status_code=HTTP_404_NOT_FOUND, detail=f"Graph #{graph_id} not found"
|
||||
)
|
||||
|
||||
result = await execution_db.get_graph_execution(
|
||||
user_id=user_id,
|
||||
execution_id=graph_exec_id,
|
||||
include_node_executions=graph.user_id == user_id,
|
||||
include_node_executions=True,
|
||||
)
|
||||
if not result or result.graph_id != graph_id:
|
||||
raise HTTPException(
|
||||
status_code=404, detail=f"Graph execution #{graph_exec_id} not found."
|
||||
)
|
||||
|
||||
if not await graph_db.get_graph(
|
||||
graph_id=result.graph_id,
|
||||
version=result.graph_version,
|
||||
user_id=user_id,
|
||||
):
|
||||
raise HTTPException(
|
||||
status_code=HTTP_404_NOT_FOUND, detail=f"Graph #{graph_id} not found"
|
||||
)
|
||||
|
||||
# Apply feature flags to filter out disabled features
|
||||
result = await hide_activity_summary_if_disabled(result, user_id)
|
||||
|
||||
|
||||
@@ -7,6 +7,7 @@ from autogpt_libs.auth import get_user_id, requires_admin_user
|
||||
from fastapi import APIRouter, HTTPException, Security
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from backend.blocks.llm import LlmModel
|
||||
from backend.data.execution import (
|
||||
ExecutionStatus,
|
||||
GraphExecutionMeta,
|
||||
@@ -15,6 +16,8 @@ from backend.data.execution import (
|
||||
)
|
||||
from backend.data.model import GraphExecutionStats
|
||||
from backend.executor.activity_status_generator import (
|
||||
DEFAULT_SYSTEM_PROMPT,
|
||||
DEFAULT_USER_PROMPT,
|
||||
generate_activity_status_for_execution,
|
||||
)
|
||||
from backend.executor.manager import get_db_async_client
|
||||
@@ -30,12 +33,21 @@ class ExecutionAnalyticsRequest(BaseModel):
|
||||
created_after: Optional[datetime] = Field(
|
||||
None, description="Optional created date lower bound"
|
||||
)
|
||||
model_name: Optional[str] = Field(
|
||||
"gpt-4o-mini", description="Model to use for generation"
|
||||
)
|
||||
model_name: str = Field("gpt-4o-mini", description="Model to use for generation")
|
||||
batch_size: int = Field(
|
||||
10, description="Batch size for concurrent processing", le=25, ge=1
|
||||
)
|
||||
system_prompt: Optional[str] = Field(
|
||||
None, description="Custom system prompt (default: built-in prompt)"
|
||||
)
|
||||
user_prompt: Optional[str] = Field(
|
||||
None,
|
||||
description="Custom user prompt with {{GRAPH_NAME}} and {{EXECUTION_DATA}} placeholders (default: built-in prompt)",
|
||||
)
|
||||
skip_existing: bool = Field(
|
||||
True,
|
||||
description="Whether to skip executions that already have activity status and correctness score",
|
||||
)
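A hypothetical admin request body, only to illustrate the reshaped fields above; the graph_id is made up, and the placeholder names come from the user_prompt field description.

payload = {
    "graph_id": "00000000-0000-0000-0000-000000000000",  # made-up id
    "model_name": "gpt-4o-mini",
    "batch_size": 10,        # must satisfy ge=1, le=25
    "skip_existing": True,   # skip runs that already have an activity status and score
    "user_prompt": "Summarize what {{GRAPH_NAME}} did, given {{EXECUTION_DATA}}.",
}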
|
||||
|
||||
|
||||
class ExecutionAnalyticsResult(BaseModel):
|
||||
@@ -58,6 +70,19 @@ class ExecutionAnalyticsResponse(BaseModel):
|
||||
results: list[ExecutionAnalyticsResult]
|
||||
|
||||
|
||||
class ModelInfo(BaseModel):
|
||||
value: str
|
||||
label: str
|
||||
provider: str
|
||||
|
||||
|
||||
class ExecutionAnalyticsConfig(BaseModel):
|
||||
available_models: list[ModelInfo]
|
||||
default_system_prompt: str
|
||||
default_user_prompt: str
|
||||
recommended_model: str
|
||||
|
||||
|
||||
router = APIRouter(
|
||||
prefix="/admin",
|
||||
tags=["admin", "execution_analytics"],
|
||||
@@ -65,6 +90,100 @@ router = APIRouter(
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/execution_analytics/config",
|
||||
response_model=ExecutionAnalyticsConfig,
|
||||
summary="Get Execution Analytics Configuration",
|
||||
)
|
||||
async def get_execution_analytics_config(
|
||||
admin_user_id: str = Security(get_user_id),
|
||||
):
|
||||
"""
|
||||
Get the configuration for execution analytics including:
|
||||
- Available AI models with metadata
|
||||
- Default system and user prompts
|
||||
- Recommended model selection
|
||||
"""
|
||||
logger.info(f"Admin user {admin_user_id} requesting execution analytics config")
|
||||
|
||||
# Generate model list from LlmModel enum with provider information
|
||||
available_models = []
|
||||
|
||||
# Function to generate friendly display names from model values
|
||||
def generate_model_label(model: LlmModel) -> str:
|
||||
"""Generate a user-friendly label from the model enum value."""
|
||||
value = model.value
|
||||
|
||||
# For all models, convert underscores/hyphens to spaces and title case
|
||||
# e.g., "gpt-4-turbo" -> "GPT 4 Turbo", "claude-3-haiku-20240307" -> "Claude 3 Haiku"
|
||||
parts = value.replace("_", "-").split("-")
|
||||
|
||||
# Handle provider prefixes (e.g., "google/", "x-ai/")
|
||||
if "/" in value:
|
||||
_, model_name = value.split("/", 1)
|
||||
parts = model_name.replace("_", "-").split("-")
|
||||
|
||||
# Capitalize and format parts
|
||||
formatted_parts = []
|
||||
for part in parts:
|
||||
# Skip date-like patterns - check for various date formats:
|
||||
# - Long dates like "20240307" (8 digits)
|
||||
# - Year components like "2024", "2025" (4 digit years >= 2020)
|
||||
# - Month/day components like "04", "16" when they appear to be dates
|
||||
if part.isdigit():
|
||||
if len(part) >= 8: # Long date format like "20240307"
|
||||
continue
|
||||
elif len(part) == 4 and int(part) >= 2020: # Year like "2024", "2025"
|
||||
continue
|
||||
elif len(part) <= 2 and int(part) <= 31: # Month/day like "04", "16"
|
||||
# Skip if this looks like a date component (basic heuristic)
|
||||
continue
|
||||
# Keep version numbers as-is
|
||||
if part.replace(".", "").isdigit():
|
||||
formatted_parts.append(part)
|
||||
# Capitalize normal words
|
||||
else:
|
||||
formatted_parts.append(
|
||||
part.upper()
|
||||
if part.upper() in ["GPT", "LLM", "API", "V0"]
|
||||
else part.capitalize()
|
||||
)
|
||||
|
||||
model_name = " ".join(formatted_parts)
|
||||
|
||||
# Format provider name for better display
|
||||
provider_name = model.provider.replace("_", " ").title()
|
||||
|
||||
# Return with provider prefix for clarity
|
||||
return f"{provider_name}: {model_name}"
|
||||
|
||||
# Include all LlmModel values (no more filtering by hardcoded list)
|
||||
recommended_model = LlmModel.GPT4O_MINI.value
|
||||
for model in LlmModel:
|
||||
label = generate_model_label(model)
|
||||
# Add "(Recommended)" suffix to the recommended model
|
||||
if model.value == recommended_model:
|
||||
label += " (Recommended)"
|
||||
|
||||
available_models.append(
|
||||
ModelInfo(
|
||||
value=model.value,
|
||||
label=label,
|
||||
provider=model.provider,
|
||||
)
|
||||
)
|
||||
|
||||
# Sort models by provider and name for better UX
|
||||
available_models.sort(key=lambda x: (x.provider, x.label))
|
||||
|
||||
return ExecutionAnalyticsConfig(
|
||||
available_models=available_models,
|
||||
default_system_prompt=DEFAULT_SYSTEM_PROMPT,
|
||||
default_user_prompt=DEFAULT_USER_PROMPT,
|
||||
recommended_model=recommended_model,
|
||||
)
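Roughly, the endpoint above returns a payload shaped like the sketch below. The exact labels and provider strings depend on the LlmModel enum, which this diff does not show, so treat the concrete values as assumptions.

example_config = {
    "recommended_model": "gpt-4o-mini",
    "available_models": [
        {
            "value": "gpt-4o-mini",
            # provider title-cased, date-like parts stripped, "(Recommended)" appended
            "label": "Openai: GPT 4o Mini (Recommended)",  # assumed provider string
            "provider": "openai",
        },
        # ...one entry per LlmModel member, sorted by (provider, label)
    ],
    "default_system_prompt": "...",  # DEFAULT_SYSTEM_PROMPT
    "default_user_prompt": "...",    # DEFAULT_USER_PROMPT
}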
|
||||
|
||||
|
||||
@router.post(
|
||||
"/execution_analytics",
|
||||
response_model=ExecutionAnalyticsResponse,
|
||||
@@ -100,6 +219,7 @@ async def generate_execution_analytics(
|
||||
# Fetch executions to process
|
||||
executions = await get_graph_executions(
|
||||
graph_id=request.graph_id,
|
||||
graph_version=request.graph_version,
|
||||
user_id=request.user_id,
|
||||
created_time_gte=request.created_after,
|
||||
statuses=[
|
||||
@@ -113,21 +233,20 @@ async def generate_execution_analytics(
|
||||
f"Found {len(executions)} total executions for graph {request.graph_id}"
|
||||
)
|
||||
|
||||
# Filter executions that need analytics generation (missing activity_status or correctness_score)
|
||||
# Filter executions that need analytics generation
|
||||
executions_to_process = []
|
||||
for execution in executions:
|
||||
# Skip if we should skip existing analytics and both activity_status and correctness_score exist
|
||||
if (
|
||||
not execution.stats
|
||||
or not execution.stats.activity_status
|
||||
or execution.stats.correctness_score is None
|
||||
request.skip_existing
|
||||
and execution.stats
|
||||
and execution.stats.activity_status
|
||||
and execution.stats.correctness_score is not None
|
||||
):
|
||||
continue
|
||||
|
||||
# If version is specified, filter by it
|
||||
if (
|
||||
request.graph_version is None
|
||||
or execution.graph_version == request.graph_version
|
||||
):
|
||||
executions_to_process.append(execution)
|
||||
# Add execution to processing list
|
||||
executions_to_process.append(execution)
|
||||
|
||||
logger.info(
|
||||
f"Found {len(executions_to_process)} executions needing analytics generation"
|
||||
@@ -152,9 +271,7 @@ async def generate_execution_analytics(
|
||||
f"Processing batch {batch_idx + 1}/{total_batches} with {len(batch)} executions"
|
||||
)
|
||||
|
||||
batch_results = await _process_batch(
|
||||
batch, request.model_name or "gpt-4o-mini", db_client
|
||||
)
|
||||
batch_results = await _process_batch(batch, request, db_client)
|
||||
|
||||
for result in batch_results:
|
||||
results.append(result)
|
||||
@@ -212,7 +329,7 @@ async def generate_execution_analytics(
|
||||
|
||||
|
||||
async def _process_batch(
|
||||
executions, model_name: str, db_client
|
||||
executions, request: ExecutionAnalyticsRequest, db_client
|
||||
) -> list[ExecutionAnalyticsResult]:
|
||||
"""Process a batch of executions concurrently."""
|
||||
|
||||
@@ -237,8 +354,11 @@ async def _process_batch(
|
||||
db_client=db_client,
|
||||
user_id=execution.user_id,
|
||||
execution_status=execution.status,
|
||||
model_name=model_name, # Pass model name parameter
|
||||
model_name=request.model_name,
|
||||
skip_feature_flag=True, # Admin endpoint bypasses feature flags
|
||||
system_prompt=request.system_prompt or DEFAULT_SYSTEM_PROMPT,
|
||||
user_prompt=request.user_prompt or DEFAULT_USER_PROMPT,
|
||||
skip_existing=request.skip_existing,
|
||||
)
|
||||
|
||||
if not activity_response:
|
||||
|
||||
@@ -27,7 +27,9 @@ class OttoService:
|
||||
return None
|
||||
|
||||
try:
|
||||
graph = await graph_db.get_graph(request.graph_id, user_id=user_id)
|
||||
graph = await graph_db.get_graph(
|
||||
graph_id=request.graph_id, version=None, user_id=user_id
|
||||
)
|
||||
if not graph:
|
||||
return None
|
||||
|
||||
|
||||
@@ -1343,6 +1343,7 @@ async def get_agent(store_listing_version_id: str) -> GraphModel:
|
||||
graph = await get_graph(
|
||||
graph_id=store_listing_version.agentGraphId,
|
||||
version=store_listing_version.agentGraphVersion,
|
||||
user_id=None,
|
||||
for_export=True,
|
||||
)
|
||||
if not graph:
|
||||
|
||||
@@ -542,7 +542,9 @@ async def generate_image(
|
||||
Returns:
|
||||
JSONResponse: JSON containing the URL of the generated image
|
||||
"""
|
||||
agent = await backend.data.graph.get_graph(agent_id, user_id=user_id)
|
||||
agent = await backend.data.graph.get_graph(
|
||||
graph_id=agent_id, version=None, user_id=user_id
|
||||
)
|
||||
|
||||
if not agent:
|
||||
raise fastapi.HTTPException(
|
||||
|
||||
@@ -1,30 +0,0 @@
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class TurnstileVerifyRequest(BaseModel):
|
||||
"""Request model for verifying a Turnstile token."""
|
||||
|
||||
token: str = Field(description="The Turnstile token to verify")
|
||||
action: Optional[str] = Field(
|
||||
default=None, description="The action that the user is attempting to perform"
|
||||
)
|
||||
|
||||
|
||||
class TurnstileVerifyResponse(BaseModel):
|
||||
"""Response model for the Turnstile verification endpoint."""
|
||||
|
||||
success: bool = Field(description="Whether the token verification was successful")
|
||||
error: Optional[str] = Field(
|
||||
default=None, description="Error message if verification failed"
|
||||
)
|
||||
challenge_timestamp: Optional[str] = Field(
|
||||
default=None, description="Timestamp of the challenge (ISO format)"
|
||||
)
|
||||
hostname: Optional[str] = Field(
|
||||
default=None, description="Hostname of the site where the challenge was solved"
|
||||
)
|
||||
action: Optional[str] = Field(
|
||||
default=None, description="The action associated with this verification"
|
||||
)
|
||||
@@ -1,112 +0,0 @@
|
||||
import logging
|
||||
|
||||
import aiohttp
|
||||
from fastapi import APIRouter
|
||||
|
||||
from backend.util.settings import Settings
|
||||
|
||||
from .models import TurnstileVerifyRequest, TurnstileVerifyResponse
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter()
|
||||
settings = Settings()
|
||||
|
||||
|
||||
@router.post(
|
||||
"/verify", response_model=TurnstileVerifyResponse, summary="Verify Turnstile Token"
|
||||
)
|
||||
async def verify_turnstile_token(
|
||||
request: TurnstileVerifyRequest,
|
||||
) -> TurnstileVerifyResponse:
|
||||
"""
|
||||
Verify a Cloudflare Turnstile token.
|
||||
This endpoint verifies a token returned by the Cloudflare Turnstile challenge
|
||||
on the client side. It returns whether the verification was successful.
|
||||
"""
|
||||
logger.info(f"Verifying Turnstile token for action: {request.action}")
|
||||
return await verify_token(request)
|
||||
|
||||
|
||||
async def verify_token(request: TurnstileVerifyRequest) -> TurnstileVerifyResponse:
|
||||
"""
|
||||
Verify a Cloudflare Turnstile token by making a request to the Cloudflare API.
|
||||
"""
|
||||
# Get the secret key from settings
|
||||
turnstile_secret_key = settings.secrets.turnstile_secret_key
|
||||
turnstile_verify_url = settings.secrets.turnstile_verify_url
|
||||
|
||||
if not turnstile_secret_key:
|
||||
logger.error(
|
||||
"Turnstile secret key missing. Set TURNSTILE_SECRET_KEY to enable verification."
|
||||
)
|
||||
return TurnstileVerifyResponse(
|
||||
success=False,
|
||||
error="CONFIGURATION_ERROR",
|
||||
challenge_timestamp=None,
|
||||
hostname=None,
|
||||
action=None,
|
||||
)
|
||||
|
||||
try:
|
||||
async with aiohttp.ClientSession() as session:
|
||||
payload = {
|
||||
"secret": turnstile_secret_key,
|
||||
"response": request.token,
|
||||
}
|
||||
|
||||
if request.action:
|
||||
payload["action"] = request.action
|
||||
|
||||
logger.debug(f"Verifying Turnstile token with action: {request.action}")
|
||||
|
||||
async with session.post(
|
||||
turnstile_verify_url,
|
||||
data=payload,
|
||||
timeout=aiohttp.ClientTimeout(total=10),
|
||||
) as response:
|
||||
if response.status != 200:
|
||||
error_text = await response.text()
|
||||
logger.error(f"Turnstile API error: {error_text}")
|
||||
return TurnstileVerifyResponse(
|
||||
success=False,
|
||||
error=f"API_ERROR: {response.status}",
|
||||
challenge_timestamp=None,
|
||||
hostname=None,
|
||||
action=None,
|
||||
)
|
||||
|
||||
data = await response.json()
|
||||
logger.debug(f"Turnstile API response: {data}")
|
||||
|
||||
# Parse the response and return a structured object
|
||||
return TurnstileVerifyResponse(
|
||||
success=data.get("success", False),
|
||||
error=(
|
||||
data.get("error-codes", None)[0]
|
||||
if data.get("error-codes")
|
||||
else None
|
||||
),
|
||||
challenge_timestamp=data.get("challenge_timestamp"),
|
||||
hostname=data.get("hostname"),
|
||||
action=data.get("action"),
|
||||
)
|
||||
|
||||
except aiohttp.ClientError as e:
|
||||
logger.error(f"Connection error to Turnstile API: {str(e)}")
|
||||
return TurnstileVerifyResponse(
|
||||
success=False,
|
||||
error=f"CONNECTION_ERROR: {str(e)}",
|
||||
challenge_timestamp=None,
|
||||
hostname=None,
|
||||
action=None,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Unexpected error in Turnstile verification: {str(e)}")
|
||||
return TurnstileVerifyResponse(
|
||||
success=False,
|
||||
error=f"UNEXPECTED_ERROR: {str(e)}",
|
||||
challenge_timestamp=None,
|
||||
hostname=None,
|
||||
action=None,
|
||||
)
|
||||
@@ -1,32 +0,0 @@
|
||||
import fastapi
|
||||
import fastapi.testclient
|
||||
import pytest_mock
|
||||
|
||||
import backend.server.v2.turnstile.routes as turnstile_routes
|
||||
|
||||
app = fastapi.FastAPI()
|
||||
app.include_router(turnstile_routes.router)
|
||||
|
||||
client = fastapi.testclient.TestClient(app)
|
||||
|
||||
|
||||
def test_verify_turnstile_token_no_secret_key(mocker: pytest_mock.MockFixture) -> None:
|
||||
"""Test token verification without secret key configured"""
|
||||
# Mock the settings with no secret key
|
||||
mock_settings = mocker.patch("backend.server.v2.turnstile.routes.settings")
|
||||
mock_settings.secrets.turnstile_secret_key = None
|
||||
|
||||
request_data = {"token": "test_token", "action": "login"}
|
||||
response = client.post("/verify", json=request_data)
|
||||
|
||||
assert response.status_code == 200
|
||||
response_data = response.json()
|
||||
assert response_data["success"] is False
|
||||
assert response_data["error"] == "CONFIGURATION_ERROR"
|
||||
|
||||
|
||||
def test_verify_turnstile_token_invalid_request() -> None:
|
||||
"""Test token verification with invalid request data"""
|
||||
# Missing token
|
||||
response = client.post("/verify", json={"action": "login"})
|
||||
assert response.status_code == 422
|
||||
@@ -5,13 +5,9 @@ class BlockError(Exception):
"""An error occurred during the running of a block"""

def __init__(self, message: str, block_name: str, block_id: str) -> None:
super().__init__(message)
self.message = message
self.block_name = block_name
self.block_id = block_id

def __str__(self):
return f"raised by {self.block_name} with message: {self.message}. block_id: {self.block_id}"
super().__init__(
f"raised by {block_name} with message: {message}. block_id: {block_id}"
)


class BlockInputError(BlockError, ValueError):
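A quick check of what the refactor above changes, as a hedged sketch (the message, block name, and id are made up): the formatted text is now baked into Exception.__init__, so str() no longer needs a __str__ override, though the message/block_name/block_id attributes are no longer set.

err = BlockError("timeout", "HttpRequestBlock", "b-123")  # hypothetical values
assert str(err) == "raised by HttpRequestBlock with message: timeout. block_id: b-123"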
|
||||
@@ -38,6 +34,10 @@ class NotFoundError(ValueError):
|
||||
"""The requested record was not found, resulting in an error condition"""
|
||||
|
||||
|
||||
class GraphNotFoundError(ValueError):
|
||||
"""The requested Agent Graph was not found, resulting in an error condition"""
|
||||
|
||||
|
||||
class NeedConfirmation(Exception):
|
||||
"""The user must explicitly confirm that they want to proceed"""
|
||||
|
||||
|
||||
@@ -537,16 +537,6 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
|
||||
description="The secret key to use for the unsubscribe user by token",
|
||||
)
|
||||
|
||||
# Cloudflare Turnstile credentials
|
||||
turnstile_secret_key: str = Field(
|
||||
default="",
|
||||
description="Cloudflare Turnstile backend secret key",
|
||||
)
|
||||
turnstile_verify_url: str = Field(
|
||||
default="https://challenges.cloudflare.com/turnstile/v0/siteverify",
|
||||
description="Cloudflare Turnstile verify URL",
|
||||
)
|
||||
|
||||
# OAuth server credentials for integrations
|
||||
# --8<-- [start:OAuthServerCredentialsExample]
|
||||
github_client_id: str = Field(default="", description="GitHub OAuth client ID")
|
||||
|
||||
72
autogpt_platform/backend/poetry.lock
generated
@@ -1240,14 +1240,14 @@ tests = ["coverage", "coveralls", "dill", "mock", "nose"]
|
||||
|
||||
[[package]]
|
||||
name = "faker"
|
||||
version = "37.8.0"
|
||||
version = "38.2.0"
|
||||
description = "Faker is a Python package that generates fake data for you."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
python-versions = ">=3.10"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "faker-37.8.0-py3-none-any.whl", hash = "sha256:b08233118824423b5fc239f7dd51f145e7018082b4164f8da6a9994e1f1ae793"},
|
||||
{file = "faker-37.8.0.tar.gz", hash = "sha256:090bb5abbec2b30949a95ce1ba6b20d1d0ed222883d63483a0d4be4a970d6fb8"},
|
||||
{file = "faker-38.2.0-py3-none-any.whl", hash = "sha256:35fe4a0a79dee0dc4103a6083ee9224941e7d3594811a50e3969e547b0d2ee65"},
|
||||
{file = "faker-38.2.0.tar.gz", hash = "sha256:20672803db9c7cb97f9b56c18c54b915b6f1d8991f63d1d673642dc43f5ce7ab"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -4165,14 +4165,14 @@ test = ["betamax (>=0.8,<0.9)", "pytest (>=2.7.3)", "urllib3 (==1.26.*)"]
|
||||
|
||||
[[package]]
|
||||
name = "pre-commit"
|
||||
version = "4.3.0"
|
||||
version = "4.4.0"
|
||||
description = "A framework for managing and maintaining multi-language pre-commit hooks."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
python-versions = ">=3.10"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "pre_commit-4.3.0-py2.py3-none-any.whl", hash = "sha256:2b0747ad7e6e967169136edffee14c16e148a778a54e4f967921aa1ebf2308d8"},
|
||||
{file = "pre_commit-4.3.0.tar.gz", hash = "sha256:499fe450cc9d42e9d58e606262795ecb64dd05438943c62b66f6a8673da30b16"},
|
||||
{file = "pre_commit-4.4.0-py2.py3-none-any.whl", hash = "sha256:b35ea52957cbf83dcc5d8ee636cbead8624e3a15fbfa61a370e42158ac8a5813"},
|
||||
{file = "pre_commit-4.4.0.tar.gz", hash = "sha256:f0233ebab440e9f17cabbb558706eb173d19ace965c68cdce2c081042b4fab15"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -4913,14 +4913,14 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "pyright"
|
||||
version = "1.1.406"
|
||||
version = "1.1.407"
|
||||
description = "Command line wrapper for pyright"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "pyright-1.1.406-py3-none-any.whl", hash = "sha256:1d81fb43c2407bf566e97e57abb01c811973fdb21b2df8df59f870f688bdca71"},
|
||||
{file = "pyright-1.1.406.tar.gz", hash = "sha256:c4872bc58c9643dac09e8a2e74d472c62036910b3bd37a32813989ef7576ea2c"},
|
||||
{file = "pyright-1.1.407-py3-none-any.whl", hash = "sha256:6dd419f54fcc13f03b52285796d65e639786373f433e243f8b94cf93a7444d21"},
|
||||
{file = "pyright-1.1.407.tar.gz", hash = "sha256:099674dba5c10489832d4a4b2d302636152a9a42d317986c38474c76fe562262"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -5765,31 +5765,31 @@ pyasn1 = ">=0.1.3"
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.13.3"
|
||||
version = "0.14.5"
|
||||
description = "An extremely fast Python linter and code formatter, written in Rust."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "ruff-0.13.3-py3-none-linux_armv6l.whl", hash = "sha256:311860a4c5e19189c89d035638f500c1e191d283d0cc2f1600c8c80d6dcd430c"},
|
||||
{file = "ruff-0.13.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:2bdad6512fb666b40fcadb65e33add2b040fc18a24997d2e47fee7d66f7fcae2"},
|
||||
{file = "ruff-0.13.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fc6fa4637284708d6ed4e5e970d52fc3b76a557d7b4e85a53013d9d201d93286"},
|
||||
{file = "ruff-0.13.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c9e6469864f94a98f412f20ea143d547e4c652f45e44f369d7b74ee78185838"},
|
||||
{file = "ruff-0.13.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5bf62b705f319476c78891e0e97e965b21db468b3c999086de8ffb0d40fd2822"},
|
||||
{file = "ruff-0.13.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78cc1abed87ce40cb07ee0667ce99dbc766c9f519eabfd948ed87295d8737c60"},
|
||||
{file = "ruff-0.13.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4fb75e7c402d504f7a9a259e0442b96403fa4a7310ffe3588d11d7e170d2b1e3"},
|
||||
{file = "ruff-0.13.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:17b951f9d9afb39330b2bdd2dd144ce1c1335881c277837ac1b50bfd99985ed3"},
|
||||
{file = "ruff-0.13.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6052f8088728898e0a449f0dde8fafc7ed47e4d878168b211977e3e7e854f662"},
|
||||
{file = "ruff-0.13.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc742c50f4ba72ce2a3be362bd359aef7d0d302bf7637a6f942eaa763bd292af"},
|
||||
{file = "ruff-0.13.3-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:8e5640349493b378431637019366bbd73c927e515c9c1babfea3e932f5e68e1d"},
|
||||
{file = "ruff-0.13.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6b139f638a80eae7073c691a5dd8d581e0ba319540be97c343d60fb12949c8d0"},
|
||||
{file = "ruff-0.13.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:6b547def0a40054825de7cfa341039ebdfa51f3d4bfa6a0772940ed351d2746c"},
|
||||
{file = "ruff-0.13.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9cc48a3564423915c93573f1981d57d101e617839bef38504f85f3677b3a0a3e"},
|
||||
{file = "ruff-0.13.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1a993b17ec03719c502881cb2d5f91771e8742f2ca6de740034433a97c561989"},
|
||||
{file = "ruff-0.13.3-py3-none-win32.whl", hash = "sha256:f14e0d1fe6460f07814d03c6e32e815bff411505178a1f539a38f6097d3e8ee3"},
|
||||
{file = "ruff-0.13.3-py3-none-win_amd64.whl", hash = "sha256:621e2e5812b691d4f244638d693e640f188bacbb9bc793ddd46837cea0503dd2"},
|
||||
{file = "ruff-0.13.3-py3-none-win_arm64.whl", hash = "sha256:9e9e9d699841eaf4c2c798fa783df2fabc680b72059a02ca0ed81c460bc58330"},
|
||||
{file = "ruff-0.13.3.tar.gz", hash = "sha256:5b0ba0db740eefdfbcce4299f49e9eaefc643d4d007749d77d047c2bab19908e"},
|
||||
{file = "ruff-0.14.5-py3-none-linux_armv6l.whl", hash = "sha256:f3b8248123b586de44a8018bcc9fefe31d23dda57a34e6f0e1e53bd51fd63594"},
|
||||
{file = "ruff-0.14.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:f7a75236570318c7a30edd7f5491945f0169de738d945ca8784500b517163a72"},
|
||||
{file = "ruff-0.14.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:6d146132d1ee115f8802356a2dc9a634dbf58184c51bff21f313e8cd1c74899a"},
|
||||
{file = "ruff-0.14.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2380596653dcd20b057794d55681571a257a42327da8894b93bbd6111aa801f"},
|
||||
{file = "ruff-0.14.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2d1fa985a42b1f075a098fa1ab9d472b712bdb17ad87a8ec86e45e7fa6273e68"},
|
||||
{file = "ruff-0.14.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88f0770d42b7fa02bbefddde15d235ca3aa24e2f0137388cc15b2dcbb1f7c7a7"},
|
||||
{file = "ruff-0.14.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3676cb02b9061fee7294661071c4709fa21419ea9176087cb77e64410926eb78"},
|
||||
{file = "ruff-0.14.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b595bedf6bc9cab647c4a173a61acf4f1ac5f2b545203ba82f30fcb10b0318fb"},
|
||||
{file = "ruff-0.14.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f55382725ad0bdb2e8ee2babcbbfb16f124f5a59496a2f6a46f1d9d99d93e6e2"},
|
||||
{file = "ruff-0.14.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7497d19dce23976bdaca24345ae131a1d38dcfe1b0850ad8e9e6e4fa321a6e19"},
|
||||
{file = "ruff-0.14.5-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:410e781f1122d6be4f446981dd479470af86537fb0b8857f27a6e872f65a38e4"},
|
||||
{file = "ruff-0.14.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c01be527ef4c91a6d55e53b337bfe2c0f82af024cc1a33c44792d6844e2331e1"},
|
||||
{file = "ruff-0.14.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f66e9bb762e68d66e48550b59c74314168ebb46199886c5c5aa0b0fbcc81b151"},
|
||||
{file = "ruff-0.14.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:d93be8f1fa01022337f1f8f3bcaa7ffee2d0b03f00922c45c2207954f351f465"},
|
||||
{file = "ruff-0.14.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:c135d4b681f7401fe0e7312017e41aba9b3160861105726b76cfa14bc25aa367"},
|
||||
{file = "ruff-0.14.5-py3-none-win32.whl", hash = "sha256:c83642e6fccfb6dea8b785eb9f456800dcd6a63f362238af5fc0c83d027dd08b"},
|
||||
{file = "ruff-0.14.5-py3-none-win_amd64.whl", hash = "sha256:9d55d7af7166f143c94eae1db3312f9ea8f95a4defef1979ed516dbb38c27621"},
|
||||
{file = "ruff-0.14.5-py3-none-win_arm64.whl", hash = "sha256:4b700459d4649e2594b31f20a9de33bc7c19976d4746d8d0798ad959621d64a4"},
|
||||
{file = "ruff-0.14.5.tar.gz", hash = "sha256:8d3b48d7d8aad423d3137af7ab6c8b1e38e4de104800f0d596990f6ada1a9fc1"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -5823,14 +5823,14 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "sentry-sdk"
|
||||
version = "2.42.1"
|
||||
version = "2.44.0"
|
||||
description = "Python client for Sentry (https://sentry.io)"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "sentry_sdk-2.42.1-py2.py3-none-any.whl", hash = "sha256:f8716b50c927d3beb41bc88439dc6bcd872237b596df5b14613e2ade104aee02"},
|
||||
{file = "sentry_sdk-2.42.1.tar.gz", hash = "sha256:8598cc6edcfe74cb8074ba6a7c15338cdee93d63d3eb9b9943b4b568354ad5b6"},
|
||||
{file = "sentry_sdk-2.44.0-py2.py3-none-any.whl", hash = "sha256:9e36a0372b881e8f92fdbff4564764ce6cec4b7f25424d0a3a8d609c9e4651a7"},
|
||||
{file = "sentry_sdk-2.44.0.tar.gz", hash = "sha256:5b1fe54dfafa332e900b07dd8f4dfe35753b64e78e7d9b1655a28fd3065e2493"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -5870,11 +5870,13 @@ launchdarkly = ["launchdarkly-server-sdk (>=9.8.0)"]
|
||||
litellm = ["litellm (>=1.77.5)"]
|
||||
litestar = ["litestar (>=2.0.0)"]
|
||||
loguru = ["loguru (>=0.5)"]
|
||||
mcp = ["mcp (>=1.15.0)"]
|
||||
openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"]
|
||||
openfeature = ["openfeature-sdk (>=0.7.1)"]
|
||||
opentelemetry = ["opentelemetry-distro (>=0.35b0)"]
|
||||
opentelemetry-experimental = ["opentelemetry-distro"]
|
||||
pure-eval = ["asttokens", "executing", "pure_eval"]
|
||||
pydantic-ai = ["pydantic-ai (>=1.0.0)"]
|
||||
pymongo = ["pymongo (>=3.1)"]
|
||||
pyspark = ["pyspark (>=2.4.4)"]
|
||||
quart = ["blinker (>=1.1)", "quart (>=0.16.1)"]
|
||||
@@ -7277,4 +7279,4 @@ cffi = ["cffi (>=1.11)"]
|
||||
[metadata]
|
||||
lock-version = "2.1"
|
||||
python-versions = ">=3.10,<3.14"
|
||||
content-hash = "4d7134993527a5ff91b531a4e28b36bcab7cef2db18cf00702a950e34ae9ea1d"
|
||||
content-hash = "13b191b2a1989d3321ff713c66ff6f5f4f3b82d15df4d407e0e5dbf87d7522c4"
|
||||
|
||||
@@ -58,7 +58,7 @@ python-multipart = "^0.0.20"
|
||||
redis = "^6.2.0"
|
||||
regex = "^2025.9.18"
|
||||
replicate = "^1.0.6"
|
||||
sentry-sdk = {extras = ["anthropic", "fastapi", "launchdarkly", "openai", "sqlalchemy"], version = "^2.33.2"}
|
||||
sentry-sdk = {extras = ["anthropic", "fastapi", "launchdarkly", "openai", "sqlalchemy"], version = "^2.44.0"}
|
||||
sqlalchemy = "^2.0.40"
|
||||
strenum = "^0.4.9"
|
||||
stripe = "^11.5.0"
|
||||
@@ -86,16 +86,16 @@ stagehand = "^0.5.1"
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
aiohappyeyeballs = "^2.6.1"
|
||||
black = "^24.10.0"
|
||||
faker = "^37.8.0"
|
||||
faker = "^38.2.0"
|
||||
httpx = "^0.28.1"
|
||||
isort = "^5.13.2"
|
||||
poethepoet = "^0.37.0"
|
||||
pre-commit = "^4.3.0"
|
||||
pyright = "^1.1.406"
|
||||
pre-commit = "^4.4.0"
|
||||
pyright = "^1.1.407"
|
||||
pytest-mock = "^3.15.1"
|
||||
pytest-watcher = "^0.4.2"
|
||||
requests = "^2.32.5"
|
||||
ruff = "^0.13.3"
|
||||
ruff = "^0.14.5"
|
||||
# NOTE: please insert new dependencies in their alphabetical location
|
||||
|
||||
[build-system]
|
||||
|
||||
@@ -1,23 +1,32 @@
|
||||
NEXT_PUBLIC_SUPABASE_URL=http://localhost:8000
|
||||
NEXT_PUBLIC_SUPABASE_ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
|
||||
# Supabase
|
||||
NEXT_PUBLIC_SUPABASE_URL=http://localhost:8000
|
||||
NEXT_PUBLIC_SUPABASE_ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
|
||||
|
||||
NEXT_PUBLIC_AGPT_SERVER_URL=http://localhost:8006/api
|
||||
NEXT_PUBLIC_AGPT_WS_SERVER_URL=ws://localhost:8001/ws
|
||||
NEXT_PUBLIC_FRONTEND_BASE_URL=http://localhost:3000
|
||||
# Back-end services
|
||||
NEXT_PUBLIC_AGPT_SERVER_URL=http://localhost:8006/api
|
||||
NEXT_PUBLIC_AGPT_WS_SERVER_URL=ws://localhost:8001/ws
|
||||
NEXT_PUBLIC_FRONTEND_BASE_URL=http://localhost:3000
|
||||
|
||||
NEXT_PUBLIC_APP_ENV=local
|
||||
NEXT_PUBLIC_BEHAVE_AS=LOCAL
|
||||
# Env config
|
||||
NEXT_PUBLIC_APP_ENV=local
|
||||
NEXT_PUBLIC_BEHAVE_AS=LOCAL
|
||||
|
||||
NEXT_PUBLIC_LAUNCHDARKLY_ENABLED=false
|
||||
NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID=687ab1372f497809b131e06e
|
||||
# Feature flags
|
||||
NEXT_PUBLIC_LAUNCHDARKLY_ENABLED=false
|
||||
NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID=687ab1372f497809b131e06e
|
||||
|
||||
NEXT_PUBLIC_TURNSTILE=disabled
|
||||
NEXT_PUBLIC_REACT_QUERY_DEVTOOL=true
|
||||
|
||||
NEXT_PUBLIC_GA_MEASUREMENT_ID=G-FH2XK2W4GN
|
||||
# Debugging
|
||||
NEXT_PUBLIC_REACT_QUERY_DEVTOOL=true
|
||||
NEXT_PUBLIC_GA_MEASUREMENT_ID=G-FH2XK2W4GN
|
||||
|
||||
# Google Drive Picker
|
||||
NEXT_PUBLIC_GOOGLE_CLIENT_ID=
|
||||
NEXT_PUBLIC_GOOGLE_API_KEY=
|
||||
NEXT_PUBLIC_GOOGLE_APP_ID=
|
||||
|
||||
|
||||
# Cloudflare CAPTCHA
|
||||
NEXT_PUBLIC_CLOUDFLARE_TURNSTILE_SITE_KEY=
|
||||
NEXT_PUBLIC_TURNSTILE=disabled
|
||||
|
||||
# PR previews
|
||||
NEXT_PUBLIC_PREVIEW_STEALING_DEV=
|
||||
@@ -34,7 +34,8 @@ const nextConfig = {
|
||||
},
|
||||
],
|
||||
},
|
||||
output: "standalone",
|
||||
// Vercel has its own deployment mechanism and doesn't need standalone mode
|
||||
...(process.env.VERCEL ? {} : { output: "standalone" }),
|
||||
transpilePackages: ["geist"],
|
||||
};
|
||||
|
||||
@@ -80,10 +81,10 @@ export default isDevelopmentBuild
|
||||
|
||||
// This helps Sentry with sourcemaps... https://docs.sentry.io/platforms/javascript/guides/nextjs/sourcemaps/
|
||||
sourcemaps: {
|
||||
disable: false, // Source maps are enabled by default
|
||||
assets: ["**/*.js", "**/*.js.map"], // Specify which files to upload
|
||||
ignore: ["**/node_modules/**"], // Files to exclude
|
||||
deleteSourcemapsAfterUpload: true, // Security: delete after upload
|
||||
disable: false,
|
||||
assets: [".next/**/*.js", ".next/**/*.js.map"],
|
||||
ignore: ["**/node_modules/**"],
|
||||
deleteSourcemapsAfterUpload: false, // Source is public anyway :)
|
||||
},
|
||||
|
||||
// Automatically tree-shake Sentry logger statements to reduce bundle size
|
||||
|
||||
@@ -30,7 +30,6 @@
|
||||
"dependencies": {
|
||||
"@faker-js/faker": "10.0.0",
|
||||
"@hookform/resolvers": "5.2.2",
|
||||
"@marsidev/react-turnstile": "1.3.1",
|
||||
"@next/third-parties": "15.4.6",
|
||||
"@phosphor-icons/react": "2.1.10",
|
||||
"@radix-ui/react-alert-dialog": "1.1.15",
|
||||
@@ -55,7 +54,7 @@
|
||||
"@rjsf/core": "5.24.13",
|
||||
"@rjsf/utils": "5.24.13",
|
||||
"@rjsf/validator-ajv8": "5.24.13",
|
||||
"@sentry/nextjs": "10.22.0",
|
||||
"@sentry/nextjs": "10.27.0",
|
||||
"@supabase/ssr": "0.7.0",
|
||||
"@supabase/supabase-js": "2.78.0",
|
||||
"@tanstack/react-query": "5.90.6",
|
||||
@@ -135,7 +134,7 @@
|
||||
"axe-playwright": "2.2.2",
|
||||
"chromatic": "13.3.3",
|
||||
"concurrently": "9.2.1",
|
||||
"cross-env": "7.0.3",
|
||||
"cross-env": "10.1.0",
|
||||
"eslint": "8.57.1",
|
||||
"eslint-config-next": "15.5.2",
|
||||
"eslint-plugin-storybook": "9.1.5",
|
||||
|
||||
@@ -8,9 +8,7 @@ import dotenv from "dotenv";
|
||||
import path from "path";
|
||||
dotenv.config({ path: path.resolve(__dirname, ".env") });
|
||||
dotenv.config({ path: path.resolve(__dirname, "../backend/.env") });
|
||||
/**
|
||||
* See https://playwright.dev/docs/test-configuration.
|
||||
*/
|
||||
|
||||
export default defineConfig({
|
||||
testDir: "./src/tests",
|
||||
/* Global setup file that runs before all tests */
|
||||
@@ -62,7 +60,7 @@ export default defineConfig({
|
||||
/* Maximum time one test can run for */
|
||||
timeout: 25000,
|
||||
|
||||
/* Configure web server to start automatically */
|
||||
/* Configure web server to start automatically (local dev only) */
|
||||
webServer: {
|
||||
command: "pnpm start",
|
||||
url: "http://localhost:3000",
|
||||
|
||||
771
autogpt_platform/frontend/pnpm-lock.yaml
generated
File diff suppressed because it is too large
BIN
autogpt_platform/frontend/public/favicon-dev.ico
Normal file
Binary file not shown.
After Width: | Height: | Size: 15 KiB
BIN
autogpt_platform/frontend/public/favicon-local.ico
Normal file
Binary file not shown.
After Width: | Height: | Size: 15 KiB
@@ -1,4 +1,11 @@
|
||||
"use client";
|
||||
import { StoreAgentDetails } from "@/lib/autogpt-server-api";
|
||||
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
|
||||
import { isEmptyOrWhitespace } from "@/lib/utils";
|
||||
import { useRouter } from "next/navigation";
|
||||
import { useEffect, useState } from "react";
|
||||
import { useOnboarding } from "../../../../providers/onboarding/onboarding-provider";
|
||||
import OnboardingAgentCard from "../components/OnboardingAgentCard";
|
||||
import OnboardingButton from "../components/OnboardingButton";
|
||||
import {
|
||||
OnboardingFooter,
|
||||
@@ -6,28 +13,22 @@ import {
|
||||
OnboardingStep,
|
||||
} from "../components/OnboardingStep";
|
||||
import { OnboardingText } from "../components/OnboardingText";
|
||||
import OnboardingAgentCard from "../components/OnboardingAgentCard";
|
||||
import { useEffect, useState } from "react";
|
||||
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
|
||||
import { StoreAgentDetails } from "@/lib/autogpt-server-api";
|
||||
import { isEmptyOrWhitespace } from "@/lib/utils";
|
||||
import { useOnboarding } from "../../../../providers/onboarding/onboarding-provider";
|
||||
import { finishOnboarding } from "../6-congrats/actions";
|
||||
|
||||
export default function Page() {
|
||||
const { state, updateState } = useOnboarding(4, "INTEGRATIONS");
|
||||
const { state, updateState, completeStep } = useOnboarding(4, "INTEGRATIONS");
|
||||
const [agents, setAgents] = useState<StoreAgentDetails[]>([]);
|
||||
const api = useBackendAPI();
|
||||
const router = useRouter();
|
||||
|
||||
useEffect(() => {
|
||||
api.getOnboardingAgents().then((agents) => {
|
||||
if (agents.length < 2) {
|
||||
finishOnboarding();
|
||||
completeStep("CONGRATS");
|
||||
router.replace("/");
|
||||
}
|
||||
|
||||
setAgents(agents);
|
||||
});
|
||||
}, [api, setAgents]);
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
// Deselect agent if it's not in the list of agents
|
||||
|
||||
@@ -1,18 +0,0 @@
|
||||
"use server";
|
||||
import BackendAPI from "@/lib/autogpt-server-api";
|
||||
import { revalidatePath } from "next/cache";
|
||||
import { redirect } from "next/navigation";
|
||||
|
||||
export async function finishOnboarding() {
|
||||
const api = new BackendAPI();
|
||||
const onboarding = await api.getUserOnboarding();
|
||||
const listingId = onboarding?.selectedStoreListingVersionId;
|
||||
if (listingId) {
|
||||
const libraryAgent = await api.addMarketplaceAgentToLibrary(listingId);
|
||||
revalidatePath(`/library/agents/${libraryAgent.id}`, "layout");
|
||||
redirect(`/library/agents/${libraryAgent.id}`);
|
||||
} else {
|
||||
revalidatePath("/library", "layout");
|
||||
redirect("/library");
|
||||
}
|
||||
}
|
||||
@@ -1,12 +1,15 @@
|
||||
"use client";
|
||||
import { useEffect, useRef, useState } from "react";
|
||||
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { finishOnboarding } from "./actions";
|
||||
import { useOnboarding } from "../../../../providers/onboarding/onboarding-provider";
|
||||
import { useRouter } from "next/navigation";
|
||||
import * as party from "party-js";
|
||||
import { useEffect, useRef, useState } from "react";
|
||||
import { useOnboarding } from "../../../../providers/onboarding/onboarding-provider";
|
||||
|
||||
export default function Page() {
|
||||
const { completeStep } = useOnboarding(7, "AGENT_INPUT");
|
||||
const router = useRouter();
|
||||
const api = useBackendAPI();
|
||||
const [showText, setShowText] = useState(false);
|
||||
const [showSubtext, setShowSubtext] = useState(false);
|
||||
const divRef = useRef(null);
|
||||
@@ -30,9 +33,28 @@ export default function Page() {
|
||||
setShowSubtext(true);
|
||||
}, 500);
|
||||
|
||||
const timer2 = setTimeout(() => {
|
||||
const timer2 = setTimeout(async () => {
|
||||
completeStep("CONGRATS");
|
||||
finishOnboarding();
|
||||
|
||||
try {
|
||||
const onboarding = await api.getUserOnboarding();
|
||||
if (onboarding?.selectedStoreListingVersionId) {
|
||||
try {
|
||||
const libraryAgent = await api.addMarketplaceAgentToLibrary(
|
||||
onboarding.selectedStoreListingVersionId,
|
||||
);
|
||||
router.replace(`/library/agents/${libraryAgent.id}`);
|
||||
} catch (error) {
|
||||
console.error("Failed to add agent to library:", error);
|
||||
router.replace("/library");
|
||||
}
|
||||
} else {
|
||||
router.replace("/library");
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to get onboarding data:", error);
|
||||
router.replace("/library");
|
||||
}
|
||||
}, 3000);
|
||||
|
||||
return () => {
|
||||
@@ -40,7 +62,7 @@ export default function Page() {
|
||||
clearTimeout(timer1);
|
||||
clearTimeout(timer2);
|
||||
};
|
||||
}, []);
|
||||
}, [completeStep, router, api]);
|
||||
|
||||
return (
|
||||
<div className="flex h-screen w-screen flex-col items-center justify-center bg-violet-100">
|
||||
|
||||
@@ -1,37 +1,72 @@
|
||||
import BackendAPI from "@/lib/autogpt-server-api";
|
||||
import { redirect } from "next/navigation";
|
||||
import { finishOnboarding } from "./6-congrats/actions";
|
||||
import { shouldShowOnboarding } from "@/app/api/helpers";
|
||||
"use client";
|
||||
import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
|
||||
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
|
||||
import { useRouter } from "next/navigation";
|
||||
import { useEffect } from "react";
|
||||
|
||||
// Force dynamic rendering to avoid static generation issues with cookies
|
||||
export const dynamic = "force-dynamic";
|
||||
export default function OnboardingPage() {
|
||||
const router = useRouter();
|
||||
const api = useBackendAPI();
|
||||
|
||||
export default async function OnboardingPage() {
|
||||
const api = new BackendAPI();
|
||||
const isOnboardingEnabled = await shouldShowOnboarding();
|
||||
useEffect(() => {
|
||||
async function redirectToStep() {
|
||||
try {
|
||||
// Check if onboarding is enabled
|
||||
const isEnabled = await api.isOnboardingEnabled();
|
||||
if (!isEnabled) {
|
||||
router.replace("/");
|
||||
return;
|
||||
}
|
||||
|
||||
if (!isOnboardingEnabled) {
|
||||
redirect("/marketplace");
|
||||
}
|
||||
const onboarding = await api.getUserOnboarding();
|
||||
|
||||
const onboarding = await api.getUserOnboarding();
|
||||
// Handle completed onboarding
|
||||
if (onboarding.completedSteps.includes("GET_RESULTS")) {
|
||||
router.replace("/");
|
||||
return;
|
||||
}
|
||||
|
||||
// CONGRATS is the last step in intro onboarding
|
||||
if (onboarding.completedSteps.includes("GET_RESULTS"))
|
||||
redirect("/marketplace");
|
||||
else if (onboarding.completedSteps.includes("CONGRATS")) finishOnboarding();
|
||||
else if (onboarding.completedSteps.includes("AGENT_INPUT"))
|
||||
redirect("/onboarding/5-run");
|
||||
else if (onboarding.completedSteps.includes("AGENT_NEW_RUN"))
|
||||
redirect("/onboarding/5-run");
|
||||
else if (onboarding.completedSteps.includes("AGENT_CHOICE"))
|
||||
redirect("/onboarding/5-run");
|
||||
else if (onboarding.completedSteps.includes("INTEGRATIONS"))
|
||||
redirect("/onboarding/4-agent");
|
||||
else if (onboarding.completedSteps.includes("USAGE_REASON"))
|
||||
redirect("/onboarding/3-services");
|
||||
else if (onboarding.completedSteps.includes("WELCOME"))
|
||||
redirect("/onboarding/2-reason");
|
||||
// Redirect to appropriate step based on completed steps
|
||||
if (onboarding.completedSteps.includes("AGENT_INPUT")) {
|
||||
router.push("/onboarding/5-run");
|
||||
return;
|
||||
}
|
||||
|
||||
redirect("/onboarding/1-welcome");
|
||||
if (onboarding.completedSteps.includes("AGENT_NEW_RUN")) {
|
||||
router.push("/onboarding/5-run");
|
||||
return;
|
||||
}
|
||||
|
||||
if (onboarding.completedSteps.includes("AGENT_CHOICE")) {
|
||||
router.push("/onboarding/5-run");
|
||||
return;
|
||||
}
|
||||
|
||||
if (onboarding.completedSteps.includes("INTEGRATIONS")) {
|
||||
router.push("/onboarding/4-agent");
|
||||
return;
|
||||
}
|
||||
|
||||
if (onboarding.completedSteps.includes("USAGE_REASON")) {
|
||||
router.push("/onboarding/3-services");
|
||||
return;
|
||||
}
|
||||
|
||||
if (onboarding.completedSteps.includes("WELCOME")) {
|
||||
router.push("/onboarding/2-reason");
|
||||
return;
|
||||
}
|
||||
|
||||
// Default: redirect to first step
|
||||
router.push("/onboarding/1-welcome");
|
||||
} catch (error) {
|
||||
console.error("Failed to determine onboarding step:", error);
|
||||
router.replace("/");
|
||||
}
|
||||
}
|
||||
|
||||
redirectToStep();
|
||||
}, [api, router]);
|
||||
|
||||
return <LoadingSpinner size="large" cover />;
|
||||
}
|
||||
|
||||
@@ -2,11 +2,12 @@
|
||||
import { postV1ResetOnboardingProgress } from "@/app/api/__generated__/endpoints/onboarding/onboarding";
|
||||
import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
|
||||
import { useToast } from "@/components/molecules/Toast/use-toast";
|
||||
import { redirect } from "next/navigation";
|
||||
import { useRouter } from "next/navigation";
|
||||
import { useEffect } from "react";
|
||||
|
||||
export default function OnboardingResetPage() {
|
||||
const { toast } = useToast();
|
||||
const router = useRouter();
|
||||
|
||||
useEffect(() => {
|
||||
postV1ResetOnboardingProgress()
|
||||
@@ -17,7 +18,7 @@ export default function OnboardingResetPage() {
|
||||
variant: "success",
|
||||
});
|
||||
|
||||
redirect("/onboarding/1-welcome");
|
||||
router.push("/onboarding");
|
||||
})
|
||||
.catch(() => {
|
||||
toast({
|
||||
@@ -26,7 +27,7 @@ export default function OnboardingResetPage() {
|
||||
variant: "destructive",
|
||||
});
|
||||
});
|
||||
}, []);
|
||||
}, [toast, router]);
|
||||
|
||||
return <LoadingSpinner cover />;
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
"use client";
|
||||
|
||||
import { useState } from "react";
|
||||
import { useState, useEffect } from "react";
|
||||
import { Button } from "@/components/atoms/Button/Button";
|
||||
import { Input } from "@/components/__legacy__/ui/input";
|
||||
import { Label } from "@/components/__legacy__/ui/label";
|
||||
@@ -11,36 +11,37 @@ import {
|
||||
SelectTrigger,
|
||||
SelectValue,
|
||||
} from "@/components/__legacy__/ui/select";
|
||||
import { Textarea } from "@/components/__legacy__/ui/textarea";
|
||||
import { Checkbox } from "@/components/__legacy__/ui/checkbox";
|
||||
import { Collapsible } from "@/components/molecules/Collapsible/Collapsible";
|
||||
import { useToast } from "@/components/molecules/Toast/use-toast";
|
||||
import { usePostV2GenerateExecutionAnalytics } from "@/app/api/__generated__/endpoints/admin/admin";
|
||||
import {
|
||||
usePostV2GenerateExecutionAnalytics,
|
||||
useGetV2GetExecutionAnalyticsConfiguration,
|
||||
} from "@/app/api/__generated__/endpoints/admin/admin";
|
||||
import type { ExecutionAnalyticsRequest } from "@/app/api/__generated__/models/executionAnalyticsRequest";
|
||||
import type { ExecutionAnalyticsResponse } from "@/app/api/__generated__/models/executionAnalyticsResponse";
|
||||
|
||||
// Local interface for form state to simplify handling
|
||||
interface FormData {
|
||||
graph_id: string;
|
||||
graph_version?: number;
|
||||
user_id?: string;
|
||||
created_after?: string;
|
||||
model_name: string;
|
||||
batch_size: number;
|
||||
// Use the generated type with minimal adjustment for form handling
|
||||
interface FormData extends Omit<ExecutionAnalyticsRequest, "created_after"> {
|
||||
created_after?: string; // Keep as string for datetime-local input
|
||||
// All other fields use the generated types as-is
|
||||
}
|
||||
import { AnalyticsResultsTable } from "./AnalyticsResultsTable";
|
||||
|
||||
const MODEL_OPTIONS = [
|
||||
{ value: "gpt-4o-mini", label: "GPT-4o Mini (Recommended)" },
|
||||
{ value: "gpt-4o", label: "GPT-4o" },
|
||||
{ value: "gpt-4-turbo", label: "GPT-4 Turbo" },
|
||||
{ value: "gpt-4.1", label: "GPT-4.1" },
|
||||
{ value: "gpt-4.1-mini", label: "GPT-4.1 Mini" },
|
||||
];
|
||||
|
||||
export function ExecutionAnalyticsForm() {
|
||||
const [results, setResults] = useState<ExecutionAnalyticsResponse | null>(
|
||||
null,
|
||||
);
|
||||
const { toast } = useToast();
|
||||
|
||||
// Fetch configuration from API
|
||||
const {
|
||||
data: config,
|
||||
isLoading: configLoading,
|
||||
error: configError,
|
||||
} = useGetV2GetExecutionAnalyticsConfiguration();
|
||||
|
||||
const generateAnalytics = usePostV2GenerateExecutionAnalytics({
|
||||
mutation: {
|
||||
onSuccess: (res) => {
|
||||
@@ -69,10 +70,23 @@ export function ExecutionAnalyticsForm() {
|
||||
|
||||
const [formData, setFormData] = useState<FormData>({
|
||||
graph_id: "",
|
||||
model_name: "gpt-4o-mini",
|
||||
model_name: "", // Will be set from config
|
||||
batch_size: 10, // Fixed internal value
|
||||
skip_existing: true, // Default to skip existing
|
||||
system_prompt: "", // Will use config default when empty
|
||||
user_prompt: "", // Will use config default when empty
|
||||
});
|
||||
|
||||
// Update form defaults when config loads
|
||||
useEffect(() => {
|
||||
if (config?.data && config.status === 200 && !formData.model_name) {
|
||||
setFormData((prev) => ({
|
||||
...prev,
|
||||
model_name: config.data.recommended_model,
|
||||
}));
|
||||
}
|
||||
}, [config, formData.model_name]);
|
||||
|
||||
const handleSubmit = async (e: React.FormEvent) => {
|
||||
e.preventDefault();
|
||||
|
||||
@@ -92,6 +106,7 @@ export function ExecutionAnalyticsForm() {
|
||||
graph_id: formData.graph_id.trim(),
|
||||
model_name: formData.model_name,
|
||||
batch_size: formData.batch_size,
|
||||
skip_existing: formData.skip_existing,
|
||||
};
|
||||
|
||||
if (formData.graph_version) {
|
||||
@@ -110,6 +125,14 @@ export function ExecutionAnalyticsForm() {
|
||||
payload.created_after = new Date(formData.created_after.trim());
|
||||
}
|
||||
|
||||
if (formData.system_prompt?.trim()) {
|
||||
payload.system_prompt = formData.system_prompt.trim();
|
||||
}
|
||||
|
||||
if (formData.user_prompt?.trim()) {
|
||||
payload.user_prompt = formData.user_prompt.trim();
|
||||
}
|
||||
|
||||
generateAnalytics.mutate({ data: payload });
|
||||
};
|
||||
|
||||
@@ -117,6 +140,26 @@ export function ExecutionAnalyticsForm() {
|
||||
setFormData((prev: FormData) => ({ ...prev, [field]: value }));
|
||||
};
|
||||
|
||||
// Show loading state while config loads
|
||||
if (configLoading) {
|
||||
return (
|
||||
<div className="flex items-center justify-center py-8">
|
||||
<div className="text-gray-500">Loading configuration...</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// Show error state if config fails to load
|
||||
if (configError || !config?.data || config.status !== 200) {
|
||||
return (
|
||||
<div className="flex items-center justify-center py-8">
|
||||
<div className="text-red-500">Failed to load configuration</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
const configData = config.data;
|
||||
|
||||
return (
|
||||
<div className="space-y-6">
|
||||
<form onSubmit={handleSubmit} className="space-y-4">
|
||||
@@ -182,9 +225,9 @@ export function ExecutionAnalyticsForm() {
|
||||
<SelectValue placeholder="Select AI model" />
|
||||
</SelectTrigger>
|
||||
<SelectContent>
|
||||
{MODEL_OPTIONS.map((option) => (
|
||||
<SelectItem key={option.value} value={option.value}>
|
||||
{option.label}
|
||||
{configData.available_models.map((model) => (
|
||||
<SelectItem key={model.value} value={model.value}>
|
||||
{model.label}
|
||||
</SelectItem>
|
||||
))}
|
||||
</SelectContent>
|
||||
@@ -192,6 +235,127 @@ export function ExecutionAnalyticsForm() {
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Advanced Options Section - Collapsible */}
|
||||
<div className="border-t pt-6">
|
||||
<Collapsible
|
||||
trigger={
|
||||
<h3 className="text-lg font-semibold text-gray-700">
|
||||
Advanced Options
|
||||
</h3>
|
||||
}
|
||||
defaultOpen={false}
|
||||
className="space-y-4"
|
||||
>
|
||||
<div className="space-y-4 pt-4">
|
||||
{/* Skip Existing Checkbox */}
|
||||
<div className="flex items-center space-x-2">
|
||||
<Checkbox
|
||||
id="skip_existing"
|
||||
checked={formData.skip_existing}
|
||||
onCheckedChange={(checked) =>
|
||||
handleInputChange("skip_existing", checked)
|
||||
}
|
||||
/>
|
||||
<Label htmlFor="skip_existing" className="text-sm">
|
||||
Skip executions that already have activity status and
|
||||
correctness score
|
||||
</Label>
|
||||
</div>
|
||||
|
||||
{/* Custom System Prompt */}
|
||||
<div className="space-y-2">
|
||||
<Label htmlFor="system_prompt">
|
||||
Custom System Prompt (Optional)
|
||||
</Label>
|
||||
<Textarea
|
||||
id="system_prompt"
|
||||
value={formData.system_prompt || ""}
|
||||
onChange={(e) =>
|
||||
handleInputChange("system_prompt", e.target.value)
|
||||
}
|
||||
placeholder={configData.default_system_prompt}
|
||||
rows={6}
|
||||
className="resize-y"
|
||||
/>
|
||||
<p className="text-sm text-gray-600">
|
||||
Customize how the AI evaluates execution success and failure.
|
||||
Leave empty to use the default prompt shown above.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Custom User Prompt */}
|
||||
<div className="space-y-2">
|
||||
<Label htmlFor="user_prompt">
|
||||
Custom User Prompt Template (Optional)
|
||||
</Label>
|
||||
<Textarea
|
||||
id="user_prompt"
|
||||
value={formData.user_prompt || ""}
|
||||
onChange={(e) =>
|
||||
handleInputChange("user_prompt", e.target.value)
|
||||
}
|
||||
placeholder={configData.default_user_prompt}
|
||||
rows={8}
|
||||
className="resize-y"
|
||||
/>
|
||||
<p className="text-sm text-gray-600">
|
||||
Customize the analysis instructions. Use{" "}
|
||||
<code className="rounded bg-gray-100 px-1">
|
||||
{"{{GRAPH_NAME}}"}
|
||||
</code>{" "}
|
||||
and{" "}
|
||||
<code className="rounded bg-gray-100 px-1">
|
||||
{"{{EXECUTION_DATA}}"}
|
||||
</code>{" "}
|
||||
as placeholders. Leave empty to use the default template shown
|
||||
above.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Quick Actions */}
|
||||
<div className="flex flex-wrap gap-2 border-t pt-4">
|
||||
<Button
|
||||
type="button"
|
||||
variant="secondary"
|
||||
size="small"
|
||||
onClick={() => {
|
||||
handleInputChange(
|
||||
"system_prompt",
|
||||
configData.default_system_prompt,
|
||||
);
|
||||
}}
|
||||
>
|
||||
Reset System Prompt
|
||||
</Button>
|
||||
<Button
|
||||
type="button"
|
||||
variant="secondary"
|
||||
size="small"
|
||||
onClick={() => {
|
||||
handleInputChange(
|
||||
"user_prompt",
|
||||
configData.default_user_prompt,
|
||||
);
|
||||
}}
|
||||
>
|
||||
Reset User Prompt
|
||||
</Button>
|
||||
<Button
|
||||
type="button"
|
||||
variant="secondary"
|
||||
size="small"
|
||||
onClick={() => {
|
||||
handleInputChange("system_prompt", "");
|
||||
handleInputChange("user_prompt", "");
|
||||
}}
|
||||
>
|
||||
Clear All Prompts
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</Collapsible>
|
||||
</div>
|
||||
|
||||
<div className="flex justify-end">
|
||||
<Button
|
||||
variant="primary"
|
||||
|
||||
@@ -1,13 +1,20 @@
|
||||
import { parseAsString, useQueryStates } from "nuqs";
|
||||
import { AgentOutputs } from "./components/AgentOutputs/AgentOutputs";
|
||||
import { RunGraph } from "./components/RunGraph/RunGraph";
|
||||
import { ScheduleGraph } from "./components/ScheduleGraph/ScheduleGraph";
|
||||
import { memo } from "react";
|
||||
|
||||
export const BuilderActions = () => {
|
||||
export const BuilderActions = memo(() => {
|
||||
const [{ flowID }] = useQueryStates({
|
||||
flowID: parseAsString,
|
||||
});
|
||||
return (
|
||||
<div className="absolute bottom-4 left-[50%] z-[100] flex -translate-x-1/2 items-center gap-2 gap-4">
|
||||
<AgentOutputs />
|
||||
<RunGraph />
|
||||
<ScheduleGraph />
|
||||
<div className="absolute bottom-4 left-[50%] z-[100] flex -translate-x-1/2 items-center gap-4 rounded-full bg-white p-2 px-2 shadow-lg">
|
||||
<AgentOutputs flowID={flowID} />
|
||||
<RunGraph flowID={flowID} />
|
||||
<ScheduleGraph flowID={flowID} />
|
||||
</div>
|
||||
);
|
||||
};
|
||||
});
|
||||
|
||||
BuilderActions.displayName = "BuilderActions";
|
||||
|
||||
@@ -1,32 +1,141 @@
|
||||
import { Button } from "@/components/atoms/Button/Button";
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipContent,
|
||||
TooltipProvider,
|
||||
TooltipTrigger,
|
||||
} from "@/components/atoms/Tooltip/BaseTooltip";
|
||||
import { LogOutIcon } from "lucide-react";
|
||||
import {
|
||||
Sheet,
|
||||
SheetContent,
|
||||
SheetDescription,
|
||||
SheetHeader,
|
||||
SheetTitle,
|
||||
SheetTrigger,
|
||||
} from "@/components/__legacy__/ui/sheet";
|
||||
import { BuilderActionButton } from "../BuilderActionButton";
|
||||
import { BookOpenIcon } from "@phosphor-icons/react";
|
||||
import { useGraphStore } from "@/app/(platform)/build/stores/graphStore";
|
||||
import { useShallow } from "zustand/react/shallow";
|
||||
import { useNodeStore } from "@/app/(platform)/build/stores/nodeStore";
|
||||
import { BlockUIType } from "@/app/(platform)/build/components/types";
|
||||
import { ScrollArea } from "@/components/__legacy__/ui/scroll-area";
|
||||
import { Label } from "@/components/__legacy__/ui/label";
|
||||
import { useMemo } from "react";
|
||||
import {
|
||||
globalRegistry,
|
||||
OutputItem,
|
||||
OutputActions,
|
||||
} from "@/app/(platform)/library/agents/[id]/components/AgentRunsView/components/OutputRenderers";
|
||||
|
||||
export const AgentOutputs = ({ flowID }: { flowID: string | null }) => {
|
||||
const hasOutputs = useGraphStore(useShallow((state) => state.hasOutputs));
|
||||
const nodes = useNodeStore(useShallow((state) => state.nodes));
|
||||
|
||||
const outputs = useMemo(() => {
|
||||
const outputNodes = nodes.filter(
|
||||
(node) => node.data.uiType === BlockUIType.OUTPUT,
|
||||
);
|
||||
|
||||
return outputNodes
|
||||
.map((node) => {
|
||||
const executionResult = node.data.nodeExecutionResult;
|
||||
const outputData = executionResult?.output_data?.output;
|
||||
|
||||
const renderer = globalRegistry.getRenderer(outputData);
|
||||
|
||||
return {
|
||||
metadata: {
|
||||
name: node.data.hardcodedValues?.name || "Output",
|
||||
description:
|
||||
node.data.hardcodedValues?.description || "Output from the agent",
|
||||
},
|
||||
value: outputData ?? "No output yet",
|
||||
renderer,
|
||||
};
|
||||
})
|
||||
.filter(
|
||||
(
|
||||
output,
|
||||
): output is typeof output & {
|
||||
renderer: NonNullable<typeof output.renderer>;
|
||||
} => output.renderer !== null,
|
||||
);
|
||||
}, [nodes]);
|
||||
|
||||
const actionItems = useMemo(() => {
|
||||
return outputs.map((output) => ({
|
||||
value: output.value,
|
||||
metadata: {},
|
||||
renderer: output.renderer,
|
||||
}));
|
||||
}, [outputs]);
|
||||
|
||||
export const AgentOutputs = () => {
|
||||
return (
|
||||
<>
|
||||
<Sheet>
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
{/* Todo: Implement Agent Outputs */}
|
||||
<Button
|
||||
variant="primary"
|
||||
size="large"
|
||||
className={"relative min-w-0 border-none text-lg"}
|
||||
>
|
||||
<LogOutIcon className="size-6" />
|
||||
</Button>
|
||||
<SheetTrigger asChild>
|
||||
<BuilderActionButton disabled={!flowID || !hasOutputs()}>
|
||||
<BookOpenIcon className="size-6" />
|
||||
</BuilderActionButton>
|
||||
</SheetTrigger>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p>Agent Outputs</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
</>
|
||||
<SheetContent className="flex h-full w-full flex-col overflow-hidden sm:max-w-[600px]">
|
||||
<SheetHeader className="px-2 py-2">
|
||||
<div className="flex items-center justify-between">
|
||||
<div>
|
||||
<SheetTitle className="text-xl">Run Outputs</SheetTitle>
|
||||
<SheetDescription className="mt-1 text-sm text-muted-foreground">
|
||||
<span className="inline-flex items-center gap-1.5">
|
||||
<span className="rounded-md bg-yellow-100 px-2 py-0.5 text-xs font-medium text-yellow-800 dark:bg-yellow-900/30 dark:text-yellow-400">
|
||||
Beta
|
||||
</span>
|
||||
<span>This feature is in beta and may contain bugs</span>
|
||||
</span>
|
||||
</SheetDescription>
|
||||
</div>
|
||||
{outputs.length > 0 && <OutputActions items={actionItems} />}
|
||||
</div>
|
||||
</SheetHeader>
|
||||
<div className="flex-grow overflow-y-auto px-2 py-2">
|
||||
<ScrollArea className="h-full overflow-auto pr-4">
|
||||
<div className="space-y-6">
|
||||
{outputs && outputs.length > 0 ? (
|
||||
outputs.map((output, i) => (
|
||||
<div key={i} className="space-y-2">
|
||||
<div>
|
||||
<Label className="text-base font-semibold">
|
||||
{output.metadata.name || "Unnamed Output"}
|
||||
</Label>
|
||||
{output.metadata.description && (
|
||||
<Label className="mt-1 block text-sm text-gray-600">
|
||||
{output.metadata.description}
|
||||
</Label>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<OutputItem
|
||||
value={output.value}
|
||||
metadata={{}}
|
||||
renderer={output.renderer}
|
||||
/>
|
||||
</div>
|
||||
))
|
||||
) : (
|
||||
<div className="flex h-full items-center justify-center text-gray-500">
|
||||
<p>No output blocks available.</p>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</ScrollArea>
|
||||
</div>
|
||||
</SheetContent>
|
||||
</Sheet>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -0,0 +1,37 @@
|
||||
import { Button } from "@/components/atoms/Button/Button";
|
||||
import { ButtonProps } from "@/components/atoms/Button/helpers";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { CircleNotchIcon } from "@phosphor-icons/react";
|
||||
|
||||
export const BuilderActionButton = ({
|
||||
children,
|
||||
className,
|
||||
isLoading,
|
||||
...props
|
||||
}: ButtonProps & { isLoading?: boolean }) => {
|
||||
return (
|
||||
<Button
|
||||
variant="icon"
|
||||
size={"small"}
|
||||
className={cn(
|
||||
"relative h-12 w-12 min-w-0 text-lg",
|
||||
"bg-gradient-to-br from-zinc-50 to-zinc-200",
|
||||
"border border-zinc-200",
|
||||
"shadow-[inset_0_3px_0_0_rgba(255,255,255,0.5),0_2px_4px_0_rgba(0,0,0,0.2)]",
|
||||
"dark:shadow-[inset_0_1px_0_0_rgba(255,255,255,0.1),0_2px_4px_0_rgba(0,0,0,0.4)]",
|
||||
"hover:shadow-[inset_0_1px_0_0_rgba(255,255,255,0.5),0_1px_2px_0_rgba(0,0,0,0.2)]",
|
||||
"active:shadow-[inset_0_2px_4px_0_rgba(0,0,0,0.2)]",
|
||||
"transition-all duration-150",
|
||||
"disabled:cursor-not-allowed disabled:opacity-50",
|
||||
className,
|
||||
)}
|
||||
{...props}
|
||||
>
|
||||
{!isLoading ? (
|
||||
children
|
||||
) : (
|
||||
<CircleNotchIcon className="size-6 animate-spin" />
|
||||
)}
|
||||
</Button>
|
||||
);
|
||||
};
|
||||
@@ -1,9 +1,7 @@
|
||||
import { Button } from "@/components/atoms/Button/Button";
|
||||
import { PlayIcon } from "lucide-react";
|
||||
import { useRunGraph } from "./useRunGraph";
|
||||
import { useGraphStore } from "@/app/(platform)/build/stores/graphStore";
|
||||
import { useShallow } from "zustand/react/shallow";
|
||||
import { StopIcon } from "@phosphor-icons/react";
|
||||
import { PlayIcon, StopIcon } from "@phosphor-icons/react";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { RunInputDialog } from "../RunInputDialog/RunInputDialog";
|
||||
import {
|
||||
@@ -11,14 +9,16 @@ import {
|
||||
TooltipContent,
|
||||
TooltipTrigger,
|
||||
} from "@/components/atoms/Tooltip/BaseTooltip";
|
||||
import { BuilderActionButton } from "../BuilderActionButton";
|
||||
|
||||
export const RunGraph = () => {
|
||||
export const RunGraph = ({ flowID }: { flowID: string | null }) => {
|
||||
const {
|
||||
handleRunGraph,
|
||||
handleStopGraph,
|
||||
isSaving,
|
||||
openRunInputDialog,
|
||||
setOpenRunInputDialog,
|
||||
isExecutingGraph,
|
||||
isSaving,
|
||||
} = useRunGraph();
|
||||
const isGraphRunning = useGraphStore(
|
||||
useShallow((state) => state.isGraphRunning),
|
||||
@@ -28,20 +28,21 @@ export const RunGraph = () => {
|
||||
<>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<Button
|
||||
variant="primary"
|
||||
size="large"
|
||||
<BuilderActionButton
|
||||
className={cn(
|
||||
"relative min-w-0 border-none bg-gradient-to-r from-purple-500 to-pink-500 text-lg",
|
||||
isGraphRunning &&
|
||||
"border-red-500 bg-gradient-to-br from-red-400 to-red-500 shadow-[inset_0_2px_0_0_rgba(255,255,255,0.5),0_2px_4px_0_rgba(0,0,0,0.2)]",
|
||||
)}
|
||||
onClick={isGraphRunning ? handleStopGraph : handleRunGraph}
|
||||
disabled={!flowID || isExecutingGraph}
|
||||
isLoading={isExecutingGraph || isSaving}
|
||||
>
|
||||
{!isGraphRunning && !isSaving ? (
|
||||
<PlayIcon className="size-6" />
|
||||
{!isGraphRunning ? (
|
||||
<PlayIcon className="size-6 drop-shadow-sm" />
|
||||
) : (
|
||||
<StopIcon className="size-6" />
|
||||
<StopIcon className="size-6 drop-shadow-sm" />
|
||||
)}
|
||||
</Button>
|
||||
</BuilderActionButton>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
{isGraphRunning ? "Stop agent" : "Run agent"}
|
||||
|
||||
@@ -31,25 +31,26 @@ export const useRunGraph = () => {
|
||||
flowExecutionID: parseAsString,
|
||||
});
|
||||
|
||||
const { mutateAsync: executeGraph } = usePostV1ExecuteGraphAgent({
|
||||
mutation: {
|
||||
onSuccess: (response) => {
|
||||
const { id } = response.data as GraphExecutionMeta;
|
||||
setQueryStates({
|
||||
flowExecutionID: id,
|
||||
});
|
||||
},
|
||||
onError: (error) => {
|
||||
setIsGraphRunning(false);
|
||||
const { mutateAsync: executeGraph, isPending: isExecutingGraph } =
|
||||
usePostV1ExecuteGraphAgent({
|
||||
mutation: {
|
||||
onSuccess: (response) => {
|
||||
const { id } = response.data as GraphExecutionMeta;
|
||||
setQueryStates({
|
||||
flowExecutionID: id,
|
||||
});
|
||||
},
|
||||
onError: (error) => {
|
||||
setIsGraphRunning(false);
|
||||
|
||||
toast({
|
||||
title: (error.detail as string) ?? "An unexpected error occurred.",
|
||||
description: "An unexpected error occurred.",
|
||||
variant: "destructive",
|
||||
});
|
||||
toast({
|
||||
title: (error.detail as string) ?? "An unexpected error occurred.",
|
||||
description: "An unexpected error occurred.",
|
||||
variant: "destructive",
|
||||
});
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
const { mutateAsync: stopGraph } = usePostV1StopGraphExecution({
|
||||
mutation: {
|
||||
@@ -72,7 +73,6 @@ export const useRunGraph = () => {
|
||||
if (hasInputs() || hasCredentials()) {
|
||||
setOpenRunInputDialog(true);
|
||||
} else {
|
||||
setIsGraphRunning(true);
|
||||
await executeGraph({
|
||||
graphId: flowID ?? "",
|
||||
graphVersion: flowVersion || null,
|
||||
@@ -95,6 +95,7 @@ export const useRunGraph = () => {
|
||||
handleRunGraph,
|
||||
handleStopGraph,
|
||||
isSaving,
|
||||
isExecutingGraph,
|
||||
openRunInputDialog,
|
||||
setOpenRunInputDialog,
|
||||
};
|
||||
|
||||
@@ -105,7 +105,9 @@ export const RunInputDialog = ({
|
||||
onClick={handleManualRun}
|
||||
loading={isExecutingGraph}
|
||||
>
|
||||
<PlayIcon className="size-5 transition-transform group-hover:scale-110" />
|
||||
{!isExecutingGraph && (
|
||||
<PlayIcon className="size-5 transition-transform group-hover:scale-110" />
|
||||
)}
|
||||
<span className="font-semibold">Manual Run</span>
|
||||
</Button>
|
||||
)}
|
||||
|
||||
@@ -43,7 +43,6 @@ export const useRunInputDialog = ({
|
||||
setQueryStates({
|
||||
flowExecutionID: id,
|
||||
});
|
||||
setIsGraphRunning(false);
|
||||
},
|
||||
onError: (error) => {
|
||||
setIsGraphRunning(false);
|
||||
@@ -79,14 +78,13 @@ export const useRunInputDialog = ({
|
||||
return dynamicUiSchema;
|
||||
}, [credentialsSchema]);
|
||||
|
||||
const handleManualRun = () => {
|
||||
setIsOpen(false);
|
||||
setIsGraphRunning(true);
|
||||
executeGraph({
|
||||
const handleManualRun = async () => {
|
||||
await executeGraph({
|
||||
graphId: flowID ?? "",
|
||||
graphVersion: flowVersion || null,
|
||||
data: { inputs: inputValues, credentials_inputs: credentialValues },
|
||||
});
|
||||
setIsOpen(false);
|
||||
};
|
||||
|
||||
const handleInputChange = (inputValues: Record<string, any>) => {
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import { Button } from "@/components/atoms/Button/Button";
|
||||
import { ClockIcon } from "@phosphor-icons/react";
|
||||
import { RunInputDialog } from "../RunInputDialog/RunInputDialog";
|
||||
import { useScheduleGraph } from "./useScheduleGraph";
|
||||
@@ -9,8 +8,9 @@ import {
|
||||
TooltipTrigger,
|
||||
} from "@/components/atoms/Tooltip/BaseTooltip";
|
||||
import { CronSchedulerDialog } from "../CronSchedulerDialog/CronSchedulerDialog";
|
||||
import { BuilderActionButton } from "../BuilderActionButton";
|
||||
|
||||
export const ScheduleGraph = () => {
|
||||
export const ScheduleGraph = ({ flowID }: { flowID: string | null }) => {
|
||||
const {
|
||||
openScheduleInputDialog,
|
||||
setOpenScheduleInputDialog,
|
||||
@@ -23,14 +23,12 @@ export const ScheduleGraph = () => {
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<Button
|
||||
variant="primary"
|
||||
size="large"
|
||||
className={"relative min-w-0 border-none text-lg"}
|
||||
<BuilderActionButton
|
||||
onClick={handleScheduleGraph}
|
||||
disabled={!flowID}
|
||||
>
|
||||
<ClockIcon className="size-6" />
|
||||
</Button>
|
||||
</BuilderActionButton>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p>Schedule Graph</p>
|
||||
|
||||
@@ -0,0 +1,590 @@
# FlowEditor Architecture Documentation

## Overview

The FlowEditor is the core visual graph builder component of the AutoGPT Platform. It allows users to create, edit, and execute workflows by connecting nodes (blocks) together in a visual canvas powered by React Flow (XYFlow).

---

## High-Level Architecture

```
┌─────────────────────────────────────────────────────────────┐
│                        Flow Component                        │
│        (Main container coordinating all sub-systems)         │
└───────────────┬──────────────────┬──────────────────────────┘
                │                  │
      ┌─────────▼────────┐   ┌─────▼──────────┐
      │   State Stores   │   │   React Flow   │
      │    (Zustand)     │   │     Canvas     │
      └────────┬─────────┘   └────────────────┘
               │
    ┌──────────┼──────────┬──────────┐
    │          │          │          │
┌───▼───┐  ┌──▼───┐  ┌───▼────┐  ┌──▼───────┐
│ Node  │  │ Edge │  │ Graph  │  │ Control  │
│ Store │  │ Store│  │ Store  │  │ Panel    │
└───────┘  └──────┘  └────────┘  └──────────┘
    │          │
    │          │
┌───▼──────────▼────────────────────────────────────┐
│              Custom Nodes & Edges                  │
│      (Visual components rendered on canvas)        │
└───────────────────────────────────────────────────┘
```

---

## Core Components Breakdown

### 1. **Flow Component** (`Flow/Flow.tsx`)

The main orchestrator component that brings everything together.

**Responsibilities:**

- Renders the ReactFlow canvas
- Integrates all stores (nodes, edges, graph state)
- Handles drag-and-drop for adding blocks
- Manages keyboard shortcuts (copy/paste)
- Controls lock state (editable vs read-only)

**Key Features:**

```tsx
<ReactFlow
  nodes={nodes}           // From nodeStore
  edges={edges}           // From edgeStore
  onNodesChange={...}     // Updates nodeStore
  onEdgesChange={...}     // Updates edgeStore
  onConnect={...}         // Creates new connections
  onDragOver={...}        // Enables block drag-drop
  onDrop={...}            // Adds blocks to canvas
/>
```

---

### 2. **State Management (Zustand Stores)**

The FlowEditor uses **4 primary Zustand stores** for state management:

#### **A. nodeStore** (`stores/nodeStore.ts`)

Manages all nodes (blocks) on the canvas.

**State:**

```typescript
{
  nodes: CustomNode[]                          // All nodes on canvas
  nodeCounter: number                          // Auto-increment for IDs
  nodeAdvancedStates: Record<string, boolean>  // Track advanced toggle
}
```

**Key Actions:**

- `addBlock()` - Creates a new block with position calculation
- `updateNodeData()` - Updates block's form values
- `addNodes()` - Bulk add (used when loading graph)
- `updateNodeStatus()` - Updates execution status (running/success/failed)
- `updateNodeExecutionResult()` - Stores output data from execution
- `getBackendNodes()` - Converts to backend format for saving

**Flow:**

1. User drags block from menu → `addBlock()` called
2. Block appears with unique ID at calculated position
3. User edits form → `updateNodeData()` updates hardcodedValues
4. On execution → status updates propagate via `updateNodeStatus()`

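For orientation, a minimal sketch of what a store with this shape can look like is shown below. It is not the real `nodeStore.ts` (which tracks more state and uses the project's `CustomNode` type); the `useNodeStoreSketch` name and the simplified data shape are illustrative only.

```typescript
// Minimal sketch of a node store, assuming zustand and the action names above.
import { create } from "zustand";
import type { Node, XYPosition } from "@xyflow/react";

type SketchNode = Node<Record<string, any>>;

interface NodeStoreSketch {
  nodes: SketchNode[];
  nodeCounter: number;
  addBlock: (
    blockId: string,
    hardcodedValues: Record<string, any>,
    position: XYPosition,
  ) => void;
  updateNodeData: (nodeId: string, data: Record<string, any>) => void;
}

export const useNodeStoreSketch = create<NodeStoreSketch>((set) => ({
  nodes: [],
  nodeCounter: 0,
  addBlock: (blockId, hardcodedValues, position) =>
    set((state) => ({
      nodeCounter: state.nodeCounter + 1,
      nodes: [
        ...state.nodes,
        {
          id: String(state.nodeCounter + 1), // unique auto-incremented ID
          type: "custom",
          position,
          data: { block_id: blockId, hardcodedValues },
        },
      ],
    })),
  updateNodeData: (nodeId, data) =>
    set((state) => ({
      nodes: state.nodes.map((node) =>
        node.id === nodeId ? { ...node, data: { ...node.data, ...data } } : node,
      ),
    })),
}));
```
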
---

#### **B. edgeStore** (`stores/edgeStore.ts`)

Manages all connections (links) between nodes.

**State:**

```typescript
{
  edges: CustomEdge[]                    // All connections
  edgeBeads: Record<string, EdgeBead[]>  // Animated data flow indicators
}
```

**Key Actions:**

- `addLinks()` - Creates connections between nodes
- `onConnect()` - Handles new connection creation
- `updateEdgeBeads()` - Shows animated data flow during execution
- `getBackendLinks()` - Converts to backend format

**Connection Logic:**

```
Source Node (output) ──→ Edge ──→ Target Node (input)
       └─ outputPin        │           └─ inputPin
                           │
                 (validated connection)
```

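A hedged sketch of what the connection step can look like when it enforces the single-input and no-cycle rules described in the Handlers section further down. `wouldCreateCycle`, the edge-ID format, and the function shape are assumptions for illustration, not the actual `edgeStore` code.

```typescript
import type { Connection, Edge } from "@xyflow/react";

// Adding source -> target closes a cycle iff a path target -> ... -> source
// already exists, so walk outgoing edges from `target`.
function wouldCreateCycle(edges: Edge[], source: string, target: string): boolean {
  const stack = [target];
  const visited = new Set<string>();
  while (stack.length) {
    const current = stack.pop()!;
    if (current === source) return true;
    if (visited.has(current)) continue;
    visited.add(current);
    edges.filter((e) => e.source === current).forEach((e) => stack.push(e.target));
  }
  return false;
}

export function connectSketch(edges: Edge[], connection: Connection): Edge[] {
  const { source, target, sourceHandle, targetHandle } = connection;
  if (!source || !target || !sourceHandle || !targetHandle) return edges;

  // Enforce a single incoming connection per input pin.
  const inputTaken = edges.some(
    (e) => e.target === target && e.targetHandle === targetHandle,
  );
  if (inputTaken || wouldCreateCycle(edges, source, target)) return edges;

  return [
    ...edges,
    {
      id: `${source}:${sourceHandle}->${target}:${targetHandle}`,
      type: "custom",
      source,
      target,
      sourceHandle,
      targetHandle,
    },
  ];
}
```
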
---

#### **C. graphStore** (`stores/graphStore.ts`)

Manages graph-level metadata and state.

**State:**

```typescript
{
  isGraphRunning: boolean               // Execution status
  inputSchema: Record<string, any>      // Graph-level inputs
  credentialsInputSchema: Record<...>   // Required credentials
  outputSchema: Record<string, any>     // Graph-level outputs
}
```

**Purpose:**

- Tracks if graph is currently executing
- Stores graph-level input/output schemas (for agent graphs)
- Used by BuilderActions to show/hide input/output panels

---

#### **D. controlPanelStore**

Manages UI state for the control panel (block menu, settings).

**State:**

```typescript
{
  blockMenuOpen: boolean;
  selectedBlock: BlockInfo | null;
}
```

---

### 3. **useFlow Hook** (`Flow/useFlow.ts`)

The main data-loading and initialization hook.

**Lifecycle:**

```
1. Component Mounts
   ↓
2. Read URL params (flowID, flowVersion, flowExecutionID)
   ↓
3. Fetch graph data from API
   ↓
4. Fetch block definitions for all blocks in graph
   ↓
5. Convert to CustomNodes
   ↓
6. Add nodes to nodeStore
   ↓
7. Add links to edgeStore
   ↓
8. If execution exists → fetch execution details
   ↓
9. Update node statuses and results
   ↓
10. Initialize history (undo/redo)
```

**Key Responsibilities:**

- **Data Fetching**: Loads graph, blocks, and execution data
- **Data Transformation**: Converts backend models to frontend CustomNodes
- **State Initialization**: Populates stores with loaded data
- **Drag & Drop**: Handles block drag-drop from menu
- **Cleanup**: Resets stores on unmount

**Important Effects:**

```typescript
// Load nodes when data is ready
useEffect(() => {
  if (customNodes.length > 0) {
    addNodes(customNodes);
  }
}, [customNodes]);

// Update node execution status in real-time
useEffect(() => {
  executionDetails.node_executions.forEach((nodeExecution) => {
    updateNodeStatus(nodeExecution.node_id, nodeExecution.status);
    updateNodeExecutionResult(nodeExecution.node_id, nodeExecution);
  });
}, [executionDetails]);
```

---

### 4. **Custom Nodes** (`nodes/CustomNode/`)

Visual representation of blocks on the canvas.

**Structure:**

```
CustomNode
├── NodeContainer (selection, context menu, positioning)
├── NodeHeader (title, icon, badges)
├── FormCreator (input fields using FormRenderer)
├── NodeAdvancedToggle (show/hide advanced fields)
├── OutputHandler (output connection points)
└── NodeDataRenderer (execution results display)
```

**Node Data Structure:**

```typescript
type CustomNodeData = {
  hardcodedValues: Record<string, any>;        // User input values
  title: string;                               // Display name
  description: string;                         // Help text
  inputSchema: RJSFSchema;                     // Input form schema
  outputSchema: RJSFSchema;                    // Output schema
  uiType: BlockUIType;                         // UI variant (STANDARD, INPUT, OUTPUT, etc.)
  block_id: string;                            // Backend block ID
  status?: AgentExecutionStatus;               // Execution state
  nodeExecutionResult?: NodeExecutionResult;   // Output data
  costs: BlockCost[];                          // Cost information
  categories: BlockInfoCategoriesItem[];       // Categorization
};
```

**Special Node Types:**

- `BlockUIType.NOTE` - Sticky note (no execution)
- `BlockUIType.INPUT` - Graph input (no left handles)
- `BlockUIType.OUTPUT` - Graph output (no right handles)
- `BlockUIType.WEBHOOK` - Webhook trigger
- `BlockUIType.AGENT` - Sub-agent execution

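To make the handle rules above concrete, here is a small hypothetical helper; the real `CustomNode` derives handle visibility inside its render logic rather than through a standalone function.

```typescript
// Hypothetical helper deciding which handles a node renders based on its
// uiType, following the rules listed above (not the actual CustomNode code).
import { BlockUIType } from "@/app/(platform)/build/components/types";

export function getHandleVisibility(uiType: BlockUIType): {
  showInputHandles: boolean;
  showOutputHandles: boolean;
} {
  switch (uiType) {
    case BlockUIType.NOTE:
      return { showInputHandles: false, showOutputHandles: false }; // sticky note, never executed
    case BlockUIType.INPUT:
      return { showInputHandles: false, showOutputHandles: true }; // graph input: no left handles
    case BlockUIType.OUTPUT:
      return { showInputHandles: true, showOutputHandles: false }; // graph output: no right handles
    default:
      return { showInputHandles: true, showOutputHandles: true };
  }
}
```
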
---

### 5. **Custom Edges** (`edges/CustomEdge.tsx`)

Visual connections between nodes with animated data flow.

**Features:**

- **Animated Beads**: Show data flowing during execution
- **Type-aware Styling**: Different colors for different data types
- **Validation**: Prevents invalid connections
- **Deletion**: Click to remove connection

**Bead Animation System:**

```
Node Execution Complete
   ↓
EdgeStore.updateEdgeBeads() called
   ↓
Beads created with output data
   ↓
CSS animation moves beads along edge path
   ↓
Beads removed after animation
```

---

### 6. **Handlers (Connection Points)** (`handlers/NodeHandle.tsx`)

The connection points on nodes where edges attach.

**Handle ID Format:**

```typescript
// Input handle: input-{propertyName}
"input-text_content";

// Output handle: output-{propertyName}
"output-result";
```

**Connection Validation:**

- Type compatibility checking
- Prevents cycles
- Single input connection enforcement
- Multiple output connections allowed

---

## Data Flow: Adding a Block

```
1. User drags block from BlockMenu
   ↓
2. onDragOver handler validates drop
   ↓
3. onDrop handler called
   ↓
4. Parse block data from dataTransfer
   ↓
5. Calculate position: screenToFlowPosition()
   ↓
6. nodeStore.addBlock(blockData, {}, position)
   ↓
7. New CustomNode created with:
   - Unique ID (nodeCounter++)
   - Initial position
   - Empty hardcodedValues
   - Block schema
   ↓
8. Node added to nodes array
   ↓
9. React Flow renders CustomNode component
   ↓
10. FormCreator renders input form
```

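The drop handler behind steps 2-6 looks roughly like the sketch below (condensed from the handlers in `useFlow.ts`; the block-menu reopening and error toasts are omitted).

```tsx
// Inside useFlow: the drag-over / drop pair wired onto the ReactFlow canvas.
const onDragOver = useCallback((event: React.DragEvent) => {
  event.preventDefault();
  event.dataTransfer.dropEffect = "copy";
}, []);

const onDrop = useCallback(
  (event: React.DragEvent) => {
    event.preventDefault();
    const blockDataString = event.dataTransfer.getData("application/reactflow");
    if (!blockDataString) return;

    const blockData = JSON.parse(blockDataString) as BlockInfo;
    // Convert the mouse position into canvas coordinates.
    const position = screenToFlowPosition({ x: event.clientX, y: event.clientY });
    addBlock(blockData, {}, position);
  },
  [screenToFlowPosition, addBlock],
);
```
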
---

## Data Flow: Connecting Nodes

```
1. User drags from source handle to target handle
   ↓
2. React Flow calls onConnect()
   ↓
3. useCustomEdge hook processes:
   - Validate connection (type compatibility)
   - Generate edge ID
   - Check for cycles
   ↓
4. edgeStore.addEdge() creates CustomEdge
   ↓
5. Edge rendered on canvas
   ↓
6. Target node's input becomes "connected"
   ↓
7. FormRenderer hides input field (shows handle only)
```

---

## Data Flow: Graph Execution

```
1. User clicks "Run" in BuilderActions
   ↓
2. useSaveGraph hook saves current state
   ↓
3. API call: POST /execute
   ↓
4. Backend queues execution
   ↓
5. useFlowRealtime subscribes to WebSocket
   ↓
6. Execution updates stream in:
   - Node status changes (QUEUED → RUNNING → COMPLETED)
   - Node results
   ↓
7. useFlow updates:
   - nodeStore.updateNodeStatus()
   - nodeStore.updateNodeExecutionResult()
   - edgeStore.updateEdgeBeads() (animate data flow)
   ↓
8. UI reflects changes:
   - NodeExecutionBadge shows status
   - OutputHandler displays results
   - Edges animate with beads
```

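Steps 6-7 can be summarized as: every incoming node-execution event is filtered by execution ID and then fanned out to the stores. The sketch below abstracts away the actual WebSocket subscription API (provided by the backend client) and passes the store actions in explicitly; only the event-handling logic mirrors `useFlowRealtime.ts`.

```typescript
import type { NodeExecutionResult } from "@/app/api/__generated__/models/nodeExecutionResult";

type StoreActions = {
  updateNodeExecutionResult: (nodeId: string, result: NodeExecutionResult) => void;
  updateStatus: (nodeId: string, status: NodeExecutionResult["status"]) => void;
  updateEdgeBeads: (nodeId: string, result: NodeExecutionResult) => void;
};

export function applyNodeExecutionEvent(
  event: NodeExecutionResult & { graph_exec_id: string; node_id: string },
  currentExecutionID: string | null,
  actions: StoreActions,
) {
  // Ignore events that belong to a different execution than the one in the URL.
  if (event.graph_exec_id !== currentExecutionID) return;

  actions.updateNodeExecutionResult(event.node_id, event); // store output data on the node
  actions.updateStatus(event.node_id, event.status); // QUEUED → RUNNING → COMPLETED
  actions.updateEdgeBeads(event.node_id, event); // animate data flow along edges
}
```
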
---

## Data Flow: Saving a Graph

```
1. User edits form in CustomNode
   ↓
2. FormCreator calls handleChange()
   ↓
3. nodeStore.updateNodeData(nodeId, { hardcodedValues })
   ↓
4. historyStore.pushState() (for undo/redo)
   ↓
5. User clicks "Save"
   ↓
6. useSaveGraph hook:
   - nodeStore.getBackendNodes() → convert to backend format
   - edgeStore.getBackendLinks() → convert links
   ↓
7. API call: PUT /graph/:id
   ↓
8. Backend persists changes
```

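Step 6 amounts to assembling a payload from the two stores before calling the update endpoint. The sketch below is illustrative only: `saveGraphSketch` and the `{ nodes, links }` payload shape are placeholders, and the real call goes through the generated `PUT /v1/graphs/:id` mutation hook.

```typescript
// Illustrative sketch of the save step, assuming the store actions above.
import { useNodeStore } from "@/app/(platform)/build/stores/nodeStore";
import { useEdgeStore } from "@/app/(platform)/build/stores/edgeStore";

type UpdateGraphFn = (args: { graphId: string; data: unknown }) => Promise<unknown>;

export async function saveGraphSketch(graphId: string, updateGraph: UpdateGraphFn) {
  // Convert the canvas state back into the backend representation.
  const nodes = useNodeStore.getState().getBackendNodes();
  const links = useEdgeStore.getState().getBackendLinks();

  await updateGraph({ graphId, data: { nodes, links } });
}
```
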
---

## Key Utilities and Helpers

### **Position Calculation** (`components/helper.ts`)

```typescript
findFreePosition(existingNodes, width, margin);
// Finds empty space on canvas to place new block
// Uses grid-based collision detection
```

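One way such a grid-based search can be implemented is sketched below; the concrete widths, margins and scan limits are assumptions, not the values used by the real helper.

```typescript
import type { Node } from "@xyflow/react";

// Scan candidate grid cells until one does not overlap any existing node.
export function findFreePositionSketch(
  existingNodes: Node[],
  width = 500,
  margin = 100,
): { x: number; y: number } {
  const step = width + margin;
  for (let row = 0; row < 50; row++) {
    for (let col = 0; col < 50; col++) {
      const candidate = { x: col * step, y: row * step };
      const collides = existingNodes.some(
        (node) =>
          Math.abs(node.position.x - candidate.x) < step &&
          Math.abs(node.position.y - candidate.y) < step,
      );
      if (!collides) return candidate;
    }
  }
  // Fall back to stacking past the last node if the grid is saturated.
  return { x: existingNodes.length * step, y: 0 };
}
```
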
### **Node Conversion** (`components/helper.ts`)

```typescript
convertBlockInfoIntoCustomNodeData(blockInfo, hardcodedValues);
// Converts backend BlockInfo → CustomNodeData

convertNodesPlusBlockInfoIntoCustomNodes(node, blockInfo);
// Merges backend Node + BlockInfo → CustomNode (for loading)
```

### **Handle ID Generation** (`handlers/helpers.ts`)

```typescript
generateHandleId(fieldId);
// input-{fieldId} or output-{fieldId}
// Used to uniquely identify connection points
```

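A hedged sketch of the convention, including the reverse parse used when a connection is made; the real helper in `handlers/helpers.ts` may take its arguments in a different shape.

```typescript
type HandleDirection = "input" | "output";

// e.g. "input-text_content", "output-result"
export function generateHandleIdSketch(direction: HandleDirection, fieldId: string): string {
  return `${direction}-${fieldId}`;
}

export function parseHandleIdSketch(handleId: string): {
  direction: HandleDirection;
  fieldId: string;
} | null {
  const match = /^(input|output)-(.+)$/.exec(handleId);
  if (!match) return null;
  return { direction: match[1] as HandleDirection, fieldId: match[2] };
}
```
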
---

## Advanced Features

### **Copy/Paste** (`Flow/useCopyPaste.ts`)

- Duplicates selected nodes with offset positioning
- Preserves internal connections
- Does not copy external connections

### **Undo/Redo** (`stores/historyStore.ts`)

- Tracks state snapshots (nodes + edges)
- Maintains history stack
- Triggered on significant changes (add/remove/move)

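A minimal sketch of a snapshot-based history store is shown below, assuming zustand; the real `historyStore.ts` also exposes `initializeHistory()` and debounces pushes, and its snapshot and restore semantics may differ.

```typescript
import { create } from "zustand";

type Snapshot = { nodes: unknown[]; edges: unknown[] };

interface HistoryStoreSketch {
  past: Snapshot[];
  future: Snapshot[];
  pushState: (snapshot: Snapshot) => void;
  undo: () => Snapshot | undefined;
  redo: () => Snapshot | undefined;
}

export const useHistoryStoreSketch = create<HistoryStoreSketch>((set, get) => ({
  past: [],
  future: [],
  // A new change invalidates any redo history.
  pushState: (snapshot) =>
    set((state) => ({ past: [...state.past, snapshot], future: [] })),
  undo: () => {
    const { past, future } = get();
    const popped = past[past.length - 1];
    if (!popped) return undefined;
    set({ past: past.slice(0, -1), future: [popped, ...future] });
    // Return the snapshot the canvas should be restored to.
    return past[past.length - 2];
  },
  redo: () => {
    const { past, future } = get();
    const next = future[0];
    if (!next) return undefined;
    set({ past: [...past, next], future: future.slice(1) });
    return next;
  },
}));
```
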
### **Realtime Updates** (`Flow/useFlowRealtime.ts`)

- WebSocket connection for live execution updates
- Subscribes to execution events
- Updates node status and results in real-time

### **Advanced Fields Toggle**

- Each node tracks `showAdvanced` state
- Fields with `advanced: true` hidden by default
- Toggle button in node UI
- Connected fields always visible

---

## Integration Points

### **With Backend API**

```
GET  /v1/graphs/:id           → Load graph
GET  /v2/blocks               → Get block definitions
GET  /v1/executions/:id       → Get execution details
PUT  /v1/graphs/:id           → Save graph
POST /v1/graphs/:id/execute   → Run graph
WebSocket /ws                 → Real-time updates
```

### **With FormRenderer** (See ARCHITECTURE_INPUT_RENDERER.md)

```
CustomNode → FormCreator → FormRenderer
                              ↓
                      (RJSF-based form)
```

---

## Performance Considerations

1. **Memoization**: React.memo on CustomNode to prevent unnecessary re-renders
2. **Shallow Selectors**: useShallow() with Zustand to limit re-renders
3. **Lazy Loading**: Blocks fetched only when needed
4. **Debounced Saves**: Form changes debounced before triggering history
5. **Viewport Rendering**: React Flow can skip rendering off-screen elements (`onlyRenderVisibleElements`), which keeps large graphs responsive

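The shallow-selector pattern from point 2 appears throughout the builder; for example:

```tsx
// Select only the slice a component needs and compare it shallowly, so
// unrelated store updates don't re-render the component.
import { useShallow } from "zustand/react/shallow";
import { useNodeStore } from "@/app/(platform)/build/stores/nodeStore";

export function useAddBlockAction() {
  // Returns a stable reference to the action; the component won't re-render
  // when `nodes` or other unrelated state in the store changes.
  return useNodeStore(useShallow((state) => state.addBlock));
}
```
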
---

## Common Patterns

### **Adding a New Block Type**

1. Define `BlockUIType` enum value
2. Create backend block with `uiType` field
3. Add conditional rendering in CustomNode if needed
4. Update handle visibility logic if required

### **Adding a New Field Type**

1. Create custom field in input-renderer/fields
2. Register in fields/index.ts
3. Use in block's inputSchema

### **Debugging Tips**

- Check browser DevTools → React Flow state
- Inspect Zustand stores: `useNodeStore.getState()`
- Look for console errors in edge validation
- Check WebSocket connection for realtime issues

## Common Issues & Solutions

**Issue**: Nodes not appearing after load

- **Check**: `customNodes` computed correctly in useFlow
- **Check**: `addNodes()` called after data fetched

**Issue**: Form not updating node data

- **Check**: `handleChange` in FormCreator wired correctly
- **Check**: `updateNodeData` called with correct nodeId

**Issue**: Edges not connecting

- **Check**: Handle IDs match between source and target
- **Check**: Type compatibility validation
- **Check**: No cycles created

**Issue**: Execution status not updating

- **Check**: WebSocket connection active
- **Check**: `flowExecutionID` in URL
- **Check**: `updateNodeStatus` called in useFlow effect

---

## Summary

The FlowEditor is a sophisticated visual workflow builder that:

1. Uses **React Flow** for canvas rendering
2. Manages state with **Zustand stores** (nodes, edges, graph, control)
3. Loads data via **useFlow hook** from backend API
4. Renders blocks as **CustomNodes** with dynamic forms
5. Connects blocks via **CustomEdges** with validation
6. Executes graphs with **real-time status updates**
7. Saves changes back to backend in structured format

The architecture prioritizes:

- **Separation of concerns** (stores, hooks, components)
- **Type safety** (TypeScript throughout)
- **Performance** (memoization, shallow selectors)
- **Developer experience** (clear data flow, utilities)

@@ -1,4 +1,4 @@
|
||||
import { ReactFlow, Background, Controls } from "@xyflow/react";
|
||||
import { ReactFlow, Background } from "@xyflow/react";
|
||||
import NewControlPanel from "../../NewControlPanel/NewControlPanel";
|
||||
import CustomEdge from "../edges/CustomEdge";
|
||||
import { useFlow } from "./useFlow";
|
||||
@@ -13,6 +13,7 @@ import { BuilderActions } from "../../BuilderActions/BuilderActions";
|
||||
import { RunningBackground } from "./components/RunningBackground";
|
||||
import { useGraphStore } from "../../../stores/graphStore";
|
||||
import { useCopyPaste } from "./useCopyPaste";
|
||||
import { CustomControls } from "./components/CustomControl";
|
||||
|
||||
export const Flow = () => {
|
||||
const nodes = useNodeStore(useShallow((state) => state.nodes));
|
||||
@@ -20,10 +21,12 @@ export const Flow = () => {
|
||||
useShallow((state) => state.onNodesChange),
|
||||
);
|
||||
const nodeTypes = useMemo(() => ({ custom: CustomNode }), []);
|
||||
const edgeTypes = useMemo(() => ({ custom: CustomEdge }), []);
|
||||
const { edges, onConnect, onEdgesChange } = useCustomEdge();
|
||||
|
||||
// We use this hook to load the graph and convert them into custom nodes and edges.
|
||||
const { onDragOver, onDrop } = useFlow();
|
||||
const { onDragOver, onDrop, isFlowContentLoading, isLocked, setIsLocked } =
|
||||
useFlow();
|
||||
|
||||
// This hook is used for websocket realtime updates.
|
||||
useFlowRealtime();
|
||||
@@ -41,8 +44,6 @@ export const Flow = () => {
|
||||
window.removeEventListener("keydown", handleKeyDown);
|
||||
};
|
||||
}, [handleCopyPaste]);
|
||||
|
||||
const { isFlowContentLoading } = useFlow();
|
||||
const { isGraphRunning } = useGraphStore();
|
||||
return (
|
||||
<div className="flex h-full w-full dark:bg-slate-900">
|
||||
@@ -51,20 +52,23 @@ export const Flow = () => {
|
||||
nodes={nodes}
|
||||
onNodesChange={onNodesChange}
|
||||
nodeTypes={nodeTypes}
|
||||
edgeTypes={edgeTypes}
|
||||
edges={edges}
|
||||
onConnect={onConnect}
|
||||
onEdgesChange={onEdgesChange}
|
||||
edgeTypes={{ custom: CustomEdge }}
|
||||
maxZoom={2}
|
||||
minZoom={0.1}
|
||||
onDragOver={onDragOver}
|
||||
onDrop={onDrop}
|
||||
nodesDraggable={!isLocked}
|
||||
nodesConnectable={!isLocked}
|
||||
elementsSelectable={!isLocked}
|
||||
>
|
||||
<Background />
|
||||
<Controls />
|
||||
<CustomControls setIsLocked={setIsLocked} isLocked={isLocked} />
|
||||
<NewControlPanel />
|
||||
<BuilderActions />
|
||||
{isFlowContentLoading && <GraphLoadingBox />}
|
||||
{<GraphLoadingBox flowContentLoading={isFlowContentLoading} />}
|
||||
{isGraphRunning && <RunningBackground />}
|
||||
</ReactFlow>
|
||||
</div>
|
||||
|
||||
@@ -0,0 +1,80 @@
|
||||
import { useReactFlow } from "@xyflow/react";
|
||||
import { Button } from "@/components/atoms/Button/Button";
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipContent,
|
||||
TooltipTrigger,
|
||||
} from "@/components/atoms/Tooltip/BaseTooltip";
|
||||
import {
|
||||
FrameCornersIcon,
|
||||
MinusIcon,
|
||||
PlusIcon,
|
||||
} from "@phosphor-icons/react/dist/ssr";
|
||||
import { LockIcon, LockOpenIcon } from "lucide-react";
|
||||
import { memo } from "react";
|
||||
|
||||
export const CustomControls = memo(
|
||||
({
|
||||
setIsLocked,
|
||||
isLocked,
|
||||
}: {
|
||||
isLocked: boolean;
|
||||
setIsLocked: (isLocked: boolean) => void;
|
||||
}) => {
|
||||
const { zoomIn, zoomOut, fitView } = useReactFlow();
|
||||
|
||||
const controls = [
|
||||
{
|
||||
icon: <PlusIcon className="size-4" />,
|
||||
label: "Zoom In",
|
||||
onClick: () => zoomIn(),
|
||||
className: "h-10 w-10 border-none",
|
||||
},
|
||||
{
|
||||
icon: <MinusIcon className="size-4" />,
|
||||
label: "Zoom Out",
|
||||
onClick: () => zoomOut(),
|
||||
className: "h-10 w-10 border-none",
|
||||
},
|
||||
{
|
||||
icon: <FrameCornersIcon className="size-4" />,
|
||||
label: "Fit View",
|
||||
onClick: () => fitView({ padding: 0.2, duration: 800, maxZoom: 1 }),
|
||||
className: "h-10 w-10 border-none",
|
||||
},
|
||||
{
|
||||
icon: !isLocked ? (
|
||||
<LockOpenIcon className="size-4" />
|
||||
) : (
|
||||
<LockIcon className="size-4" />
|
||||
),
|
||||
label: "Toggle Lock",
|
||||
onClick: () => setIsLocked(!isLocked),
|
||||
className: `h-10 w-10 border-none ${isLocked ? "bg-zinc-100" : "bg-white"}`,
|
||||
},
|
||||
];
|
||||
|
||||
return (
|
||||
<div className="absolute bottom-4 left-4 z-10 flex flex-col items-center gap-2 rounded-full bg-white px-1 py-2 shadow-lg">
|
||||
{controls.map((control, index) => (
|
||||
<Tooltip key={index} delayDuration={300}>
|
||||
<TooltipTrigger asChild>
|
||||
<Button
|
||||
variant="icon"
|
||||
size={"small"}
|
||||
onClick={control.onClick}
|
||||
className={control.className}
|
||||
>
|
||||
{control.icon}
|
||||
<span className="sr-only">{control.label}</span>
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side="right">{control.label}</TooltipContent>
|
||||
</Tooltip>
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
},
|
||||
);
|
||||
|
||||
CustomControls.displayName = "CustomControls";
|
||||
@@ -1,15 +1,44 @@
|
||||
import {
|
||||
getPostV1CreateNewGraphMutationOptions,
|
||||
getPutV1UpdateGraphVersionMutationOptions,
|
||||
} from "@/app/api/__generated__/endpoints/graphs/graphs";
|
||||
import { Text } from "@/components/atoms/Text/Text";
|
||||
import { useIsMutating } from "@tanstack/react-query";
|
||||
|
||||
export const GraphLoadingBox = ({
|
||||
flowContentLoading,
|
||||
}: {
|
||||
flowContentLoading: boolean;
|
||||
}) => {
|
||||
const isCreating = useIsMutating({
|
||||
mutationKey: getPostV1CreateNewGraphMutationOptions().mutationKey,
|
||||
});
|
||||
const isUpdating = useIsMutating({
|
||||
mutationKey: getPutV1UpdateGraphVersionMutationOptions().mutationKey,
|
||||
});
|
||||
|
||||
const isSaving = !!(isCreating || isUpdating);
|
||||
|
||||
if (!flowContentLoading && !isSaving) {
|
||||
return null;
|
||||
}
|
||||
|
||||
export const GraphLoadingBox = () => {
|
||||
return (
|
||||
<div className="absolute left-[50%] top-[50%] z-[99] -translate-x-1/2 -translate-y-1/2">
|
||||
<div className="flex flex-col items-center gap-4 rounded-xlarge border border-gray-200 bg-white p-8 shadow-lg dark:border-gray-700 dark:bg-slate-800">
|
||||
<div className="relative h-12 w-12">
|
||||
<div className="absolute inset-0 animate-spin rounded-full border-4 border-violet-200 border-t-violet-500 dark:border-gray-700 dark:border-t-blue-400"></div>
|
||||
<div className="absolute inset-0 animate-spin rounded-full border-4 border-zinc-100 border-t-zinc-400 dark:border-gray-700 dark:border-t-blue-400"></div>
|
||||
</div>
|
||||
<div className="flex flex-col items-center gap-2">
|
||||
<Text variant="h4">Loading Flow</Text>
|
||||
<Text variant="small">Please wait while we load your graph...</Text>
|
||||
{isSaving && <Text variant="h4">Saving Graph</Text>}
|
||||
{flowContentLoading && <Text variant="h4">Loading Flow</Text>}
|
||||
|
||||
{isSaving && (
|
||||
<Text variant="small">Please wait while we save your graph...</Text>
|
||||
)}
|
||||
{flowContentLoading && (
|
||||
<Text variant="small">Please wait while we load your graph...</Text>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -2,154 +2,53 @@ export const RunningBackground = () => {
|
||||
return (
|
||||
<div className="absolute inset-0 h-full w-full">
|
||||
<style jsx>{`
|
||||
@keyframes rotateGradient {
|
||||
0% {
|
||||
border-image: linear-gradient(
|
||||
to right,
|
||||
#bc82f3 17%,
|
||||
#f5b9ea 24%,
|
||||
#8d99ff 35%,
|
||||
#aa6eee 58%,
|
||||
#ff6778 70%,
|
||||
#ffba71 81%,
|
||||
#c686ff 92%
|
||||
)
|
||||
1;
|
||||
}
|
||||
14.28% {
|
||||
border-image: linear-gradient(
|
||||
to right,
|
||||
#c686ff 17%,
|
||||
#bc82f3 24%,
|
||||
#f5b9ea 35%,
|
||||
#8d99ff 58%,
|
||||
#aa6eee 70%,
|
||||
#ff6778 81%,
|
||||
#ffba71 92%
|
||||
)
|
||||
1;
|
||||
}
|
||||
28.56% {
|
||||
border-image: linear-gradient(
|
||||
to right,
|
||||
#ffba71 17%,
|
||||
#c686ff 24%,
|
||||
#bc82f3 35%,
|
||||
#f5b9ea 58%,
|
||||
#8d99ff 70%,
|
||||
#aa6eee 81%,
|
||||
#ff6778 92%
|
||||
)
|
||||
1;
|
||||
}
|
||||
42.84% {
|
||||
border-image: linear-gradient(
|
||||
to right,
|
||||
#ff6778 17%,
|
||||
#ffba71 24%,
|
||||
#c686ff 35%,
|
||||
#bc82f3 58%,
|
||||
#f5b9ea 70%,
|
||||
#8d99ff 81%,
|
||||
#aa6eee 92%
|
||||
)
|
||||
1;
|
||||
}
|
||||
57.12% {
|
||||
border-image: linear-gradient(
|
||||
to right,
|
||||
#aa6eee 17%,
|
||||
#ff6778 24%,
|
||||
#ffba71 35%,
|
||||
#c686ff 58%,
|
||||
#bc82f3 70%,
|
||||
#f5b9ea 81%,
|
||||
#8d99ff 92%
|
||||
)
|
||||
1;
|
||||
}
|
||||
71.4% {
|
||||
border-image: linear-gradient(
|
||||
to right,
|
||||
#8d99ff 17%,
|
||||
#aa6eee 24%,
|
||||
#ff6778 35%,
|
||||
#ffba71 58%,
|
||||
#c686ff 70%,
|
||||
#bc82f3 81%,
|
||||
#f5b9ea 92%
|
||||
)
|
||||
1;
|
||||
}
|
||||
85.68% {
|
||||
border-image: linear-gradient(
|
||||
to right,
|
||||
#f5b9ea 17%,
|
||||
#8d99ff 24%,
|
||||
#aa6eee 35%,
|
||||
#ff6778 58%,
|
||||
#ffba71 70%,
|
||||
#c686ff 81%,
|
||||
#bc82f3 92%
|
||||
)
|
||||
1;
|
||||
}
|
||||
@keyframes pulse {
|
||||
0%,
|
||||
100% {
|
||||
border-image: linear-gradient(
|
||||
to right,
|
||||
#bc82f3 17%,
|
||||
#f5b9ea 24%,
|
||||
#8d99ff 35%,
|
||||
#aa6eee 58%,
|
||||
#ff6778 70%,
|
||||
#ffba71 81%,
|
||||
#c686ff 92%
|
||||
)
|
||||
1;
|
||||
opacity: 1;
|
||||
}
|
||||
50% {
|
||||
opacity: 0.5;
|
||||
}
|
||||
}
|
||||
.animate-gradient {
|
||||
animation: rotateGradient 8s linear infinite;
|
||||
.animate-pulse-border {
|
||||
animation: pulse 2s cubic-bezier(0.4, 0, 0.6, 1) infinite;
|
||||
}
|
||||
`}</style>
|
||||
<div
|
||||
className="animate-gradient absolute inset-0 bg-transparent blur-xl"
|
||||
className="animate-pulse-border absolute inset-0 bg-transparent blur-xl"
|
||||
style={{
|
||||
borderWidth: "15px",
|
||||
borderStyle: "solid",
|
||||
borderColor: "transparent",
|
||||
borderImage:
|
||||
"linear-gradient(to right, #BC82F3 17%, #F5B9EA 24%, #8D99FF 35%, #AA6EEE 58%, #FF6778 70%, #FFBA71 81%, #C686FF 92%) 1",
|
||||
borderImage: "linear-gradient(to right, #BC82F3, #BC82F3) 1",
|
||||
}}
|
||||
></div>
|
||||
<div
|
||||
className="animate-gradient absolute inset-0 bg-transparent blur-lg"
|
||||
className="animate-pulse-border absolute inset-0 bg-transparent blur-lg"
|
||||
style={{
|
||||
borderWidth: "10px",
|
||||
borderStyle: "solid",
|
||||
borderColor: "transparent",
|
||||
borderImage:
|
||||
"linear-gradient(to right, #BC82F3 17%, #F5B9EA 24%, #8D99FF 35%, #AA6EEE 58%, #FF6778 70%, #FFBA71 81%, #C686FF 92%) 1",
|
||||
borderImage: "linear-gradient(to right, #BC82F3, #BC82F3) 1",
|
||||
}}
|
||||
></div>
|
||||
<div
|
||||
className="animate-gradient absolute inset-0 bg-transparent blur-md"
|
||||
className="animate-pulse-border absolute inset-0 bg-transparent blur-md"
|
||||
style={{
|
||||
borderWidth: "6px",
|
||||
borderStyle: "solid",
|
||||
borderColor: "transparent",
|
||||
borderImage:
|
||||
"linear-gradient(to right, #BC82F3 17%, #F5B9EA 24%, #8D99FF 35%, #AA6EEE 58%, #FF6778 70%, #FFBA71 81%, #C686FF 92%) 1",
|
||||
borderImage: "linear-gradient(to right, #BC82F3, #BC82F3) 1",
|
||||
}}
|
||||
></div>
|
||||
<div
|
||||
className="animate-gradient absolute inset-0 bg-transparent blur-sm"
|
||||
className="animate-pulse-border absolute inset-0 bg-transparent blur-sm"
|
||||
style={{
|
||||
borderWidth: "6px",
|
||||
borderStyle: "solid",
|
||||
borderColor: "transparent",
|
||||
borderImage:
|
||||
"linear-gradient(to right, #BC82F3 17%, #F5B9EA 24%, #8D99FF 35%, #AA6EEE 58%, #FF6778 70%, #FFBA71 81%, #C686FF 92%) 1",
|
||||
borderImage: "linear-gradient(to right, #BC82F3, #BC82F3) 1",
|
||||
}}
|
||||
></div>
|
||||
</div>
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { useCallback, useEffect, useMemo } from "react";
|
||||
import { useCallback, useEffect, useMemo, useState } from "react";
|
||||
import { useGetV2GetSpecificBlocks } from "@/app/api/__generated__/endpoints/default/default";
|
||||
import {
|
||||
useGetV1GetExecutionDetails,
|
||||
@@ -16,8 +16,10 @@ import { useGraphStore } from "../../../stores/graphStore";
|
||||
import { AgentExecutionStatus } from "@/app/api/__generated__/models/agentExecutionStatus";
|
||||
import { useReactFlow } from "@xyflow/react";
|
||||
import { useControlPanelStore } from "../../../stores/controlPanelStore";
|
||||
import { useHistoryStore } from "../../../stores/historyStore";
|
||||
|
||||
export const useFlow = () => {
|
||||
const [isLocked, setIsLocked] = useState(false);
|
||||
const addNodes = useNodeStore(useShallow((state) => state.addNodes));
|
||||
const addLinks = useEdgeStore(useShallow((state) => state.addLinks));
|
||||
const updateNodeStatus = useNodeStore(
|
||||
@@ -32,7 +34,10 @@ export const useFlow = () => {
|
||||
const setGraphSchemas = useGraphStore(
|
||||
useShallow((state) => state.setGraphSchemas),
|
||||
);
|
||||
const { screenToFlowPosition } = useReactFlow();
|
||||
const updateEdgeBeads = useEdgeStore(
|
||||
useShallow((state) => state.updateEdgeBeads),
|
||||
);
|
||||
const { screenToFlowPosition, fitView } = useReactFlow();
|
||||
const addBlock = useNodeStore(useShallow((state) => state.addBlock));
|
||||
const setBlockMenuOpen = useControlPanelStore(
|
||||
useShallow((state) => state.setBlockMenuOpen),
|
||||
@@ -66,7 +71,9 @@ export const useFlow = () => {
|
||||
);
|
||||
|
||||
const nodes = graph?.nodes;
|
||||
const blockIds = nodes?.map((node) => node.block_id);
|
||||
const blockIds = nodes
|
||||
? Array.from(new Set(nodes.map((node) => node.block_id)))
|
||||
: undefined;
|
||||
|
||||
const { data: blocks, isLoading: isBlocksLoading } =
|
||||
useGetV2GetSpecificBlocks(
|
||||
@@ -92,34 +99,44 @@ export const useFlow = () => {
|
||||
});
|
||||
}, [nodes, blocks]);
|
||||
|
||||
// load graph schemas
|
||||
useEffect(() => {
|
||||
// load graph schemas
|
||||
if (graph) {
|
||||
setGraphSchemas(
|
||||
graph.input_schema as Record<string, any> | null,
|
||||
graph.credentials_input_schema as Record<string, any> | null,
|
||||
graph.output_schema as Record<string, any> | null,
|
||||
);
|
||||
}
|
||||
}, [graph]);
|
||||
|
||||
// adding nodes
|
||||
// adding nodes
|
||||
useEffect(() => {
|
||||
if (customNodes.length > 0) {
|
||||
useNodeStore.getState().setNodes([]);
|
||||
addNodes(customNodes);
|
||||
}
|
||||
}, [customNodes, addNodes]);
|
||||
|
||||
// adding links
|
||||
// adding links
|
||||
useEffect(() => {
|
||||
if (graph?.links) {
|
||||
useEdgeStore.getState().setConnections([]);
|
||||
useEdgeStore.getState().setEdges([]);
|
||||
addLinks(graph.links);
|
||||
}
|
||||
}, [graph?.links, addLinks]);
|
||||
|
||||
// update graph running status
|
||||
// update graph running status
|
||||
useEffect(() => {
|
||||
const isRunning =
|
||||
executionDetails?.status === AgentExecutionStatus.RUNNING ||
|
||||
executionDetails?.status === AgentExecutionStatus.QUEUED;
|
||||
setIsGraphRunning(isRunning);
|
||||
|
||||
// update node execution status in nodes
|
||||
setIsGraphRunning(isRunning);
|
||||
}, [executionDetails?.status, customNodes]);
|
||||
|
||||
// update node execution status in nodes
|
||||
useEffect(() => {
|
||||
if (
|
||||
executionDetails &&
|
||||
"node_executions" in executionDetails &&
|
||||
@@ -129,8 +146,10 @@ export const useFlow = () => {
|
||||
updateNodeStatus(nodeExecution.node_id, nodeExecution.status);
|
||||
});
|
||||
}
|
||||
}, [executionDetails, updateNodeStatus, customNodes]);
|
||||
|
||||
// update node execution results in nodes
|
||||
// update node execution results in nodes, also update edge beads
|
||||
useEffect(() => {
|
||||
if (
|
||||
executionDetails &&
|
||||
"node_executions" in executionDetails &&
|
||||
@@ -138,49 +157,76 @@ export const useFlow = () => {
|
||||
) {
|
||||
executionDetails.node_executions.forEach((nodeExecution) => {
|
||||
updateNodeExecutionResult(nodeExecution.node_id, nodeExecution);
|
||||
updateEdgeBeads(nodeExecution.node_id, nodeExecution);
|
||||
});
|
||||
}
|
||||
}, [customNodes, addNodes, graph?.links, executionDetails, updateNodeStatus]);
|
||||
}, [
|
||||
executionDetails,
|
||||
updateNodeExecutionResult,
|
||||
updateEdgeBeads,
|
||||
customNodes,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
if (customNodes.length > 0 && graph?.links) {
|
||||
const timer = setTimeout(() => {
|
||||
useHistoryStore.getState().initializeHistory();
|
||||
}, 100);
|
||||
return () => clearTimeout(timer);
|
||||
}
|
||||
}, [customNodes, graph?.links]);
|
||||
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
useNodeStore.getState().setNodes([]);
|
||||
useEdgeStore.getState().setConnections([]);
|
||||
useEdgeStore.getState().setEdges([]);
|
||||
useGraphStore.getState().reset();
|
||||
useEdgeStore.getState().resetEdgeBeads();
|
||||
setIsGraphRunning(false);
|
||||
};
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
fitView({ padding: 0.2, duration: 800, maxZoom: 2 });
|
||||
}, [fitView]);
|
||||
|
||||
// Drag and drop block from block menu
|
||||
const onDragOver = useCallback((event: React.DragEvent) => {
|
||||
event.preventDefault();
|
||||
event.dataTransfer.dropEffect = "copy";
|
||||
}, []);
|
||||
|
||||
const onDrop = async (event: React.DragEvent) => {
|
||||
event.preventDefault();
|
||||
const blockDataString = event.dataTransfer.getData("application/reactflow");
|
||||
if (!blockDataString) return;
|
||||
const onDrop = useCallback(
|
||||
async (event: React.DragEvent) => {
|
||||
event.preventDefault();
|
||||
const blockDataString = event.dataTransfer.getData(
|
||||
"application/reactflow",
|
||||
);
|
||||
if (!blockDataString) return;
|
||||
|
||||
try {
|
||||
const blockData = JSON.parse(blockDataString) as BlockInfo;
|
||||
const position = screenToFlowPosition({
|
||||
x: event.clientX,
|
||||
y: event.clientY,
|
||||
});
|
||||
addBlock(blockData, position);
|
||||
try {
|
||||
const blockData = JSON.parse(blockDataString) as BlockInfo;
|
||||
const position = screenToFlowPosition({
|
||||
x: event.clientX,
|
||||
y: event.clientY,
|
||||
});
|
||||
addBlock(blockData, {}, position);
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
setBlockMenuOpen(true);
|
||||
} catch (error) {
|
||||
console.error("Failed to drop block:", error);
|
||||
setBlockMenuOpen(true);
|
||||
}
|
||||
};
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
setBlockMenuOpen(true);
|
||||
} catch (error) {
|
||||
console.error("Failed to drop block:", error);
|
||||
setBlockMenuOpen(true);
|
||||
}
|
||||
},
|
||||
[screenToFlowPosition, addBlock, setBlockMenuOpen],
|
||||
);
|
||||
|
||||
return {
|
||||
isFlowContentLoading: isGraphLoading || isBlocksLoading,
|
||||
onDragOver,
|
||||
onDrop,
|
||||
isLocked,
|
||||
setIsLocked,
|
||||
};
|
||||
};
|
||||
|
||||
@@ -9,6 +9,7 @@ import { useShallow } from "zustand/react/shallow";
|
||||
import { NodeExecutionResult } from "@/app/api/__generated__/models/nodeExecutionResult";
|
||||
import { AgentExecutionStatus } from "@/app/api/__generated__/models/agentExecutionStatus";
|
||||
import { useGraphStore } from "../../../stores/graphStore";
|
||||
import { useEdgeStore } from "../../../stores/edgeStore";
|
||||
|
||||
export const useFlowRealtime = () => {
|
||||
const api = useBackendAPI();
|
||||
@@ -21,6 +22,12 @@ export const useFlowRealtime = () => {
|
||||
const setIsGraphRunning = useGraphStore(
|
||||
useShallow((state) => state.setIsGraphRunning),
|
||||
);
|
||||
const updateEdgeBeads = useEdgeStore(
|
||||
useShallow((state) => state.updateEdgeBeads),
|
||||
);
|
||||
const resetEdgeBeads = useEdgeStore(
|
||||
useShallow((state) => state.resetEdgeBeads),
|
||||
);
|
||||
|
||||
const [{ flowExecutionID, flowID }] = useQueryStates({
|
||||
flowExecutionID: parseAsString,
|
||||
@@ -34,12 +41,12 @@ export const useFlowRealtime = () => {
|
||||
if (data.graph_exec_id != flowExecutionID) {
|
||||
return;
|
||||
}
|
||||
// TODO: Update the states of nodes
|
||||
updateNodeExecutionResult(
|
||||
data.node_id,
|
||||
data as unknown as NodeExecutionResult,
|
||||
);
|
||||
updateStatus(data.node_id, data.status);
|
||||
updateEdgeBeads(data.node_id, data as unknown as NodeExecutionResult);
|
||||
},
|
||||
);
|
||||
|
||||
@@ -82,8 +89,9 @@ export const useFlowRealtime = () => {
|
||||
deregisterNodeExecutionEvent();
|
||||
deregisterGraphExecutionSubscription();
|
||||
deregisterGraphExecutionStatusEvent();
|
||||
resetEdgeBeads();
|
||||
};
|
||||
}, [api, flowExecutionID]);
|
||||
}, [api, flowExecutionID, resetEdgeBeads]);
|
||||
|
||||
return {};
|
||||
};
|
||||
|
||||
@@ -1,17 +1,30 @@
|
||||
import { Button } from "@/components/atoms/Button/Button";
|
||||
import {
|
||||
BaseEdge,
|
||||
Edge as XYEdge,
|
||||
EdgeLabelRenderer,
|
||||
EdgeProps,
|
||||
getBezierPath,
|
||||
} from "@xyflow/react";
|
||||
|
||||
import { useEdgeStore } from "@/app/(platform)/build/stores/edgeStore";
|
||||
import { XIcon } from "@phosphor-icons/react";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { NodeExecutionResult } from "@/lib/autogpt-server-api";
|
||||
import { JSBeads } from "./components/JSBeads";
|
||||
|
||||
export type CustomEdgeData = {
|
||||
isStatic?: boolean;
|
||||
beadUp?: number;
|
||||
beadDown?: number;
|
||||
beadData?: Map<string, NodeExecutionResult["status"]>;
|
||||
};
|
||||
|
||||
export type CustomEdge = XYEdge<CustomEdgeData, "custom">;
|
||||
import { memo } from "react";
|
||||
|
||||
const CustomEdge = ({
|
||||
id,
|
||||
data,
|
||||
sourceX,
|
||||
sourceY,
|
||||
targetX,
|
||||
@@ -20,8 +33,8 @@ const CustomEdge = ({
|
||||
targetPosition,
|
||||
markerEnd,
|
||||
selected,
|
||||
}: EdgeProps) => {
|
||||
const removeConnection = useEdgeStore((state) => state.removeConnection);
|
||||
}: EdgeProps<CustomEdge>) => {
|
||||
const removeConnection = useEdgeStore((state) => state.removeEdge);
|
||||
const [edgePath, labelX, labelY] = getBezierPath({
|
||||
sourceX,
|
||||
sourceY,
|
||||
@@ -31,14 +44,27 @@ const CustomEdge = ({
|
||||
targetPosition,
|
||||
});
|
||||
|
||||
const isStatic = data?.isStatic ?? false;
|
||||
const beadUp = data?.beadUp ?? 0;
|
||||
const beadDown = data?.beadDown ?? 0;
|
||||
|
||||
return (
|
||||
<>
|
||||
<BaseEdge
|
||||
path={edgePath}
|
||||
markerEnd={markerEnd}
|
||||
className={
|
||||
selected ? "[stroke:#555]" : "[stroke:#555]80 hover:[stroke:#555]"
|
||||
}
|
||||
className={cn(
|
||||
isStatic && "!stroke-[1.5px] [stroke-dasharray:6]",
|
||||
selected
|
||||
? "stroke-zinc-800"
|
||||
: "stroke-zinc-500/50 hover:stroke-zinc-500",
|
||||
)}
|
||||
/>
|
||||
<JSBeads
|
||||
beadUp={beadUp}
|
||||
beadDown={beadDown}
|
||||
edgePath={edgePath}
|
||||
beadsKey={`beads-${id}-${sourceX}-${sourceY}-${targetX}-${targetY}`}
|
||||
/>
|
||||
<EdgeLabelRenderer>
|
||||
<Button
|
||||
|
||||
@@ -0,0 +1,167 @@
|
||||
// This component uses JS animation [It's replica of legacy builder]
|
||||
// Problem - It lags at real time updates, because of state change
|
||||
|
||||
import { useCallback, useEffect, useRef, useState } from "react";
|
||||
import {
|
||||
getLengthOfPathInPixels,
|
||||
getPointAtT,
|
||||
getTForDistance,
|
||||
setTargetPositions,
|
||||
} from "../helpers";
|
||||
|
||||
const BEAD_DIAMETER = 10;
|
||||
const ANIMATION_DURATION = 500;
|
||||
|
||||
interface Bead {
|
||||
t: number;
|
||||
targetT: number;
|
||||
startTime: number;
|
||||
}
|
||||
|
||||
interface BeadsProps {
|
||||
beadUp: number;
|
||||
beadDown: number;
|
||||
edgePath: string;
|
||||
beadsKey: string;
|
||||
isStatic?: boolean;
|
||||
}
|
||||
|
||||
export const JSBeads = ({
|
||||
beadUp,
|
||||
beadDown,
|
||||
edgePath,
|
||||
beadsKey,
|
||||
}: BeadsProps) => {
|
||||
const [beads, setBeads] = useState<{
|
||||
beads: Bead[];
|
||||
created: number;
|
||||
destroyed: number;
|
||||
}>({ beads: [], created: 0, destroyed: 0 });
|
||||
|
||||
const beadsRef = useRef(beads);
|
||||
const totalLength = getLengthOfPathInPixels(edgePath);
|
||||
const animationFrameRef = useRef<number | null>(null);
|
||||
const lastFrameTimeRef = useRef<number>(0);
|
||||
|
||||
const pathRef = useRef<SVGPathElement | null>(null);
|
||||
|
||||
const getPointAtTWrapper = (t: number) => {
|
||||
return getPointAtT(t, edgePath, pathRef);
|
||||
};
|
||||
|
||||
const getTForDistanceWrapper = (distanceFromEnd: number) => {
|
||||
return getTForDistance(distanceFromEnd, totalLength);
|
||||
};
|
||||
|
||||
const setTargetPositionsWrapper = useCallback(
|
||||
(beads: Bead[]) => {
|
||||
return setTargetPositions(beads, BEAD_DIAMETER, getTForDistanceWrapper);
|
||||
},
|
||||
[getTForDistanceWrapper],
|
||||
);
|
||||
|
||||
beadsRef.current = beads;
|
||||
|
||||
useEffect(() => {
|
||||
pathRef.current = null;
|
||||
}, [edgePath]);
|
||||
|
||||
useEffect(() => {
|
||||
if (
|
||||
beadUp === 0 &&
|
||||
beadDown === 0 &&
|
||||
(beads.created > 0 || beads.destroyed > 0)
|
||||
) {
|
||||
setBeads({ beads: [], created: 0, destroyed: 0 });
|
||||
return;
|
||||
}
|
||||
|
||||
// Adding beads
|
||||
if (beadUp > beads.created) {
|
||||
setBeads(({ beads, created, destroyed }) => {
|
||||
const newBeads = [];
|
||||
for (let i = 0; i < beadUp - created; i++) {
|
||||
newBeads.push({ t: 0, targetT: 0, startTime: Date.now() });
|
||||
}
|
||||
|
||||
const b = setTargetPositionsWrapper([...beads, ...newBeads]);
|
||||
return { beads: b, created: beadUp, destroyed };
|
||||
});
|
||||
}
|
||||
|
||||
const animate = (currentTime: number) => {
|
||||
const beads = beadsRef.current;
|
||||
|
||||
if (
|
||||
(beadUp === beads.created && beads.created === beads.destroyed) ||
|
||||
beads.beads.every((bead) => bead.t >= bead.targetT)
|
||||
) {
|
||||
animationFrameRef.current = null;
|
||||
return;
|
||||
}
|
||||
|
||||
const deltaTime = lastFrameTimeRef.current
|
||||
? currentTime - lastFrameTimeRef.current
|
||||
: 16;
|
||||
lastFrameTimeRef.current = currentTime;
|
||||
|
||||
setBeads(({ beads, created, destroyed }) => {
|
||||
let destroyedCount = 0;
|
||||
|
||||
const newBeads = beads
|
||||
.map((bead) => {
|
||||
const progressIncrement = deltaTime / ANIMATION_DURATION;
|
||||
const t = Math.min(
|
||||
bead.t + bead.targetT * progressIncrement,
|
||||
bead.targetT,
|
||||
);
|
||||
|
||||
return { ...bead, t };
|
||||
})
|
||||
.filter((bead, index) => {
|
||||
const removeCount = beadDown - destroyed;
|
||||
if (bead.t >= bead.targetT && index < removeCount) {
|
||||
destroyedCount++;
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
|
||||
return {
|
||||
beads: setTargetPositionsWrapper(newBeads),
|
||||
created,
|
||||
destroyed: destroyed + destroyedCount,
|
||||
};
|
||||
});
|
||||
|
||||
animationFrameRef.current = requestAnimationFrame(animate);
|
||||
};
|
||||
|
||||
lastFrameTimeRef.current = 0;
|
||||
animationFrameRef.current = requestAnimationFrame(animate);
|
||||
|
||||
return () => {
|
||||
if (animationFrameRef.current !== null) {
|
||||
cancelAnimationFrame(animationFrameRef.current);
|
||||
animationFrameRef.current = null;
|
||||
}
|
||||
};
|
||||
}, [beadUp, beadDown, setTargetPositionsWrapper]);
|
||||
|
||||
return (
|
||||
<>
|
||||
{beads.beads.map((bead, index) => {
|
||||
const pos = getPointAtTWrapper(bead.t);
|
||||
return (
|
||||
<circle
|
||||
key={`${beadsKey}-${index}`}
|
||||
cx={pos.x}
|
||||
cy={pos.y}
|
||||
r={BEAD_DIAMETER / 2}
|
||||
fill="#8d8d95"
|
||||
/>
|
||||
);
|
||||
})}
|
||||
</>
|
||||
);
|
||||
};
|
||||
@@ -0,0 +1,85 @@
|
||||
// This component uses SVG animation [Will see in future if we can make it work]
|
||||
// Problem - it doesn't work with real time updates
|
||||
|
||||
import { useEffect, useMemo, useRef, useState } from "react";
|
||||
import { getLengthOfPathInPixels } from "../helpers";
|
||||
|
||||
const BEAD_SPACING = 12;
|
||||
const BASE_STOP_DISTANCE = 15;
|
||||
const ANIMATION_DURATION = 0.5;
|
||||
const ANIMATION_DELAY_PER_BEAD = 0.05;
|
||||
|
||||
interface BeadsProps {
|
||||
beadUp: number;
|
||||
beadDown: number;
|
||||
edgePath: string;
|
||||
beadsKey: string;
|
||||
}
|
||||
|
||||
export const SVGBeads = ({
|
||||
beadUp,
|
||||
beadDown,
|
||||
edgePath,
|
||||
beadsKey,
|
||||
}: BeadsProps) => {
|
||||
const [removedBeads, setRemovedBeads] = useState<Set<number>>(new Set());
|
||||
const animateRef = useRef<SVGAElement | null>(null);
|
||||
|
||||
const visibleBeads = useMemo(() => {
|
||||
return Array.from({ length: Math.max(0, beadUp) }, (_, i) => i).filter(
|
||||
(index) => !removedBeads.has(index),
|
||||
);
|
||||
}, [beadUp, removedBeads]);
|
||||
|
||||
const totalLength = getLengthOfPathInPixels(edgePath);
|
||||
|
||||
useEffect(() => {
|
||||
setRemovedBeads(new Set());
|
||||
}, [beadUp]);
|
||||
|
||||
useEffect(() => {
|
||||
const elem = animateRef.current;
|
||||
if (elem) {
|
||||
const handleEnd = () => {
|
||||
if (beadDown > 0) {
|
||||
const beadsToRemove = Array.from(
|
||||
{ length: beadDown },
|
||||
(_, i) => beadUp - beadDown + i,
|
||||
);
|
||||
|
||||
beadsToRemove.forEach((beadIndex) => {
|
||||
setRemovedBeads((prev) => new Set(prev).add(beadIndex));
|
||||
});
|
||||
}
|
||||
};
|
||||
elem.addEventListener("endEvent", handleEnd);
|
||||
return () => elem.removeEventListener("endEvent", handleEnd);
|
||||
}
|
||||
}, [beadUp, beadDown]);
|
||||
|
||||
return (
|
||||
<>
|
||||
{visibleBeads.map((index) => {
|
||||
const stopDistance = BASE_STOP_DISTANCE + index * BEAD_SPACING;
|
||||
const beadStopPoint =
|
||||
Math.max(0, totalLength - stopDistance) / totalLength;
|
||||
|
||||
return (
|
||||
<circle key={`${beadsKey}-${index}`} r="5" fill="#8d8d95">
|
||||
<animateMotion
|
||||
ref={animateRef}
|
||||
dur={`${ANIMATION_DURATION}s`}
|
||||
repeatCount="1"
|
||||
fill="freeze"
|
||||
path={edgePath}
|
||||
begin={`${index * ANIMATION_DELAY_PER_BEAD}s`}
|
||||
keyPoints={`0;${beadStopPoint}`}
|
||||
keyTimes="0;1"
|
||||
calcMode="linear"
|
||||
/>
|
||||
</circle>
|
||||
);
|
||||
})}
|
||||
</>
|
||||
);
|
||||
};
|
||||
@@ -10,3 +10,53 @@ export const convertConnectionsToBackendLinks = (
|
||||
source_name: c.sourceHandle || "",
|
||||
sink_name: c.targetHandle || "",
|
||||
}));
|
||||
|
||||
// ------------------- SVG Beads helpers -------------------
|
||||
|
||||
export const getLengthOfPathInPixels = (path: string) => {
|
||||
const pathElement = document.createElementNS(
|
||||
"http://www.w3.org/2000/svg",
|
||||
"path",
|
||||
);
|
||||
pathElement.setAttribute("d", path);
|
||||
return pathElement.getTotalLength();
|
||||
};
|
||||
|
||||
// ------------------- JS Beads helpers -------------------
|
||||
|
||||
export const getPointAtT = (
|
||||
t: number,
|
||||
edgePath: string,
|
||||
pathRef: React.MutableRefObject<SVGPathElement | null>,
|
||||
) => {
|
||||
if (!pathRef.current) {
|
||||
const tempPath = document.createElementNS(
|
||||
"http://www.w3.org/2000/svg",
|
||||
"path",
|
||||
);
|
||||
tempPath.setAttribute("d", edgePath);
|
||||
pathRef.current = tempPath;
|
||||
}
|
||||
|
||||
const totalLength = pathRef.current.getTotalLength();
|
||||
const point = pathRef.current.getPointAtLength(t * totalLength);
|
||||
return { x: point.x, y: point.y };
|
||||
};
|
||||
|
||||
export const getTForDistance = (
|
||||
distanceFromEnd: number,
|
||||
totalLength: number,
|
||||
) => {
|
||||
return Math.max(0, Math.min(1, 1 - distanceFromEnd / totalLength));
|
||||
};
|
||||
|
||||
export const setTargetPositions = (
|
||||
beads: { t: number; targetT: number; startTime: number }[],
|
||||
beadDiameter: number,
|
||||
getTForDistanceFunc: (distanceFromEnd: number) => number,
|
||||
) => {
|
||||
return beads.map((bead, index) => ({
|
||||
...bead,
|
||||
targetT: getTForDistanceFunc(beadDiameter * (index + 1)),
|
||||
}));
|
||||
};
|
||||
|
||||
@@ -1,35 +1,12 @@
|
||||
import {
|
||||
Connection as RFConnection,
|
||||
Edge as RFEdge,
|
||||
MarkerType,
|
||||
EdgeChange,
|
||||
} from "@xyflow/react";
|
||||
import { Connection as RFConnection, EdgeChange } from "@xyflow/react";
|
||||
import { useEdgeStore } from "@/app/(platform)/build/stores/edgeStore";
|
||||
import { useCallback, useMemo } from "react";
|
||||
import { useShallow } from "zustand/react/shallow";
|
||||
import { useCallback } from "react";
|
||||
import { useNodeStore } from "../../../stores/nodeStore";
|
||||
|
||||
export const useCustomEdge = () => {
|
||||
const connections = useEdgeStore(useShallow((s) => s.connections));
|
||||
const addConnection = useEdgeStore((s) => s.addConnection);
|
||||
const removeConnection = useEdgeStore((s) => s.removeConnection);
|
||||
|
||||
const edges: RFEdge[] = useMemo(
|
||||
() =>
|
||||
connections.map((c) => ({
|
||||
id: c.edge_id,
|
||||
type: "custom",
|
||||
source: c.source,
|
||||
target: c.target,
|
||||
sourceHandle: c.sourceHandle,
|
||||
targetHandle: c.targetHandle,
|
||||
markerEnd: {
|
||||
type: MarkerType.ArrowClosed,
|
||||
strokeWidth: 2,
|
||||
color: "#555",
|
||||
},
|
||||
})),
|
||||
[connections],
|
||||
);
|
||||
const edges = useEdgeStore((s) => s.edges);
|
||||
const addEdge = useEdgeStore((s) => s.addEdge);
|
||||
const removeEdge = useEdgeStore((s) => s.removeEdge);
|
||||
|
||||
const onConnect = useCallback(
|
||||
(conn: RFConnection) => {
|
||||
@@ -40,31 +17,42 @@ export const useCustomEdge = () => {
|
||||
!conn.targetHandle
|
||||
)
|
||||
return;
|
||||
const exists = connections.some(
|
||||
(c) =>
|
||||
c.source === conn.source &&
|
||||
c.target === conn.target &&
|
||||
c.sourceHandle === conn.sourceHandle &&
|
||||
c.targetHandle === conn.targetHandle,
|
||||
|
||||
const exists = edges.some(
|
||||
(e) =>
|
||||
e.source === conn.source &&
|
||||
e.target === conn.target &&
|
||||
e.sourceHandle === conn.sourceHandle &&
|
||||
e.targetHandle === conn.targetHandle,
|
||||
);
|
||||
if (exists) return;
|
||||
addConnection({
|
||||
|
||||
const nodes = useNodeStore.getState().nodes;
|
||||
const isStatic = nodes.find((n) => n.id === conn.source)?.data
|
||||
?.staticOutput;
|
||||
|
||||
addEdge({
|
||||
source: conn.source,
|
||||
target: conn.target,
|
||||
sourceHandle: conn.sourceHandle,
|
||||
targetHandle: conn.targetHandle,
|
||||
data: {
|
||||
isStatic,
|
||||
},
|
||||
});
|
||||
},
|
||||
[connections, addConnection],
|
||||
[edges, addEdge],
|
||||
);
|
||||
|
||||
const onEdgesChange = useCallback(
|
||||
(changes: EdgeChange[]) => {
|
||||
changes.forEach((ch) => {
|
||||
if (ch.type === "remove") removeConnection(ch.id);
|
||||
changes.forEach((change) => {
|
||||
if (change.type === "remove") {
|
||||
removeEdge(change.id);
|
||||
}
|
||||
});
|
||||
},
|
||||
[removeConnection],
|
||||
[removeEdge],
|
||||
);
|
||||
|
||||
return { edges, onConnect, onEdgesChange };
|
||||
|
||||
@@ -2,12 +2,23 @@ import React from "react";
|
||||
import { Node as XYNode, NodeProps } from "@xyflow/react";
|
||||
import { RJSFSchema } from "@rjsf/utils";
|
||||
import { BlockUIType } from "../../../types";
|
||||
import { StickyNoteBlock } from "./StickyNoteBlock";
|
||||
import { StickyNoteBlock } from "./components/StickyNoteBlock";
|
||||
import { BlockInfoCategoriesItem } from "@/app/api/__generated__/models/blockInfoCategoriesItem";
|
||||
import { StandardNodeBlock } from "./StandardNodeBlock";
|
||||
import { BlockCost } from "@/app/api/__generated__/models/blockCost";
|
||||
import { AgentExecutionStatus } from "@/app/api/__generated__/models/agentExecutionStatus";
|
||||
import { NodeExecutionResult } from "@/app/api/__generated__/models/nodeExecutionResult";
|
||||
import { NodeContainer } from "./components/NodeContainer";
|
||||
import { NodeHeader } from "./components/NodeHeader";
|
||||
import { FormCreator } from "../FormCreator";
|
||||
import { preprocessInputSchema } from "@/components/renderers/input-renderer/utils/input-schema-pre-processor";
|
||||
import { OutputHandler } from "../OutputHandler";
|
||||
import { NodeAdvancedToggle } from "./components/NodeAdvancedToggle";
|
||||
import { NodeDataRenderer } from "./components/NodeOutput/NodeOutput";
|
||||
import { NodeExecutionBadge } from "./components/NodeExecutionBadge";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { WebhookDisclaimer } from "./components/WebhookDisclaimer";
|
||||
import { AyrshareConnectButton } from "./components/AyrshareConnectButton";
|
||||
import { NodeModelMetadata } from "@/app/api/__generated__/models/nodeModelMetadata";
|
||||
|
||||
export type CustomNodeData = {
|
||||
hardcodedValues: {
|
||||
@@ -21,9 +32,11 @@ export type CustomNodeData = {
|
||||
block_id: string;
|
||||
status?: AgentExecutionStatus;
|
||||
nodeExecutionResult?: NodeExecutionResult;
|
||||
staticOutput?: boolean;
|
||||
// TODO : We need better type safety for the following backend fields.
|
||||
costs: BlockCost[];
|
||||
categories: BlockInfoCategoriesItem[];
|
||||
metadata?: NodeModelMetadata;
|
||||
};
|
||||
|
||||
export type CustomNode = XYNode<CustomNodeData, "custom">;
|
||||
@@ -31,17 +44,59 @@ export type CustomNode = XYNode<CustomNodeData, "custom">;
|
||||
export const CustomNode: React.FC<NodeProps<CustomNode>> = React.memo(
|
||||
({ data, id: nodeId, selected }) => {
|
||||
if (data.uiType === BlockUIType.NOTE) {
|
||||
return <StickyNoteBlock selected={selected} data={data} id={nodeId} />;
|
||||
}
|
||||
|
||||
if (data.uiType === BlockUIType.STANDARD) {
|
||||
return (
|
||||
<StandardNodeBlock data={data} selected={selected} nodeId={nodeId} />
|
||||
<StickyNoteBlock data={data} selected={selected} nodeId={nodeId} />
|
||||
);
|
||||
}
|
||||
|
||||
const showHandles =
|
||||
data.uiType !== BlockUIType.INPUT &&
|
||||
data.uiType !== BlockUIType.WEBHOOK &&
|
||||
data.uiType !== BlockUIType.WEBHOOK_MANUAL;
|
||||
|
||||
const isWebhook = [
|
||||
BlockUIType.WEBHOOK,
|
||||
BlockUIType.WEBHOOK_MANUAL,
|
||||
].includes(data.uiType);
|
||||
|
||||
const isAyrshare = data.uiType === BlockUIType.AYRSHARE;
|
||||
|
||||
const inputSchema =
|
||||
data.uiType === BlockUIType.AGENT
|
||||
? (data.hardcodedValues.input_schema ?? {})
|
||||
: data.inputSchema;
|
||||
|
||||
const outputSchema =
|
||||
data.uiType === BlockUIType.AGENT
|
||||
? (data.hardcodedValues.output_schema ?? {})
|
||||
: data.outputSchema;
|
||||
|
||||
// Currently all blockTypes design are similar - that's why i am using the same component for all of them
|
||||
// If in future - if we need some drastic change in some blockTypes design - we can create separate components for them
|
||||
return (
|
||||
<StandardNodeBlock data={data} selected={selected} nodeId={nodeId} />
|
||||
<NodeContainer selected={selected} nodeId={nodeId}>
|
||||
<div className="rounded-xlarge bg-white">
|
||||
<NodeHeader data={data} nodeId={nodeId} />
|
||||
{isWebhook && <WebhookDisclaimer nodeId={nodeId} />}
|
||||
{isAyrshare && <AyrshareConnectButton />}
|
||||
<FormCreator
|
||||
jsonSchema={preprocessInputSchema(inputSchema)}
|
||||
nodeId={nodeId}
|
||||
uiType={data.uiType}
|
||||
className={cn(
|
||||
"bg-white pr-6",
|
||||
isWebhook && "pointer-events-none opacity-50",
|
||||
)}
|
||||
showHandles={showHandles}
|
||||
/>
|
||||
<NodeAdvancedToggle nodeId={nodeId} />
|
||||
{data.uiType != BlockUIType.OUTPUT && (
|
||||
<OutputHandler outputSchema={outputSchema} nodeId={nodeId} />
|
||||
)}
|
||||
<NodeDataRenderer nodeId={nodeId} />
|
||||
</div>
|
||||
<NodeExecutionBadge nodeId={nodeId} />
|
||||
</NodeContainer>
|
||||
);
|
||||
},
|
||||
);
|
||||
|
||||
@@ -1,92 +0,0 @@
|
||||
import { beautifyString, cn } from "@/lib/utils";
|
||||
import { CustomNodeData } from "./CustomNode";
|
||||
import { Text } from "@/components/atoms/Text/Text";
|
||||
import { FormCreator } from "../FormCreator";
|
||||
import { preprocessInputSchema } from "@/components/renderers/input-renderer/utils/input-schema-pre-processor";
|
||||
import { Switch } from "@/components/atoms/Switch/Switch";
|
||||
import { useNodeStore } from "@/app/(platform)/build/stores/nodeStore";
|
||||
import { OutputHandler } from "../OutputHandler";
|
||||
import { NodeCost } from "./components/NodeCost";
|
||||
import { NodeBadges } from "./components/NodeBadges";
|
||||
import { NodeExecutionBadge } from "./components/NodeExecutionBadge";
|
||||
import { nodeStyleBasedOnStatus } from "./helpers";
|
||||
import { NodeDataRenderer } from "./components/NodeOutput/NodeOutput";
|
||||
import { NodeContextMenu } from "./components/NodeContextMenu";
|
||||
|
||||
type StandardNodeBlockType = {
|
||||
data: CustomNodeData;
|
||||
selected: boolean;
|
||||
nodeId: string;
|
||||
};
|
||||
export const StandardNodeBlock = ({
|
||||
data,
|
||||
selected,
|
||||
nodeId,
|
||||
}: StandardNodeBlockType) => {
|
||||
const showAdvanced = useNodeStore(
|
||||
(state) => state.nodeAdvancedStates[nodeId] || false,
|
||||
);
|
||||
const setShowAdvanced = useNodeStore((state) => state.setShowAdvanced);
|
||||
const status = useNodeStore((state) => state.getNodeStatus(nodeId));
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
"z-12 max-w-[370px] rounded-xlarge shadow-lg shadow-slate-900/5 ring-1 ring-slate-200/60 backdrop-blur-sm",
|
||||
selected && "shadow-2xl ring-2 ring-slate-200",
|
||||
status && nodeStyleBasedOnStatus[status],
|
||||
)}
|
||||
>
|
||||
<div className="rounded-xlarge bg-white">
|
||||
{/* Header */}
|
||||
<div className="flex h-auto items-start justify-between gap-2 rounded-xlarge border-b border-slate-200/50 bg-gradient-to-r from-slate-50/80 to-white/90 px-4 py-4">
|
||||
<div className="flex flex-col gap-2">
|
||||
{/* Upper section */}
|
||||
<div className="flex items-center gap-2">
|
||||
<Text
|
||||
variant="large-semibold"
|
||||
className="tracking-tight text-slate-800"
|
||||
>
|
||||
{beautifyString(data.title)}
|
||||
</Text>
|
||||
<Text variant="small" className="!font-medium !text-slate-500">
|
||||
#{nodeId.split("-")[0]}
|
||||
</Text>
|
||||
</div>
|
||||
{/* Lower section */}
|
||||
<div className="flex space-x-2">
|
||||
<NodeCost blockCosts={data.costs} nodeId={nodeId} />
|
||||
<NodeBadges categories={data.categories} />
|
||||
</div>
|
||||
</div>
|
||||
<NodeContextMenu
|
||||
subGraphID={data.hardcodedValues?.graph_id}
|
||||
nodeId={nodeId}
|
||||
/>
|
||||
</div>
|
||||
{/* Input Handles */}
|
||||
<div className="bg-white pr-6">
|
||||
<FormCreator
|
||||
jsonSchema={preprocessInputSchema(data.inputSchema)}
|
||||
nodeId={nodeId}
|
||||
uiType={data.uiType}
|
||||
/>
|
||||
</div>
|
||||
{/* Advanced Button */}
|
||||
<div className="flex items-center justify-between gap-2 border-t border-slate-200/50 bg-white px-5 py-3.5">
|
||||
<Text variant="body" className="font-medium text-slate-700">
|
||||
Advanced
|
||||
</Text>
|
||||
<Switch
|
||||
onCheckedChange={(checked) => setShowAdvanced(nodeId, checked)}
|
||||
checked={showAdvanced}
|
||||
/>
|
||||
</div>
|
||||
{/* Output Handles */}
|
||||
<OutputHandler outputSchema={data.outputSchema} nodeId={nodeId} />
|
||||
|
||||
<NodeDataRenderer nodeId={nodeId} />
|
||||
</div>
|
||||
{status && <NodeExecutionBadge status={status} />}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
@@ -0,0 +1,58 @@
|
||||
"use client";
|
||||
|
||||
import React, { useState } from "react";
|
||||
|
||||
import { Key } from "lucide-react";
|
||||
import { getV1GetAyrshareSsoUrl } from "@/app/api/__generated__/endpoints/integrations/integrations";
|
||||
import { useToast } from "@/components/molecules/Toast/use-toast";
|
||||
import { Button } from "@/components/atoms/Button/Button";
|
||||
|
||||
// This SSO button is not a part of inputSchema - that's why we are not rendering it using Input renderer
|
||||
export const AyrshareConnectButton = () => {
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const { toast } = useToast();
|
||||
|
||||
const handleSSOLogin = async () => {
|
||||
setIsLoading(true);
|
||||
try {
|
||||
const { data, status } = await getV1GetAyrshareSsoUrl();
|
||||
if (status !== 200) {
|
||||
throw new Error(data.detail);
|
||||
}
|
||||
const popup = window.open(data.sso_url, "_blank", "popup=true");
|
||||
if (!popup) {
|
||||
throw new Error(
|
||||
"Please allow popups for this site to be able to login with Ayrshare",
|
||||
);
|
||||
}
|
||||
toast({
|
||||
title: "Success",
|
||||
description: "Please complete the authentication in the popup window",
|
||||
});
|
||||
} catch (error) {
|
||||
toast({
|
||||
title: "Error",
|
||||
description: `Error getting SSO URL: ${error}`,
|
||||
variant: "destructive",
|
||||
});
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
// TODO :Need better UI to show user which social media accounts are connected
|
||||
<div className="mt-4 flex flex-col gap-2 px-4">
|
||||
<Button
|
||||
type="button"
|
||||
onClick={handleSSOLogin}
|
||||
disabled={isLoading}
|
||||
className="h-fit w-full py-2"
|
||||
loading={isLoading}
|
||||
leftIcon={<Key className="mr-2 h-4 w-4" />}
|
||||
>
|
||||
Connect Social Media Accounts
|
||||
</Button>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
@@ -0,0 +1,21 @@
|
||||
import { useNodeStore } from "@/app/(platform)/build/stores/nodeStore";
|
||||
import { Switch } from "@/components/atoms/Switch/Switch";
|
||||
import { Text } from "@/components/atoms/Text/Text";
|
||||
|
||||
export const NodeAdvancedToggle = ({ nodeId }: { nodeId: string }) => {
|
||||
const showAdvanced = useNodeStore(
|
||||
(state) => state.nodeAdvancedStates[nodeId] || false,
|
||||
);
|
||||
const setShowAdvanced = useNodeStore((state) => state.setShowAdvanced);
|
||||
return (
|
||||
<div className="flex items-center justify-between gap-2 rounded-b-xlarge border-t border-slate-200/50 bg-white px-5 py-3.5">
|
||||
<Text variant="body" className="font-medium text-slate-700">
|
||||
Advanced
|
||||
</Text>
|
||||
<Switch
|
||||
onCheckedChange={(checked) => setShowAdvanced(nodeId, checked)}
|
||||
checked={showAdvanced}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
@@ -0,0 +1,30 @@
|
||||
import { cn } from "@/lib/utils";
|
||||
import { nodeStyleBasedOnStatus } from "../helpers";
|
||||
|
||||
import { useNodeStore } from "@/app/(platform)/build/stores/nodeStore";
|
||||
import { useShallow } from "zustand/react/shallow";
|
||||
|
||||
export const NodeContainer = ({
|
||||
children,
|
||||
nodeId,
|
||||
selected,
|
||||
}: {
|
||||
children: React.ReactNode;
|
||||
nodeId: string;
|
||||
selected: boolean;
|
||||
}) => {
|
||||
const status = useNodeStore(
|
||||
useShallow((state) => state.getNodeStatus(nodeId)),
|
||||
);
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
"z-12 max-w-[370px] rounded-xlarge ring-1 ring-slate-200/60",
|
||||
selected && "shadow-lg ring-2 ring-slate-200",
|
||||
status && nodeStyleBasedOnStatus[status],
|
||||
)}
|
||||
>
|
||||
{children}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
@@ -47,7 +47,7 @@ export const NodeContextMenu = ({
|
||||
>
|
||||
<DropdownMenuItem onClick={handleCopy} className="hover:rounded-xlarge">
|
||||
<Copy className="mr-2 h-4 w-4" />
|
||||
Copy
|
||||
Copy Node
|
||||
</DropdownMenuItem>
|
||||
|
||||
{subGraphID && (
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
import { useNodeStore } from "@/app/(platform)/build/stores/nodeStore";
|
||||
import { AgentExecutionStatus } from "@/app/api/__generated__/models/agentExecutionStatus";
|
||||
import { Badge } from "@/components/__legacy__/ui/badge";
|
||||
import { LoadingSpinner } from "@/components/__legacy__/ui/loading";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { useShallow } from "zustand/react/shallow";
|
||||
|
||||
const statusStyles: Record<AgentExecutionStatus, string> = {
|
||||
INCOMPLETE: "text-slate-700 border-slate-400",
|
||||
@@ -12,11 +14,11 @@ const statusStyles: Record<AgentExecutionStatus, string> = {
|
||||
FAILED: "text-red-700 border-red-400",
|
||||
};
|
||||
|
||||
export const NodeExecutionBadge = ({
|
||||
status,
|
||||
}: {
|
||||
status: AgentExecutionStatus;
|
||||
}) => {
|
||||
export const NodeExecutionBadge = ({ nodeId }: { nodeId: string }) => {
|
||||
const status = useNodeStore(
|
||||
useShallow((state) => state.getNodeStatus(nodeId)),
|
||||
);
|
||||
if (!status) return null;
|
||||
return (
|
||||
<div className="flex items-center justify-end rounded-b-xl py-2 pr-4">
|
||||
<Badge
|
||||
|
||||
@@ -0,0 +1,105 @@
|
||||
import { Text } from "@/components/atoms/Text/Text";
|
||||
import { beautifyString, cn } from "@/lib/utils";
|
||||
import { NodeCost } from "./NodeCost";
|
||||
import { NodeBadges } from "./NodeBadges";
|
||||
import { NodeContextMenu } from "./NodeContextMenu";
|
||||
import { CustomNodeData } from "../CustomNode";
|
||||
import { useNodeStore } from "@/app/(platform)/build/stores/nodeStore";
|
||||
import { useState } from "react";
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipContent,
|
||||
TooltipProvider,
|
||||
TooltipTrigger,
|
||||
} from "@/components/atoms/Tooltip/BaseTooltip";
|
||||
|
||||
export const NodeHeader = ({
|
||||
data,
|
||||
nodeId,
|
||||
}: {
|
||||
data: CustomNodeData;
|
||||
nodeId: string;
|
||||
}) => {
|
||||
const updateNodeData = useNodeStore((state) => state.updateNodeData);
|
||||
const title = (data.metadata?.customized_name as string) || data.title;
|
||||
const [isEditingTitle, setIsEditingTitle] = useState(false);
|
||||
const [editedTitle, setEditedTitle] = useState(title);
|
||||
|
||||
const handleTitleEdit = () => {
|
||||
updateNodeData(nodeId, {
|
||||
metadata: { ...data.metadata, customized_name: editedTitle },
|
||||
});
|
||||
setIsEditingTitle(false);
|
||||
};
|
||||
|
||||
const handleTitleKeyDown = (e: React.KeyboardEvent<HTMLInputElement>) => {
|
||||
if (e.key === "Enter") handleTitleEdit();
|
||||
if (e.key === "Escape") {
|
||||
setEditedTitle(title);
|
||||
setIsEditingTitle(false);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="flex h-auto flex-col gap-1 rounded-xlarge border-b border-slate-200/50 bg-gradient-to-r from-slate-50/80 to-white/90 px-4 py-4 pt-3">
|
||||
{/* Title row with context menu */}
|
||||
<div className="flex items-start justify-between gap-2">
|
||||
<div className="flex min-w-0 flex-1 items-center gap-2">
|
||||
<div
|
||||
onDoubleClick={() => setIsEditingTitle(true)}
|
||||
className="flex w-fit min-w-0 flex-1 items-center hover:cursor-pointer"
|
||||
>
|
||||
{isEditingTitle ? (
|
||||
<input
|
||||
id="node-title-input"
|
||||
value={editedTitle}
|
||||
onChange={(e) => setEditedTitle(e.target.value)}
|
||||
autoFocus
|
||||
className={cn(
|
||||
"m-0 h-fit w-full border-none bg-transparent p-0 focus:outline-none focus:ring-0",
|
||||
"font-sans text-[1rem] font-semibold leading-[1.5rem] text-zinc-800",
|
||||
)}
|
||||
onBlur={handleTitleEdit}
|
||||
onKeyDown={handleTitleKeyDown}
|
||||
/>
|
||||
) : (
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<div>
|
||||
<Text variant="large-semibold" className="line-clamp-1">
|
||||
{beautifyString(title)}
|
||||
</Text>
|
||||
</div>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p>{beautifyString(title)}</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div className="flex items-center gap-2">
|
||||
<Text
|
||||
variant="small"
|
||||
className="shrink-0 !font-medium !text-slate-500"
|
||||
>
|
||||
#{nodeId.split("-")[0]}
|
||||
</Text>
|
||||
<NodeContextMenu
|
||||
subGraphID={data.hardcodedValues?.graph_id}
|
||||
nodeId={nodeId}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Metadata row */}
|
||||
<div className="flex flex-wrap items-center gap-2">
|
||||
<NodeCost blockCosts={data.costs} nodeId={nodeId} />
|
||||
<NodeBadges categories={data.categories} />
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
@@ -1,19 +1,19 @@
|
||||
import { useMemo } from "react";
|
||||
import { FormCreator } from "../FormCreator";
|
||||
import { FormCreator } from "../../FormCreator";
|
||||
import { preprocessInputSchema } from "@/components/renderers/input-renderer/utils/input-schema-pre-processor";
|
||||
import { CustomNodeData } from "./CustomNode";
|
||||
import { CustomNodeData } from "../CustomNode";
|
||||
import { Text } from "@/components/atoms/Text/Text";
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
type StickyNoteBlockType = {
|
||||
selected: boolean;
|
||||
data: CustomNodeData;
|
||||
id: string;
|
||||
nodeId: string;
|
||||
};
|
||||
|
||||
export const StickyNoteBlock = ({ data, id }: StickyNoteBlockType) => {
|
||||
export const StickyNoteBlock = ({ data, nodeId }: StickyNoteBlockType) => {
|
||||
const { angle, color } = useMemo(() => {
|
||||
const hash = id.split("").reduce((acc, char) => {
|
||||
const hash = nodeId.split("").reduce((acc, char) => {
|
||||
return char.charCodeAt(0) + ((acc << 5) - acc);
|
||||
}, 0);
|
||||
|
||||
@@ -31,7 +31,7 @@ export const StickyNoteBlock = ({ data, id }: StickyNoteBlockType) => {
|
||||
angle: (hash % 7) - 3,
|
||||
color: colors[Math.abs(hash) % colors.length],
|
||||
};
|
||||
}, [id]);
|
||||
}, [nodeId]);
|
||||
|
||||
return (
|
||||
<div
|
||||
@@ -42,11 +42,11 @@ export const StickyNoteBlock = ({ data, id }: StickyNoteBlockType) => {
|
||||
style={{ transform: `rotate(${angle}deg)` }}
|
||||
>
|
||||
<Text variant="h3" className="tracking-tight text-slate-800">
|
||||
Notes #{id.split("-")[0]}
|
||||
Notes #{nodeId.split("-")[0]}
|
||||
</Text>
|
||||
<FormCreator
|
||||
jsonSchema={preprocessInputSchema(data.inputSchema)}
|
||||
nodeId={id}
|
||||
nodeId={nodeId}
|
||||
uiType={data.uiType}
|
||||
/>
|
||||
</div>
|
||||
@@ -0,0 +1,58 @@
|
||||
import { Alert, AlertDescription } from "@/components/molecules/Alert/Alert";
|
||||
import { Text } from "@/components/atoms/Text/Text";
|
||||
import Link from "next/link";
|
||||
import { useGetV2GetLibraryAgentByGraphId } from "@/app/api/__generated__/endpoints/library/library";
|
||||
import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
|
||||
import { useQueryStates, parseAsString } from "nuqs";
|
||||
import { isValidUUID } from "@/app/(platform)/chat/helpers";
|
||||
|
||||
export const WebhookDisclaimer = ({ nodeId }: { nodeId: string }) => {
|
||||
const [{ flowID }] = useQueryStates({
|
||||
flowID: parseAsString,
|
||||
});
|
||||
|
||||
// for a single agentId, we are fetching everything - need to make it better in the future
|
||||
const { data: libraryAgent } = useGetV2GetLibraryAgentByGraphId(
|
||||
flowID ?? "",
|
||||
{},
|
||||
{
|
||||
query: {
|
||||
select: (x) => {
|
||||
return x.data as LibraryAgent;
|
||||
},
|
||||
enabled: !!flowID,
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
const isNodeSaved = isValidUUID(nodeId);
|
||||
|
||||
return (
|
||||
<>
|
||||
<div className="px-4 pt-4">
|
||||
<Alert className="mb-3 rounded-xlarge">
|
||||
<AlertDescription>
|
||||
<Text variant="small-medium">
|
||||
You can set up and manage this trigger in your{" "}
|
||||
<Link
|
||||
href={
|
||||
libraryAgent
|
||||
? `/library/agents/${libraryAgent.id}`
|
||||
: "/library"
|
||||
}
|
||||
className="underline"
|
||||
>
|
||||
Agent Library
|
||||
</Link>
|
||||
{!isNodeSaved && " (after saving the graph)"}.
|
||||
</Text>
|
||||
</AlertDescription>
|
||||
</Alert>
|
||||
</div>
|
||||
|
||||
<Text variant="small" className="mb-4 ml-6 !text-purple-700">
|
||||
Below inputs are only for display purposes and cannot be edited.
|
||||
</Text>
|
||||
</>
|
||||
);
|
||||
};
|
||||
@@ -10,10 +10,14 @@ export const FormCreator = React.memo(
|
||||
jsonSchema,
|
||||
nodeId,
|
||||
uiType,
|
||||
showHandles = true,
|
||||
className,
|
||||
}: {
|
||||
jsonSchema: RJSFSchema;
|
||||
nodeId: string;
|
||||
uiType: BlockUIType;
|
||||
showHandles?: boolean;
|
||||
className?: string;
|
||||
}) => {
|
||||
const updateNodeData = useNodeStore((state) => state.updateNodeData);
|
||||
const getHardCodedValues = useNodeStore(
|
||||
@@ -29,18 +33,20 @@ export const FormCreator = React.memo(
|
||||
const initialValues = getHardCodedValues(nodeId);
|
||||
|
||||
return (
|
||||
<FormRenderer
|
||||
jsonSchema={jsonSchema}
|
||||
handleChange={handleChange}
|
||||
uiSchema={uiSchema}
|
||||
initialValues={initialValues}
|
||||
formContext={{
|
||||
nodeId: nodeId,
|
||||
uiType: uiType,
|
||||
showHandles: true,
|
||||
size: "small",
|
||||
}}
|
||||
/>
|
||||
<div className={className}>
|
||||
<FormRenderer
|
||||
jsonSchema={jsonSchema}
|
||||
handleChange={handleChange}
|
||||
uiSchema={uiSchema}
|
||||
initialValues={initialValues}
|
||||
formContext={{
|
||||
nodeId: nodeId,
|
||||
uiType: uiType,
|
||||
showHandles: showHandles,
|
||||
size: "small",
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
},
|
||||
);
|
||||
|
||||
@@ -7,6 +7,8 @@ import { PlusIcon } from "@phosphor-icons/react";
|
||||
import { BlockInfo } from "@/app/api/__generated__/models/blockInfo";
|
||||
import { useControlPanelStore } from "../../../stores/controlPanelStore";
|
||||
import { blockDragPreviewStyle } from "./style";
|
||||
import { useReactFlow } from "@xyflow/react";
|
||||
import { useNodeStore } from "../../../stores/nodeStore";
|
||||
interface Props extends ButtonHTMLAttributes<HTMLButtonElement> {
|
||||
title?: string;
|
||||
description?: string;
|
||||
@@ -29,6 +31,23 @@ export const Block: BlockComponent = ({
|
||||
const setBlockMenuOpen = useControlPanelStore(
|
||||
(state) => state.setBlockMenuOpen,
|
||||
);
|
||||
const { setViewport } = useReactFlow();
|
||||
const { addBlock } = useNodeStore();
|
||||
|
||||
const handleClick = () => {
|
||||
const customNode = addBlock(blockData);
|
||||
setTimeout(() => {
|
||||
setViewport(
|
||||
{
|
||||
x: -customNode.position.x * 0.8 + window.innerWidth / 2,
|
||||
y: -customNode.position.y * 0.8 + (window.innerHeight - 400) / 2,
|
||||
zoom: 0.8,
|
||||
},
|
||||
{ duration: 500 },
|
||||
);
|
||||
}, 50);
|
||||
};
|
||||
|
||||
const handleDragStart = (e: React.DragEvent<HTMLButtonElement>) => {
|
||||
e.dataTransfer.effectAllowed = "copy";
|
||||
e.dataTransfer.setData("application/reactflow", JSON.stringify(blockData));
|
||||
@@ -55,6 +74,7 @@ export const Block: BlockComponent = ({
|
||||
className,
|
||||
)}
|
||||
onDragStart={handleDragStart}
|
||||
onClick={handleClick}
|
||||
{...rest}
|
||||
>
|
||||
<div className="flex flex-1 flex-col items-start gap-0.5">
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import React from "react";
|
||||
import { Block } from "../Block";
|
||||
import { blockMenuContainerStyle } from "../style";
|
||||
import { useNodeStore } from "../../../../stores/nodeStore";
|
||||
import { BlockInfo } from "@/app/api/__generated__/models/blockInfo";
|
||||
|
||||
interface BlocksListProps {
|
||||
@@ -13,7 +12,6 @@ export const BlocksList: React.FC<BlocksListProps> = ({
|
||||
blocks,
|
||||
loading = false,
|
||||
}) => {
|
||||
const { addBlock } = useNodeStore();
|
||||
if (loading) {
|
||||
return (
|
||||
<div className={blockMenuContainerStyle}>
|
||||
@@ -28,7 +26,6 @@ export const BlocksList: React.FC<BlocksListProps> = ({
|
||||
key={block.id}
|
||||
title={block.name}
|
||||
description={block.description}
|
||||
onClick={() => addBlock(block)}
|
||||
blockData={block}
|
||||
/>
|
||||
));
|
||||
|
||||
@@ -11,7 +11,6 @@ import { useBlockMenuStore } from "../../../../stores/blockMenuStore";
|
||||
import { blockMenuContainerStyle } from "../style";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { NoSearchResult } from "../NoSearchResult";
|
||||
import { useNodeStore } from "../../../../stores/nodeStore";
|
||||
|
||||
export const BlockMenuSearch = () => {
|
||||
const {
|
||||
@@ -22,7 +21,6 @@ export const BlockMenuSearch = () => {
|
||||
searchLoading,
|
||||
} = useBlockMenuSearch();
|
||||
const { searchQuery } = useBlockMenuStore();
|
||||
const addBlock = useNodeStore((state) => state.addBlock);
|
||||
|
||||
if (searchLoading) {
|
||||
return (
|
||||
@@ -75,7 +73,6 @@ export const BlockMenuSearch = () => {
|
||||
title={data.name}
|
||||
highlightedText={searchQuery}
|
||||
description={data.description}
|
||||
onClick={() => addBlock(data)}
|
||||
blockData={data}
|
||||
/>
|
||||
);
|
||||
|
||||
@@ -7,7 +7,8 @@ const SEARCH_DEBOUNCE_MS = 300;
|
||||
export const useBlockMenuSearchBar = () => {
|
||||
const inputRef = useRef<HTMLInputElement>(null);
|
||||
const [localQuery, setLocalQuery] = useState("");
|
||||
const { setSearchQuery, setSearchId, searchId } = useBlockMenuStore();
|
||||
const { setSearchQuery, setSearchId, searchId, searchQuery } =
|
||||
useBlockMenuStore();
|
||||
|
||||
const searchIdRef = useRef(searchId);
|
||||
useEffect(() => {
|
||||
@@ -39,6 +40,10 @@ export const useBlockMenuSearchBar = () => {
|
||||
debouncedSetSearchQuery.cancel();
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
setLocalQuery(searchQuery);
|
||||
}, []);
|
||||
|
||||
return {
|
||||
handleClear,
|
||||
inputRef,
|
||||
|
||||
@@ -5,7 +5,6 @@ import { Skeleton } from "@/components/__legacy__/ui/skeleton";
|
||||
import { useIntegrationBlocks } from "./useIntegrationBlocks";
|
||||
import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard";
|
||||
import { InfiniteScroll } from "@/components/contextual/InfiniteScroll/InfiniteScroll";
|
||||
import { useNodeStore } from "../../../../stores/nodeStore";
|
||||
import { useBlockMenuStore } from "../../../../stores/blockMenuStore";
|
||||
|
||||
export const IntegrationBlocks = () => {
|
||||
@@ -21,7 +20,6 @@ export const IntegrationBlocks = () => {
|
||||
error,
|
||||
refetch,
|
||||
} = useIntegrationBlocks();
|
||||
const addBlock = useNodeStore((state) => state.addBlock);
|
||||
|
||||
if (blocksLoading) {
|
||||
return (
|
||||
@@ -93,8 +91,8 @@ export const IntegrationBlocks = () => {
|
||||
key={block.id}
|
||||
title={block.name}
|
||||
description={block.description}
|
||||
blockData={block}
|
||||
icon_url={`/integrations/${integration}.png`}
|
||||
onClick={() => addBlock(block)}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
|
||||
@@ -5,12 +5,18 @@ import Image from "next/image";
|
||||
import React, { ButtonHTMLAttributes } from "react";
|
||||
import { highlightText } from "./helpers";
|
||||
import { Button } from "@/components/atoms/Button/Button";
|
||||
import { useControlPanelStore } from "../../../stores/controlPanelStore";
|
||||
import { useReactFlow } from "@xyflow/react";
|
||||
import { useNodeStore } from "../../../stores/nodeStore";
|
||||
import { BlockInfo } from "@/app/api/__generated__/models/blockInfo";
|
||||
import { blockDragPreviewStyle } from "./style";
|
||||
|
||||
interface Props extends ButtonHTMLAttributes<HTMLButtonElement> {
|
||||
title?: string;
|
||||
description?: string;
|
||||
icon_url?: string;
|
||||
highlightedText?: string;
|
||||
blockData: BlockInfo;
|
||||
}
|
||||
|
||||
interface IntegrationBlockComponent extends React.FC<Props> {
|
||||
@@ -23,16 +29,57 @@ export const IntegrationBlock: IntegrationBlockComponent = ({
|
||||
description,
|
||||
className,
|
||||
highlightedText,
|
||||
blockData,
|
||||
...rest
|
||||
}) => {
|
||||
const setBlockMenuOpen = useControlPanelStore(
|
||||
(state) => state.setBlockMenuOpen,
|
||||
);
|
||||
const { setViewport } = useReactFlow();
|
||||
const { addBlock } = useNodeStore();
|
||||
|
||||
const handleClick = () => {
|
||||
const customNode = addBlock(blockData);
|
||||
setTimeout(() => {
|
||||
setViewport(
|
||||
{
|
||||
x: -customNode.position.x * 0.8 + window.innerWidth / 2,
|
||||
y: -customNode.position.y * 0.8 + (window.innerHeight - 400) / 2,
|
||||
zoom: 0.8,
|
||||
},
|
||||
{ duration: 500 },
|
||||
);
|
||||
}, 50);
|
||||
};
|
||||
|
||||
const handleDragStart = (e: React.DragEvent<HTMLButtonElement>) => {
|
||||
e.dataTransfer.effectAllowed = "copy";
|
||||
e.dataTransfer.setData("application/reactflow", JSON.stringify(blockData));
|
||||
|
||||
setBlockMenuOpen(false);
|
||||
|
||||
// preview when user drags it
|
||||
const dragPreview = document.createElement("div");
|
||||
dragPreview.style.cssText = blockDragPreviewStyle;
|
||||
dragPreview.textContent = beautifyString(title || "");
|
||||
|
||||
document.body.appendChild(dragPreview);
|
||||
e.dataTransfer.setDragImage(dragPreview, 0, 0);
|
||||
|
||||
setTimeout(() => document.body.removeChild(dragPreview), 0);
|
||||
};
|
||||
|
||||
return (
|
||||
<Button
|
||||
draggable={true}
|
||||
variant={"ghost"}
|
||||
className={cn(
|
||||
"group flex h-16 w-full min-w-[7.5rem] items-center justify-start gap-3 whitespace-normal rounded-[0.75rem] bg-zinc-50 px-[0.875rem] py-[0.625rem] text-start shadow-none",
|
||||
"hover:cursor-default hover:bg-zinc-100 focus:ring-0 active:bg-zinc-100 active:ring-1 active:ring-zinc-300 disabled:cursor-not-allowed",
|
||||
className,
|
||||
)}
|
||||
onDragStart={handleDragStart}
|
||||
onClick={handleClick}
|
||||
{...rest}
|
||||
>
|
||||
<div className="relative h-[2.625rem] w-[2.625rem] rounded-[0.5rem] bg-white">
|
||||
|
||||
@@ -16,6 +16,9 @@ export const MyAgentsContent = () => {
|
||||
error,
|
||||
status,
|
||||
refetch,
|
||||
handleAddBlock,
|
||||
isGettingAgentDetails,
|
||||
selectedAgentId,
|
||||
} = useMyAgentsContent();
|
||||
|
||||
if (agentLoading) {
|
||||
@@ -59,7 +62,9 @@ export const MyAgentsContent = () => {
|
||||
title={agent.name}
|
||||
edited_time={agent.updated_at}
|
||||
version={agent.graph_version}
|
||||
isLoading={isGettingAgentDetails && selectedAgentId === agent.id}
|
||||
image_url={agent.image_url}
|
||||
onClick={() => handleAddBlock(agent)}
|
||||
/>
|
||||
))}
|
||||
</InfiniteScroll>
|
||||
|
||||
@@ -1,7 +1,22 @@
|
||||
import { useGetV2ListLibraryAgentsInfinite } from "@/app/api/__generated__/endpoints/library/library";
|
||||
import {
|
||||
getV2GetLibraryAgent,
|
||||
useGetV2ListLibraryAgentsInfinite,
|
||||
} from "@/app/api/__generated__/endpoints/library/library";
|
||||
import { LibraryAgentResponse } from "@/app/api/__generated__/models/libraryAgentResponse";
|
||||
import { useState } from "react";
|
||||
import { convertLibraryAgentIntoCustomNode } from "../helpers";
|
||||
import { useNodeStore } from "@/app/(platform)/build/stores/nodeStore";
|
||||
import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
|
||||
import { useShallow } from "zustand/react/shallow";
|
||||
import { useReactFlow } from "@xyflow/react";
|
||||
|
||||
export const useMyAgentsContent = () => {
|
||||
const [selectedAgentId, setSelectedAgentId] = useState<string | null>(null);
|
||||
const [isGettingAgentDetails, setIsGettingAgentDetails] = useState(false);
|
||||
const addBlock = useNodeStore(useShallow((state) => state.addBlock));
|
||||
const { setViewport } = useReactFlow();
|
||||
// This endpoints is not giving info about inputSchema and outputSchema
|
||||
// Will create new endpoint for this
|
||||
const {
|
||||
data: agents,
|
||||
fetchNextPage,
|
||||
@@ -38,6 +53,43 @@ export const useMyAgentsContent = () => {
|
||||
|
||||
const status = agents?.pages[0]?.status;
|
||||
|
||||
const handleAddBlock = async (agent: LibraryAgent) => {
|
||||
setSelectedAgentId(agent.id);
|
||||
setIsGettingAgentDetails(true);
|
||||
|
||||
try {
|
||||
const response = await getV2GetLibraryAgent(agent.id);
|
||||
|
||||
if (!response.data) {
|
||||
console.error("Failed to get agent details", selectedAgentId, agent.id);
|
||||
return;
|
||||
}
|
||||
|
||||
const { input_schema, output_schema } = response.data as LibraryAgent;
|
||||
const { block, hardcodedValues } = convertLibraryAgentIntoCustomNode(
|
||||
agent,
|
||||
input_schema,
|
||||
output_schema,
|
||||
);
|
||||
const customNode = addBlock(block, hardcodedValues);
|
||||
setTimeout(() => {
|
||||
setViewport(
|
||||
{
|
||||
x: -customNode.position.x * 0.8 + window.innerWidth / 2,
|
||||
y: -customNode.position.y * 0.8 + (window.innerHeight - 400) / 2,
|
||||
zoom: 0.8,
|
||||
},
|
||||
{ duration: 500 },
|
||||
);
|
||||
}, 50);
|
||||
} catch (error) {
|
||||
console.error("Error adding block:", error);
|
||||
} finally {
|
||||
setSelectedAgentId(null);
|
||||
setIsGettingAgentDetails(false);
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
allAgents,
|
||||
agentLoading,
|
||||
@@ -48,5 +100,8 @@ export const useMyAgentsContent = () => {
|
||||
refetch,
|
||||
error,
|
||||
status,
|
||||
handleAddBlock,
|
||||
isGettingAgentDetails,
|
||||
selectedAgentId,
|
||||
};
|
||||
};
|
||||
|
||||
@@ -4,14 +4,12 @@ import { Block } from "../Block";
|
||||
import { useSuggestionContent } from "./useSuggestionContent";
|
||||
import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard";
|
||||
import { blockMenuContainerStyle } from "../style";
|
||||
import { useNodeStore } from "../../../../stores/nodeStore";
|
||||
import { useBlockMenuStore } from "../../../../stores/blockMenuStore";
|
||||
import { DefaultStateType } from "../types";
|
||||
|
||||
export const SuggestionContent = () => {
|
||||
const { setIntegration, setDefaultState } = useBlockMenuStore();
|
||||
const { data, isLoading, isError, error, refetch } = useSuggestionContent();
|
||||
const addBlock = useNodeStore((state) => state.addBlock);
|
||||
|
||||
if (isError) {
|
||||
return (
|
||||
@@ -76,7 +74,6 @@ export const SuggestionContent = () => {
|
||||
key={`block-${index}`}
|
||||
title={block.name}
|
||||
description={block.description}
|
||||
onClick={() => addBlock(block)}
|
||||
blockData={block}
|
||||
/>
|
||||
))
|
||||
|
||||
@@ -1,13 +1,15 @@
|
||||
import { Button } from "@/components/__legacy__/ui/button";
|
||||
import { Skeleton } from "@/components/__legacy__/ui/skeleton";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { Plus } from "lucide-react";
|
||||
import Image from "next/image";
|
||||
import React, { ButtonHTMLAttributes } from "react";
|
||||
import { highlightText } from "./helpers";
|
||||
import { formatTimeAgo } from "@/lib/utils/time";
|
||||
import { CircleNotchIcon } from "@phosphor-icons/react";
|
||||
import { PlusIcon } from "@phosphor-icons/react/dist/ssr";
|
||||
|
||||
interface Props extends ButtonHTMLAttributes<HTMLButtonElement> {
|
||||
isLoading?: boolean;
|
||||
title?: string;
|
||||
edited_time?: Date;
|
||||
version?: number;
|
||||
@@ -20,6 +22,7 @@ interface UGCAgentBlockComponent extends React.FC<Props> {
|
||||
}
|
||||
|
||||
export const UGCAgentBlock: UGCAgentBlockComponent = ({
|
||||
isLoading,
|
||||
title,
|
||||
image_url,
|
||||
edited_time = new Date(),
|
||||
@@ -85,7 +88,11 @@ export const UGCAgentBlock: UGCAgentBlockComponent = ({
|
||||
"flex h-7 w-7 items-center justify-center rounded-[0.5rem] bg-zinc-700 group-disabled:bg-zinc-400",
|
||||
)}
|
||||
>
|
||||
<Plus className="h-5 w-5 text-zinc-50" strokeWidth={2} />
|
||||
{isLoading ? (
|
||||
<CircleNotchIcon className="h-5 w-5 animate-spin text-zinc-50" />
|
||||
) : (
|
||||
<PlusIcon className="h-5 w-5 text-zinc-50" strokeWidth={2} />
|
||||
)}
|
||||
</div>
|
||||
</Button>
|
||||
);
|
||||
|
||||
@@ -1,3 +1,10 @@
|
||||
import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
|
||||
import { BlockUIType } from "../../types";
|
||||
import { BlockInfo } from "@/app/api/__generated__/models/blockInfo";
|
||||
import { BlockCategory } from "../../helper";
|
||||
import { RJSFSchema } from "@rjsf/utils";
|
||||
import { SpecialBlockID } from "@/lib/autogpt-server-api";
|
||||
|
||||
export const highlightText = (
|
||||
text: string | undefined,
|
||||
highlight: string | undefined,
|
||||
@@ -20,3 +27,37 @@ export const highlightText = (
|
||||
),
|
||||
);
|
||||
};
|
||||
|
||||
export const convertLibraryAgentIntoCustomNode = (
|
||||
agent: LibraryAgent,
|
||||
inputSchema: RJSFSchema = {} as RJSFSchema,
|
||||
outputSchema: RJSFSchema = {} as RJSFSchema,
|
||||
) => {
|
||||
const block: BlockInfo = {
|
||||
id: SpecialBlockID.AGENT,
|
||||
name: agent.name,
|
||||
description:
|
||||
`Ver.${agent.graph_version}` +
|
||||
(agent.description ? ` | ${agent.description}` : ""),
|
||||
categories: [{ category: BlockCategory.AGENT, description: "" }],
|
||||
inputSchema: inputSchema,
|
||||
outputSchema: outputSchema,
|
||||
staticOutput: false,
|
||||
uiType: BlockUIType.AGENT,
|
||||
costs: [],
|
||||
contributors: [],
|
||||
};
|
||||
|
||||
const hardcodedValues: Record<string, any> = {
|
||||
graph_id: agent.graph_id,
|
||||
graph_version: agent.graph_version,
|
||||
input_schema: inputSchema,
|
||||
output_schema: outputSchema,
|
||||
agent_name: agent.name,
|
||||
};
|
||||
|
||||
return {
|
||||
block,
|
||||
hardcodedValues,
|
||||
};
|
||||
};
|
||||
|
||||
@@ -1,14 +1,12 @@
// import { Separator } from "@/components/__legacy__/ui/separator";
import { cn } from "@/lib/utils";
import React, { useMemo } from "react";
import React, { memo } from "react";
import { BlockMenu } from "./NewBlockMenu/BlockMenu/BlockMenu";
import { useNewControlPanel } from "./useNewControlPanel";
// import { NewSaveControl } from "../SaveControl/NewSaveControl";
import { GraphExecutionID } from "@/lib/autogpt-server-api";
// import { ControlPanelButton } from "../ControlPanelButton";
import { ArrowUUpLeftIcon, ArrowUUpRightIcon } from "@phosphor-icons/react";
// import { GraphSearchMenu } from "../GraphMenu/GraphMenu";
import { history } from "@/app/(platform)/build/components/legacy-builder/history";
import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
import { Separator } from "@/components/__legacy__/ui/separator";
import { NewSaveControl } from "./NewSaveControl/NewSaveControl";
@@ -31,56 +29,39 @@ export type NewControlPanelProps = {
onNodeSelect?: (nodeId: string) => void;
onNodeHover?: (nodeId: string) => void;
};
export const NewControlPanel = ({
flowExecutionID: _flowExecutionID,
visualizeBeads: _visualizeBeads,
pinSavePopover: _pinSavePopover,
pinBlocksPopover: _pinBlocksPopover,
nodes: _nodes,
onNodeSelect: _onNodeSelect,
onNodeHover: _onNodeHover,
}: NewControlPanelProps) => {
const _isGraphSearchEnabled = useGetFlag(Flag.GRAPH_SEARCH);
export const NewControlPanel = memo(
({
flowExecutionID: _flowExecutionID,
visualizeBeads: _visualizeBeads,
pinSavePopover: _pinSavePopover,
pinBlocksPopover: _pinBlocksPopover,
nodes: _nodes,
onNodeSelect: _onNodeSelect,
onNodeHover: _onNodeHover,
}: NewControlPanelProps) => {
const _isGraphSearchEnabled = useGetFlag(Flag.GRAPH_SEARCH);

const {
// agentDescription,
// setAgentDescription,
// saveAgent,
// agentName,
// setAgentName,
// savedAgent,
// isSaving,
// isRunning,
// isStopping,
} = useNewControlPanel({});
const {
// agentDescription,
// setAgentDescription,
// saveAgent,
// agentName,
// setAgentName,
// savedAgent,
// isSaving,
// isRunning,
// isStopping,
} = useNewControlPanel({});

const _controls: Control[] = useMemo(
() => [
{
label: "Undo",
icon: <ArrowUUpLeftIcon size={20} weight="bold" />,
onClick: history.undo,
disabled: !history.canUndo(),
},
{
label: "Redo",
icon: <ArrowUUpRightIcon size={20} weight="bold" />,
onClick: history.redo,
disabled: !history.canRedo(),
},
],
[],
);

return (
<section
className={cn(
"absolute left-4 top-10 z-10 w-[4.25rem] overflow-hidden rounded-[1rem] border-none bg-white p-0 shadow-[0_1px_5px_0_rgba(0,0,0,0.1)]",
)}
>
<div className="flex flex-col items-center justify-center rounded-[1rem] p-0">
<BlockMenu />
{/* <Separator className="text-[#E1E1E1]" />
return (
<section
className={cn(
"absolute left-4 top-10 z-10 w-[4.25rem] overflow-hidden rounded-[1rem] border-none bg-white p-0 shadow-[0_1px_5px_0_rgba(0,0,0,0.1)]",
)}
>
<div className="flex flex-col items-center justify-center rounded-[1rem] p-0">
<BlockMenu />
{/* <Separator className="text-[#E1E1E1]" />
{isGraphSearchEnabled && (
<>
<GraphSearchMenu
@@ -105,13 +86,16 @@ export const NewControlPanel = ({
{control.icon}
</ControlPanelButton>
))} */}
<Separator className="text-[#E1E1E1]" />
<NewSaveControl />
<Separator className="text-[#E1E1E1]" />
<UndoRedoButtons />
</div>
</section>
);
};
<Separator className="text-[#E1E1E1]" />
<NewSaveControl />
<Separator className="text-[#E1E1E1]" />
<UndoRedoButtons />
</div>
</section>
);
},
);

export default NewControlPanel;

NewControlPanel.displayName = "NewControlPanel";
@@ -42,9 +42,10 @@ export const graphsEquivalent = (
name: current.name,
description: current.description,
nodes: sortNodes(current.nodes ?? []).map(({ id: _, ...rest }) => rest),
links: sortLinks(current.links ?? []).map(
({ source_id: _, sink_id: __, ...rest }) => rest,
),
links: sortLinks(current.links ?? []).map((v) => ({
sink_name: v.sink_name,
source_name: v.source_name,
})),
};

return deepEquals(_saved, _current);
@@ -5,20 +5,15 @@ import { useEdgeStore } from "../stores/edgeStore";
import { useNodeStore } from "../stores/nodeStore";
import { scrollbarStyles } from "@/components/styles/scrollbars";
import { cn } from "@/lib/utils";
import { customEdgeToLink } from "./helper";

export const RightSidebar = () => {
const connections = useEdgeStore((s) => s.connections);
const edges = useEdgeStore((s) => s.edges);
const nodes = useNodeStore((s) => s.nodes);

const backendLinks: Link[] = useMemo(
() =>
connections.map((c) => ({
source_id: c.source,
sink_id: c.target,
source_name: c.sourceHandle,
sink_name: c.targetHandle,
})),
[connections],
() => edges.map(customEdgeToLink),
[edges],
);

return (
@@ -61,16 +56,16 @@ export const RightSidebar = () => {
Links ({backendLinks.length})
</h3>
<div className="mb-6 space-y-3">
{connections.map((c) => (
{backendLinks.map((l) => (
<div
key={c.edge_id}
key={l.id}
className="rounded border p-2 text-xs dark:border-slate-700"
>
<div className="font-medium">
{c.source}[{c.sourceHandle}] → {c.target}[{c.targetHandle}]
{l.source_id}[{l.source_name}] → {l.sink_id}[{l.sink_name}]
</div>
<div className="mt-1 text-slate-500 dark:text-slate-400">
edge_id: {c.edge_id}
edge_id: {l.id}
</div>
</div>
))}
@@ -6,6 +6,9 @@ import {
import { BlockUIType } from "./types";
import { NodeModel } from "@/app/api/__generated__/models/nodeModel";
import { NodeModelMetadata } from "@/app/api/__generated__/models/nodeModelMetadata";
import { Link } from "@/app/api/__generated__/models/link";
import { CustomEdge } from "./FlowEditor/edges/CustomEdge";
import { XYPosition } from "@xyflow/react";

export const convertBlockInfoIntoCustomNodeData = (
block: BlockInfo,
@@ -19,6 +22,7 @@ export const convertBlockInfoIntoCustomNodeData = (
outputSchema: block.outputSchema,
categories: block.categories,
uiType: block.uiType as BlockUIType,
staticOutput: block.staticOutput,
block_id: block.id,
costs: block.costs,
};

@@ -35,7 +39,7 @@ export const convertNodesPlusBlockInfoIntoCustomNodes = (
);
const customNode: CustomNode = {
id: node.id ?? "",
data: customNodeData,
data: { ...customNodeData, metadata: node.metadata },
type: "custom",
position: {
x:
@@ -57,6 +61,27 @@ export const convertNodesPlusBlockInfoIntoCustomNodes = (
return customNode;
};

export const linkToCustomEdge = (link: Link): CustomEdge => ({
id: link.id ?? "",
type: "custom" as const,
source: link.source_id,
target: link.sink_id,
sourceHandle: link.source_name,
targetHandle: link.sink_name,
data: {
isStatic: link.is_static,
},
});

export const customEdgeToLink = (edge: CustomEdge): Link => ({
id: edge.id || undefined,
source_id: edge.source,
sink_id: edge.target,
source_name: edge.sourceHandle || "",
sink_name: edge.targetHandle || "",
is_static: edge.data?.isStatic,
});

export enum BlockCategory {
AI = "AI",
SOCIAL = "SOCIAL",
@@ -91,3 +116,107 @@ export const isCostFilterMatch = (
)
: costFilter === inputValues;
};

// ----- Position related helpers -----

export interface NodeDimensions {
x: number;
y: number;
width: number;
height: number;
}

function rectanglesOverlap(
rect1: NodeDimensions,
rect2: NodeDimensions,
): boolean {
const x1 = rect1.x,
y1 = rect1.y,
w1 = rect1.width,
h1 = rect1.height;
const x2 = rect2.x,
y2 = rect2.y,
w2 = rect2.width,
h2 = rect2.height;

return !(x1 + w1 <= x2 || x1 >= x2 + w2 || y1 + h1 <= y2 || y1 >= y2 + h2);
}

export function findFreePosition(
existingNodes: Array<{
position: XYPosition;
measured?: { width: number; height: number };
}>,
newNodeWidth: number = 500,
margin: number = 60,
): XYPosition {
if (existingNodes.length === 0) {
return { x: 100, y: 100 }; // Default starting position
}

// Start from the most recently added node
for (let i = existingNodes.length - 1; i >= 0; i--) {
const lastNode = existingNodes[i];
const lastNodeWidth = lastNode.measured?.width ?? 500;
const lastNodeHeight = lastNode.measured?.height ?? 400;

// Try right
const candidate = {
x: lastNode.position.x + lastNodeWidth + margin,
y: lastNode.position.y,
width: newNodeWidth,
height: 400, // Estimated height
};

if (
!existingNodes.some((n) =>
rectanglesOverlap(candidate, {
x: n.position.x,
y: n.position.y,
width: n.measured?.width ?? 500,
height: n.measured?.height ?? 400,
}),
)
) {
return { x: candidate.x, y: candidate.y };
}

// Try left
candidate.x = lastNode.position.x - newNodeWidth - margin;
if (
!existingNodes.some((n) =>
rectanglesOverlap(candidate, {
x: n.position.x,
y: n.position.y,
width: n.measured?.width ?? 500,
height: n.measured?.height ?? 400,
}),
)
) {
return { x: candidate.x, y: candidate.y };
}

// Try below
candidate.x = lastNode.position.x;
candidate.y = lastNode.position.y + lastNodeHeight + margin;
if (
!existingNodes.some((n) =>
rectanglesOverlap(candidate, {
x: n.position.x,
y: n.position.y,
width: n.measured?.width ?? 500,
height: n.measured?.height ?? 400,
}),
)
) {
return { x: candidate.x, y: candidate.y };
}
}

// Fallback: place it far to the right
const lastNode = existingNodes[existingNodes.length - 1];
return {
x: lastNode.position.x + 600,
y: lastNode.position.y,
};
}
@@ -762,6 +762,22 @@ const FlowEditor: React.FC<{
[],
);

// Track when we should run or schedule after save completes
const [shouldRunAfterSave, setShouldRunAfterSave] = useState(false);
const [shouldScheduleAfterSave, setShouldScheduleAfterSave] = useState(false);

// Effect to trigger runOrOpenInput or openRunInputDialog after saving completes
useEffect(() => {
if (!isSaving && shouldRunAfterSave) {
runnerUIRef.current?.runOrOpenInput();
setShouldRunAfterSave(false);
}
if (!isSaving && shouldScheduleAfterSave) {
runnerUIRef.current?.openRunInputDialog();
setShouldScheduleAfterSave(false);
}
}, [isSaving, shouldRunAfterSave, shouldScheduleAfterSave]);

const handleRunButton = useCallback(async () => {
if (isRunning) return;
if (!savedAgent) {
@@ -771,7 +787,7 @@ const FlowEditor: React.FC<{
return;
}
await saveAgent();
runnerUIRef.current?.runOrOpenInput();
setShouldRunAfterSave(true);
}, [isRunning, savedAgent, toast, saveAgent]);

const handleScheduleButton = useCallback(async () => {
@@ -783,7 +799,7 @@ const FlowEditor: React.FC<{
return;
}
await saveAgent();
runnerUIRef.current?.openRunInputDialog();
setShouldScheduleAfterSave(true);
}, [isScheduling, savedAgent, toast, saveAgent]);

const isNewBlockEnabled = useGetFlag(Flag.NEW_BLOCK_MENU);
@@ -2,10 +2,8 @@

import { useCallback } from "react";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { useQueryClient } from "@tanstack/react-query";
import { parseAsInteger, parseAsString, useQueryStates } from "nuqs";
import {
getGetV1GetSpecificGraphQueryKey,
useGetV1GetSpecificGraph,
usePostV1CreateNewGraph,
usePutV1UpdateGraphVersion,
@@ -15,6 +13,8 @@ import { Graph } from "@/app/api/__generated__/models/graph";
import { useNodeStore } from "../stores/nodeStore";
import { useEdgeStore } from "../stores/edgeStore";
import { graphsEquivalent } from "../components/NewControlPanel/NewSaveControl/helpers";
import { useGraphStore } from "../stores/graphStore";
import { useShallow } from "zustand/react/shallow";

export type SaveGraphOptions = {
showToast?: boolean;
@@ -28,13 +28,16 @@ export const useSaveGraph = ({
onError,
}: SaveGraphOptions) => {
const { toast } = useToast();
const queryClient = useQueryClient();

const [{ flowID, flowVersion }, setQueryStates] = useQueryStates({
flowID: parseAsString,
flowVersion: parseAsInteger,
});

const setGraphSchemas = useGraphStore(
useShallow((state) => state.setGraphSchemas),
);

const { data: graph } = useGetV1GetSpecificGraph(
flowID ?? "",
flowVersion !== null ? { version: flowVersion } : {},
@@ -55,9 +58,6 @@ export const useSaveGraph = ({
flowID: data.id,
flowVersion: data.version,
});
queryClient.refetchQueries({
queryKey: getGetV1GetSpecificGraphQueryKey(data.id),
});
onSuccess?.(data);
if (showToast) {
toast({
@@ -69,6 +69,12 @@ export const useSaveGraph = ({
},
onError: (error) => {
onError?.(error);
toast({
title: "Error saving graph",
description:
(error as any).message ?? "An unexpected error occurred.",
variant: "destructive",
});
},
},
});
@@ -82,9 +88,6 @@ export const useSaveGraph = ({
flowID: data.id,
flowVersion: data.version,
});
queryClient.refetchQueries({
queryKey: getGetV1GetSpecificGraphQueryKey(data.id),
});
onSuccess?.(data);
if (showToast) {
toast({
@@ -134,7 +137,13 @@ export const useSaveGraph = ({
return;
}

await updateGraph({ graphId: graph.id, data: data });
const response = await updateGraph({ graphId: graph.id, data: data });
const graphData = response.data as GraphModel;
setGraphSchemas(
graphData.input_schema,
graphData.credentials_input_schema,
graphData.output_schema,
);
} else {
const data: Graph = {
name: values?.name || `New Agent ${new Date().toISOString()}`,
@@ -143,7 +152,13 @@ export const useSaveGraph = ({
links: graphLinks,
};

await createNewGraph({ data: { graph: data } });
const response = await createNewGraph({ data: { graph: data } });
const graphData = response.data as GraphModel;
setGraphSchemas(
graphData.input_schema,
graphData.credentials_input_schema,
graphData.output_schema,
);
}
},
[graph, toast, createNewGraph, updateGraph],
@@ -1,15 +1,15 @@
"use client";

import { useOnboarding } from "@/providers/onboarding/onboarding-provider";
import FlowEditor from "@/app/(platform)/build/components/legacy-builder/Flow/Flow";
import { useOnboarding } from "@/providers/onboarding/onboarding-provider";
// import LoadingBox from "@/components/__legacy__/ui/loading";
import { GraphID } from "@/lib/autogpt-server-api/types";
import { ReactFlowProvider } from "@xyflow/react";
import { useSearchParams } from "next/navigation";
import { useEffect } from "react";
import { Flow } from "./components/FlowEditor/Flow/Flow";
import { BuilderViewTabs } from "./components/BuilderViewTabs/BuilderViewTabs";
import { useBuilderView } from "./components/BuilderViewTabs/useBuilderViewTabs";
import { ReactFlowProvider } from "@xyflow/react";
import { Flow } from "./components/FlowEditor/Flow/Flow";

function BuilderContent() {
const query = useSearchParams();
@@ -1,12 +1,13 @@
import { create } from "zustand";
import { CustomNode } from "../components/FlowEditor/nodes/CustomNode/CustomNode";
import { Connection, useEdgeStore } from "./edgeStore";
import { useEdgeStore } from "./edgeStore";
import { Key, storage } from "@/services/storage/local-storage";
import { useNodeStore } from "./nodeStore";
import { CustomEdge } from "../components/FlowEditor/edges/CustomEdge";

interface CopyableData {
nodes: CustomNode[];
connections: Connection[];
edges: CustomEdge[];
}

type CopyPasteStore = {
@@ -17,14 +18,14 @@ type CopyPasteStore = {
export const useCopyPasteStore = create<CopyPasteStore>(() => ({
copySelectedNodes: () => {
const { nodes } = useNodeStore.getState();
const { connections } = useEdgeStore.getState();
const { edges } = useEdgeStore.getState();

const selectedNodes = nodes.filter((node) => node.selected);
const selectedNodeIds = new Set(selectedNodes.map((node) => node.id));

const selectedConnections = connections.filter(
(conn) =>
selectedNodeIds.has(conn.source) && selectedNodeIds.has(conn.target),
const selectedEdges = edges.filter(
(edge) =>
selectedNodeIds.has(edge.source) && selectedNodeIds.has(edge.target),
);

const copiedData: CopyableData = {
@@ -34,7 +35,7 @@ export const useCopyPasteStore = create<CopyPasteStore>(() => ({
...node.data,
},
})),
connections: selectedConnections,
edges: selectedEdges,
};

storage.set(Key.COPIED_FLOW_DATA, JSON.stringify(copiedData));
@@ -46,7 +47,7 @@ export const useCopyPasteStore = create<CopyPasteStore>(() => ({

const copiedData = JSON.parse(copiedDataString) as CopyableData;
const { addNode } = useNodeStore.getState();
const { addConnection } = useEdgeStore.getState();
const { addEdge } = useEdgeStore.getState();

const oldToNewIdMap: Record<string, string> = {};

@@ -85,15 +86,15 @@ export const useCopyPasteStore = create<CopyPasteStore>(() => ({
});
});

copiedData.connections.forEach((conn) => {
const newSourceId = oldToNewIdMap[conn.source] ?? conn.source;
const newTargetId = oldToNewIdMap[conn.target] ?? conn.target;
copiedData.edges.forEach((edge) => {
const newSourceId = oldToNewIdMap[edge.source] ?? edge.source;
const newTargetId = oldToNewIdMap[edge.target] ?? edge.target;

addConnection({
addEdge({
source: newSourceId,
target: newTargetId,
sourceHandle: conn.sourceHandle ?? "",
targetHandle: conn.targetHandle ?? "",
sourceHandle: edge.sourceHandle ?? "",
targetHandle: edge.targetHandle ?? "",
});
});
},
@@ -1,103 +1,165 @@
import { create } from "zustand";
import { convertConnectionsToBackendLinks } from "../components/FlowEditor/edges/helpers";
import { Link } from "@/app/api/__generated__/models/link";

export type Connection = {
edge_id: string;
source: string;
sourceHandle: string;
target: string;
targetHandle: string;
};
import { CustomEdge } from "../components/FlowEditor/edges/CustomEdge";
import { customEdgeToLink, linkToCustomEdge } from "../components/helper";
import { MarkerType } from "@xyflow/react";
import { NodeExecutionResult } from "@/app/api/__generated__/models/nodeExecutionResult";

type EdgeStore = {
connections: Connection[];
edges: CustomEdge[];

setConnections: (connections: Connection[]) => void;
addConnection: (
conn: Omit<Connection, "edge_id"> & { edge_id?: string },
) => void;
removeConnection: (edge_id: string) => void;
upsertMany: (conns: Connection[]) => void;
setEdges: (edges: CustomEdge[]) => void;
addEdge: (edge: Omit<CustomEdge, "id"> & { id?: string }) => CustomEdge;
removeEdge: (edgeId: string) => void;
upsertMany: (edges: CustomEdge[]) => void;

getNodeConnections: (nodeId: string) => Connection[];
getNodeEdges: (nodeId: string) => CustomEdge[];
isInputConnected: (nodeId: string, handle: string) => boolean;
isOutputConnected: (nodeId: string, handle: string) => boolean;
getBackendLinks: () => Link[];
addLinks: (links: Link[]) => void;

getAllHandleIdsOfANode: (nodeId: string) => string[];

updateEdgeBeads: (
targetNodeId: string,
executionResult: NodeExecutionResult,
) => void;
resetEdgeBeads: () => void;
};

function makeEdgeId(conn: Omit<Connection, "edge_id">) {
return `${conn.source}:${conn.sourceHandle}->${conn.target}:${conn.targetHandle}`;
function makeEdgeId(edge: Omit<CustomEdge, "id">) {
return `${edge.source}:${edge.sourceHandle}->${edge.target}:${edge.targetHandle}`;
}

export const useEdgeStore = create<EdgeStore>((set, get) => ({
connections: [],
edges: [],

setConnections: (connections) => set({ connections }),
setEdges: (edges) => set({ edges }),

addConnection: (conn) => {
const edge_id = conn.edge_id || makeEdgeId(conn);
const newConn: Connection = { edge_id, ...conn };
addEdge: (edge) => {
const id = edge.id || makeEdgeId(edge);
const newEdge: CustomEdge = {
type: "custom" as const,
markerEnd: {
type: MarkerType.ArrowClosed,
strokeWidth: 2,
color: "#555",
},
...edge,
id,
};

set((state) => {
const exists = state.connections.some(
(c) =>
c.source === newConn.source &&
c.target === newConn.target &&
c.sourceHandle === newConn.sourceHandle &&
c.targetHandle === newConn.targetHandle,
const exists = state.edges.some(
(e) =>
e.source === newEdge.source &&
e.target === newEdge.target &&
e.sourceHandle === newEdge.sourceHandle &&
e.targetHandle === newEdge.targetHandle,
);
if (exists) return state;
return { connections: [...state.connections, newConn] };
return { edges: [...state.edges, newEdge] };
});

return { edge_id, ...conn };
return newEdge;
},

removeConnection: (edge_id) =>
removeEdge: (edgeId) =>
set((state) => ({
connections: state.connections.filter((c) => c.edge_id !== edge_id),
edges: state.edges.filter((e) => e.id !== edgeId),
})),

upsertMany: (conns) =>
upsertMany: (edges) =>
set((state) => {
const byKey = new Map(state.connections.map((c) => [c.edge_id, c]));
conns.forEach((c) => {
byKey.set(c.edge_id, c);
const byKey = new Map(state.edges.map((e) => [e.id, e]));
edges.forEach((e) => {
byKey.set(e.id, e);
});
return { connections: Array.from(byKey.values()) };
return { edges: Array.from(byKey.values()) };
}),

getNodeConnections: (nodeId) =>
get().connections.filter((c) => c.source === nodeId || c.target === nodeId),
getNodeEdges: (nodeId) =>
get().edges.filter((e) => e.source === nodeId || e.target === nodeId),

isInputConnected: (nodeId, handle) =>
get().connections.some(
(c) => c.target === nodeId && c.targetHandle === handle,
),
get().edges.some((e) => e.target === nodeId && e.targetHandle === handle),

isOutputConnected: (nodeId, handle) =>
get().connections.some(
(c) => c.source === nodeId && c.sourceHandle === handle,
),
getBackendLinks: () => convertConnectionsToBackendLinks(get().connections),
get().edges.some((e) => e.source === nodeId && e.sourceHandle === handle),

addLinks: (links) =>
getBackendLinks: () => get().edges.map(customEdgeToLink),

addLinks: (links) => {
links.forEach((link) => {
get().addConnection({
edge_id: link.id ?? "",
source: link.source_id,
target: link.sink_id,
sourceHandle: link.source_name,
targetHandle: link.sink_name,
});
}),
get().addEdge(linkToCustomEdge(link));
});
},

getAllHandleIdsOfANode: (nodeId) =>
get()
.connections.filter((c) => c.target === nodeId)
.map((c) => c.targetHandle),
.edges.filter((e) => e.target === nodeId)
.map((e) => e.targetHandle || ""),

updateEdgeBeads: (
targetNodeId: string,
executionResult: NodeExecutionResult,
) => {
set((state) => ({
edges: state.edges.map((edge) => {
if (edge.target !== targetNodeId) {
return edge;
}

const beadData =
edge.data?.beadData ??
new Map<string, NodeExecutionResult["status"]>();

if (
edge.targetHandle &&
edge.targetHandle in executionResult.input_data
) {
beadData.set(executionResult.node_exec_id, executionResult.status);
}

let beadUp = 0;
let beadDown = 0;

beadData.forEach((status) => {
beadUp++;
if (status !== "INCOMPLETE") {
beadDown++;
}
});

if (edge.data?.isStatic && beadUp > 0) {
beadUp = beadDown + 1;
}

return {
...edge,
data: {
...edge.data,
beadUp,
beadDown,
beadData,
},
};
}),
}));
},

resetEdgeBeads: () => {
set((state) => ({
edges: state.edges.map((edge) => ({
...edge,
data: {
...edge.data,
beadUp: 0,
beadDown: 0,
beadData: new Map(),
},
})),
}));
},
}));
@@ -6,13 +6,17 @@ interface GraphStore {

inputSchema: Record<string, any> | null;
credentialsInputSchema: Record<string, any> | null;
outputSchema: Record<string, any> | null;

setGraphSchemas: (
inputSchema: Record<string, any> | null,
credentialsInputSchema: Record<string, any> | null,
outputSchema: Record<string, any> | null,
) => void;

hasInputs: () => boolean;
hasCredentials: () => boolean;
hasOutputs: () => boolean;
reset: () => void;
}

@@ -20,11 +24,17 @@ export const useGraphStore = create<GraphStore>((set, get) => ({
isGraphRunning: false,
inputSchema: null,
credentialsInputSchema: null,
outputSchema: null,

setIsGraphRunning: (isGraphRunning: boolean) => set({ isGraphRunning }),

setGraphSchemas: (inputSchema, credentialsInputSchema) =>
set({ inputSchema, credentialsInputSchema }),
setGraphSchemas: (inputSchema, credentialsInputSchema, outputSchema) =>
set({ inputSchema, credentialsInputSchema, outputSchema }),

hasOutputs: () => {
const { outputSchema } = get();
return Object.keys(outputSchema?.properties ?? {}).length > 0;
},

hasInputs: () => {
const { inputSchema } = get();
@@ -2,12 +2,13 @@ import { create } from "zustand";
import isEqual from "lodash/isEqual";

import { CustomNode } from "../components/FlowEditor/nodes/CustomNode/CustomNode";
import { Connection, useEdgeStore } from "./edgeStore";
import { useEdgeStore } from "./edgeStore";
import { useNodeStore } from "./nodeStore";
import { CustomEdge } from "../components/FlowEditor/edges/CustomEdge";

type HistoryState = {
nodes: CustomNode[];
connections: Connection[];
edges: CustomEdge[];
};

type HistoryStore = {
@@ -15,6 +16,7 @@ type HistoryStore = {
future: HistoryState[];
undo: () => void;
redo: () => void;
initializeHistory: () => void;
canUndo: () => boolean;
canRedo: () => boolean;
pushState: (state: HistoryState) => void;
@@ -24,7 +26,7 @@ type HistoryStore = {
const MAX_HISTORY = 50;

export const useHistoryStore = create<HistoryStore>((set, get) => ({
past: [{ nodes: [], connections: [] }],
past: [{ nodes: [], edges: [] }],
future: [],

pushState: (state: HistoryState) => {
@@ -41,6 +43,16 @@ export const useHistoryStore = create<HistoryStore>((set, get) => ({
}));
},

initializeHistory: () => {
const currentNodes = useNodeStore.getState().nodes;
const currentEdges = useEdgeStore.getState().edges;

set({
past: [{ nodes: currentNodes, edges: currentEdges }],
future: [],
});
},

undo: () => {
const { past, future } = get();
if (past.length <= 1) return;
@@ -50,7 +62,7 @@ export const useHistoryStore = create<HistoryStore>((set, get) => ({
const previousState = past[past.length - 2];

useNodeStore.getState().setNodes(previousState.nodes);
useEdgeStore.getState().setConnections(previousState.connections);
useEdgeStore.getState().setEdges(previousState.edges);

set({
past: past.slice(0, -1),
@@ -65,7 +77,7 @@ export const useHistoryStore = create<HistoryStore>((set, get) => ({
const nextState = future[0];

useNodeStore.getState().setNodes(nextState.nodes);
useEdgeStore.getState().setConnections(nextState.connections);
useEdgeStore.getState().setEdges(nextState.edges);

set({
past: [...past, nextState],
@@ -76,5 +88,5 @@ export const useHistoryStore = create<HistoryStore>((set, get) => ({
canUndo: () => get().past.length > 1,
canRedo: () => get().future.length > 0,

clear: () => set({ past: [{ nodes: [], connections: [] }], future: [] }),
clear: () => set({ past: [{ nodes: [], edges: [] }], future: [] }),
}));
Some files were not shown because too many files have changed in this diff.