mirror of https://github.com/Significant-Gravitas/AutoGPT.git
synced 2026-02-12 15:55:03 -05:00

Compare commits: refactor/r… ... fix/enable…
4 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | cdeefb8621 |  |
|  | ba6d585170 |  |
|  | 90eac56525 |  |
|  | 75f8772f8a |  |
@@ -22,7 +22,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v6
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.workflow_run.head_branch }}
          fetch-depth: 0

.github/workflows/claude-dependabot.yml (vendored, 2 changed lines)
@@ -30,7 +30,7 @@ jobs:
      actions: read # Required for CI access
    steps:
      - name: Checkout code
        uses: actions/checkout@v6
        uses: actions/checkout@v4
        with:
          fetch-depth: 1

.github/workflows/claude.yml (vendored, 2 changed lines)
@@ -40,7 +40,7 @@ jobs:
      actions: read # Required for CI access
    steps:
      - name: Checkout code
        uses: actions/checkout@v6
        uses: actions/checkout@v4
        with:
          fetch-depth: 1

.github/workflows/codeql.yml (vendored, 2 changed lines)
@@ -58,7 +58,7 @@ jobs:
    # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
        uses: actions/checkout@v4

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL

.github/workflows/copilot-setup-steps.yml (vendored, 2 changed lines)
@@ -27,7 +27,7 @@ jobs:
    # If you do not check out your code, Copilot will do this for you.
    steps:
      - name: Checkout code
        uses: actions/checkout@v6
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          submodules: true

.github/workflows/docs-block-sync.yml (vendored, 2 changed lines)
@@ -23,7 +23,7 @@ jobs:
    steps:
      - name: Checkout code
        uses: actions/checkout@v6
        uses: actions/checkout@v4
        with:
          fetch-depth: 1

.github/workflows/docs-claude-review.yml (vendored, 2 changed lines)
@@ -23,7 +23,7 @@ jobs:
    steps:
      - name: Checkout code
        uses: actions/checkout@v6
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

.github/workflows/docs-enhance.yml (vendored, 2 changed lines)
@@ -28,7 +28,7 @@ jobs:
    steps:
      - name: Checkout code
        uses: actions/checkout@v6
        uses: actions/checkout@v4
        with:
          fetch-depth: 1
@@ -25,7 +25,7 @@ jobs:
    steps:
      - name: Checkout code
        uses: actions/checkout@v6
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.inputs.git_ref || github.ref_name }}

@@ -52,7 +52,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Trigger deploy workflow
        uses: peter-evans/repository-dispatch@v4
        uses: peter-evans/repository-dispatch@v3
        with:
          token: ${{ secrets.DEPLOY_TOKEN }}
          repository: Significant-Gravitas/AutoGPT_cloud_infrastructure
@@ -17,7 +17,7 @@ jobs:
    steps:
      - name: Checkout code
        uses: actions/checkout@v6
        uses: actions/checkout@v4
        with:
          ref: ${{ github.ref_name || 'master' }}

@@ -45,7 +45,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Trigger deploy workflow
        uses: peter-evans/repository-dispatch@v4
        uses: peter-evans/repository-dispatch@v3
        with:
          token: ${{ secrets.DEPLOY_TOKEN }}
          repository: Significant-Gravitas/AutoGPT_cloud_infrastructure

.github/workflows/platform-backend-ci.yml (vendored, 2 changed lines)
@@ -68,7 +68,7 @@ jobs:
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          submodules: true

@@ -82,7 +82,7 @@ jobs:
      - name: Dispatch Deploy Event
        if: steps.check_status.outputs.should_deploy == 'true'
        uses: peter-evans/repository-dispatch@v4
        uses: peter-evans/repository-dispatch@v3
        with:
          token: ${{ secrets.DISPATCH_TOKEN }}
          repository: Significant-Gravitas/AutoGPT_cloud_infrastructure

@@ -110,7 +110,7 @@ jobs:
      - name: Dispatch Undeploy Event (from comment)
        if: steps.check_status.outputs.should_undeploy == 'true'
        uses: peter-evans/repository-dispatch@v4
        uses: peter-evans/repository-dispatch@v3
        with:
          token: ${{ secrets.DISPATCH_TOKEN }}
          repository: Significant-Gravitas/AutoGPT_cloud_infrastructure

@@ -168,7 +168,7 @@ jobs:
          github.event_name == 'pull_request' &&
          github.event.action == 'closed' &&
          steps.check_pr_close.outputs.should_undeploy == 'true'
        uses: peter-evans/repository-dispatch@v4
        uses: peter-evans/repository-dispatch@v3
        with:
          token: ${{ secrets.DISPATCH_TOKEN }}
          repository: Significant-Gravitas/AutoGPT_cloud_infrastructure

.github/workflows/platform-frontend-ci.yml (vendored, 10 changed lines)
@@ -31,7 +31,7 @@ jobs:
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
        uses: actions/checkout@v4

      - name: Check for component changes
        uses: dorny/paths-filter@v3

@@ -71,7 +71,7 @@ jobs:
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
        uses: actions/checkout@v4

      - name: Set up Node.js
        uses: actions/setup-node@v6

@@ -107,7 +107,7 @@ jobs:
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

@@ -148,7 +148,7 @@ jobs:
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
        uses: actions/checkout@v4
        with:
          submodules: recursive

@@ -277,7 +277,7 @@ jobs:
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
        uses: actions/checkout@v4
        with:
          submodules: recursive

.github/workflows/platform-fullstack-ci.yml (vendored, 4 changed lines)
@@ -29,7 +29,7 @@ jobs:
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
        uses: actions/checkout@v4

      - name: Set up Node.js
        uses: actions/setup-node@v6

@@ -63,7 +63,7 @@ jobs:
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
        uses: actions/checkout@v4
        with:
          submodules: recursive

.github/workflows/repo-workflow-checker.yml (vendored, 2 changed lines)
@@ -11,7 +11,7 @@ jobs:
    steps:
      # - name: Wait some time for all actions to start
      #   run: sleep 30
      - uses: actions/checkout@v6
      - uses: actions/checkout@v4
      #   with:
      #     fetch-depth: 0
      - name: Set up Python
@@ -96,7 +96,13 @@ class ChatConfig(BaseSettings):
    # Extended thinking configuration for Claude models
    thinking_enabled: bool = Field(
        default=True,
        description="Enable adaptive thinking for Claude models via OpenRouter",
        description="Enable extended thinking for Claude models via OpenRouter",
    )
    thinking_budget_tokens: int = Field(
        default=10000,
        ge=1000,
        le=100000,
        description="Maximum tokens for extended thinking (budget_tokens for Claude)",
    )

    @field_validator("api_key", mode="before")
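The two thinking fields above are plain pydantic Field definitions, so their bounds are enforced at construction time. A minimal, illustrative sketch of that behaviour, using a stand-in BaseModel rather than the real ChatConfig (which is a BaseSettings subclass):

    from pydantic import BaseModel, Field, ValidationError

    # Stand-in for the two fields above; names and bounds copied from the diff.
    class ThinkingFields(BaseModel):
        thinking_enabled: bool = Field(default=True)
        thinking_budget_tokens: int = Field(default=10000, ge=1000, le=100000)

    ThinkingFields()                                # defaults: enabled, 10k budget
    ThinkingFields(thinking_budget_tokens=50000)    # accepted: within 1000..100000
    try:
        ThinkingFields(thinking_budget_tokens=500)  # rejected: below ge=1000
    except ValidationError as err:
        print(err.errors()[0]["type"])              # greater_than_equal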
@@ -2,7 +2,7 @@ import asyncio
import logging
import uuid
from datetime import UTC, datetime
from typing import Any, cast
from typing import Any
from weakref import WeakValueDictionary

from openai.types.chat import (
@@ -104,26 +104,6 @@ class ChatSession(BaseModel):
    successful_agent_runs: dict[str, int] = {}
    successful_agent_schedules: dict[str, int] = {}

    def add_tool_call_to_current_turn(self, tool_call: dict) -> None:
        """Attach a tool_call to the current turn's assistant message.

        Searches backwards for the most recent assistant message (stopping at
        any user message boundary). If found, appends the tool_call to it.
        Otherwise creates a new assistant message with the tool_call.
        """
        for msg in reversed(self.messages):
            if msg.role == "user":
                break
            if msg.role == "assistant":
                if not msg.tool_calls:
                    msg.tool_calls = []
                msg.tool_calls.append(tool_call)
                return

        self.messages.append(
            ChatMessage(role="assistant", content="", tool_calls=[tool_call])
        )

    @staticmethod
    def new(user_id: str) -> "ChatSession":
        return ChatSession(
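For reference, the backwards search described in the docstring above can be restated over plain dicts. This is illustrative only; the real method walks ChatMessage models on the session, not dicts:

    def attach_tool_call(messages: list[dict], tool_call: dict) -> None:
        """Plain-dict restatement of the search order described above."""
        for msg in reversed(messages):
            if msg["role"] == "user":
                break  # turn boundary: never modify an earlier turn's assistant
            if msg["role"] == "assistant":
                if not msg.get("tool_calls"):
                    msg["tool_calls"] = []
                msg["tool_calls"].append(tool_call)
                return
        messages.append({"role": "assistant", "content": "", "tool_calls": [tool_call]})

    msgs = [
        {"role": "user", "content": "hi"},
        {"role": "assistant", "content": "working on it"},
    ]
    attach_tool_call(msgs, {"id": "tc1", "type": "function",
                            "function": {"name": "f", "arguments": "{}"}})
    assert len(msgs) == 2 and msgs[1]["tool_calls"][0]["id"] == "tc1"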
@@ -192,47 +172,6 @@ class ChatSession(BaseModel):
            successful_agent_schedules=successful_agent_schedules,
        )

    @staticmethod
    def _merge_consecutive_assistant_messages(
        messages: list[ChatCompletionMessageParam],
    ) -> list[ChatCompletionMessageParam]:
        """Merge consecutive assistant messages into single messages.

        Long-running tool flows can create split assistant messages: one with
        text content and another with tool_calls. Anthropic's API requires
        tool_result blocks to reference a tool_use in the immediately preceding
        assistant message, so these splits cause 400 errors via OpenRouter.
        """
        if len(messages) < 2:
            return messages

        result: list[ChatCompletionMessageParam] = [messages[0]]
        for msg in messages[1:]:
            prev = result[-1]
            if prev.get("role") != "assistant" or msg.get("role") != "assistant":
                result.append(msg)
                continue

            prev = cast(ChatCompletionAssistantMessageParam, prev)
            curr = cast(ChatCompletionAssistantMessageParam, msg)

            curr_content = curr.get("content") or ""
            if curr_content:
                prev_content = prev.get("content") or ""
                prev["content"] = (
                    f"{prev_content}\n{curr_content}" if prev_content else curr_content
                )

            curr_tool_calls = curr.get("tool_calls")
            if curr_tool_calls:
                prev_tool_calls = prev.get("tool_calls")
                prev["tool_calls"] = (
                    list(prev_tool_calls) + list(curr_tool_calls)
                    if prev_tool_calls
                    else list(curr_tool_calls)
                )
        return result
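The failure mode the docstring describes is easiest to see as message shapes. An illustrative before/after using plain OpenAI-style dicts:

    tc = {"id": "tc1", "type": "function",
          "function": {"name": "create_agent", "arguments": "{}"}}

    # Split form produced by long-running tool flows: the tool result no longer
    # follows the assistant message that declared its tool_call, which Anthropic
    # rejects (400) when the request is relayed through OpenRouter.
    split = [
        {"role": "assistant", "content": "Let me build that"},
        {"role": "assistant", "content": "", "tool_calls": [tc]},
        {"role": "tool", "tool_call_id": "tc1", "content": "ok"},
    ]

    # Merged form this helper produces: one assistant message carrying both the
    # text and the tool_calls, immediately followed by the matching tool result.
    merged = [
        {"role": "assistant", "content": "Let me build that", "tool_calls": [tc]},
        {"role": "tool", "tool_call_id": "tc1", "content": "ok"},
    ]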
    def to_openai_messages(self) -> list[ChatCompletionMessageParam]:
        messages = []
        for message in self.messages:

@@ -319,7 +258,7 @@ class ChatSession(BaseModel):
                        name=message.name or "",
                    )
                )
        return self._merge_consecutive_assistant_messages(messages)
        return messages


async def _get_session_from_cache(session_id: str) -> ChatSession | None:
@@ -1,16 +1,4 @@
from typing import cast

import pytest
from openai.types.chat import (
    ChatCompletionAssistantMessageParam,
    ChatCompletionMessageParam,
    ChatCompletionToolMessageParam,
    ChatCompletionUserMessageParam,
)
from openai.types.chat.chat_completion_message_tool_call_param import (
    ChatCompletionMessageToolCallParam,
    Function,
)

from .model import (
    ChatMessage,
@@ -129,205 +117,3 @@ async def test_chatsession_db_storage(setup_test_user, test_user_id):
            loaded.tool_calls is not None
        ), f"Tool calls missing for {orig.role} message"
        assert len(orig.tool_calls) == len(loaded.tool_calls)


# --------------------------------------------------------------------------- #
#                   _merge_consecutive_assistant_messages                      #
# --------------------------------------------------------------------------- #

_tc = ChatCompletionMessageToolCallParam(
    id="tc1", type="function", function=Function(name="do_stuff", arguments="{}")
)
_tc2 = ChatCompletionMessageToolCallParam(
    id="tc2", type="function", function=Function(name="other", arguments="{}")
)


def test_merge_noop_when_no_consecutive_assistants():
    """Messages without consecutive assistants are returned unchanged."""
    msgs = [
        ChatCompletionUserMessageParam(role="user", content="hi"),
        ChatCompletionAssistantMessageParam(role="assistant", content="hello"),
        ChatCompletionUserMessageParam(role="user", content="bye"),
    ]
    merged = ChatSession._merge_consecutive_assistant_messages(msgs)
    assert len(merged) == 3
    assert [m["role"] for m in merged] == ["user", "assistant", "user"]


def test_merge_splits_text_and_tool_calls():
    """The exact bug scenario: text-only assistant followed by tool_calls-only assistant."""
    msgs = [
        ChatCompletionUserMessageParam(role="user", content="build agent"),
        ChatCompletionAssistantMessageParam(
            role="assistant", content="Let me build that"
        ),
        ChatCompletionAssistantMessageParam(
            role="assistant", content="", tool_calls=[_tc]
        ),
        ChatCompletionToolMessageParam(role="tool", content="ok", tool_call_id="tc1"),
    ]
    merged = ChatSession._merge_consecutive_assistant_messages(msgs)

    assert len(merged) == 3
    assert merged[0]["role"] == "user"
    assert merged[2]["role"] == "tool"
    a = cast(ChatCompletionAssistantMessageParam, merged[1])
    assert a["role"] == "assistant"
    assert a.get("content") == "Let me build that"
    assert a.get("tool_calls") == [_tc]


def test_merge_combines_tool_calls_from_both():
    """Both consecutive assistants have tool_calls — they get merged."""
    msgs: list[ChatCompletionAssistantMessageParam] = [
        ChatCompletionAssistantMessageParam(
            role="assistant", content="text", tool_calls=[_tc]
        ),
        ChatCompletionAssistantMessageParam(
            role="assistant", content="", tool_calls=[_tc2]
        ),
    ]
    merged = ChatSession._merge_consecutive_assistant_messages(msgs)  # type: ignore[arg-type]

    assert len(merged) == 1
    a = cast(ChatCompletionAssistantMessageParam, merged[0])
    assert a.get("tool_calls") == [_tc, _tc2]
    assert a.get("content") == "text"


def test_merge_three_consecutive_assistants():
    """Three consecutive assistants collapse into one."""
    msgs: list[ChatCompletionAssistantMessageParam] = [
        ChatCompletionAssistantMessageParam(role="assistant", content="a"),
        ChatCompletionAssistantMessageParam(role="assistant", content="b"),
        ChatCompletionAssistantMessageParam(
            role="assistant", content="", tool_calls=[_tc]
        ),
    ]
    merged = ChatSession._merge_consecutive_assistant_messages(msgs)  # type: ignore[arg-type]

    assert len(merged) == 1
    a = cast(ChatCompletionAssistantMessageParam, merged[0])
    assert a.get("content") == "a\nb"
    assert a.get("tool_calls") == [_tc]


def test_merge_empty_and_single_message():
    """Edge cases: empty list and single message."""
    assert ChatSession._merge_consecutive_assistant_messages([]) == []

    single: list[ChatCompletionMessageParam] = [
        ChatCompletionUserMessageParam(role="user", content="hi")
    ]
    assert ChatSession._merge_consecutive_assistant_messages(single) == single


# --------------------------------------------------------------------------- #
#                        add_tool_call_to_current_turn                         #
# --------------------------------------------------------------------------- #

_raw_tc = {
    "id": "tc1",
    "type": "function",
    "function": {"name": "f", "arguments": "{}"},
}
_raw_tc2 = {
    "id": "tc2",
    "type": "function",
    "function": {"name": "g", "arguments": "{}"},
}


def test_add_tool_call_appends_to_existing_assistant():
    """When the last assistant is from the current turn, tool_call is added to it."""
    session = ChatSession.new(user_id="u")
    session.messages = [
        ChatMessage(role="user", content="hi"),
        ChatMessage(role="assistant", content="working on it"),
    ]
    session.add_tool_call_to_current_turn(_raw_tc)

    assert len(session.messages) == 2  # no new message created
    assert session.messages[1].tool_calls == [_raw_tc]


def test_add_tool_call_creates_assistant_when_none_exists():
    """When there's no current-turn assistant, a new one is created."""
    session = ChatSession.new(user_id="u")
    session.messages = [
        ChatMessage(role="user", content="hi"),
    ]
    session.add_tool_call_to_current_turn(_raw_tc)

    assert len(session.messages) == 2
    assert session.messages[1].role == "assistant"
    assert session.messages[1].tool_calls == [_raw_tc]


def test_add_tool_call_does_not_cross_user_boundary():
    """A user message acts as a boundary — previous assistant is not modified."""
    session = ChatSession.new(user_id="u")
    session.messages = [
        ChatMessage(role="assistant", content="old turn"),
        ChatMessage(role="user", content="new message"),
    ]
    session.add_tool_call_to_current_turn(_raw_tc)

    assert len(session.messages) == 3  # new assistant was created
    assert session.messages[0].tool_calls is None  # old assistant untouched
    assert session.messages[2].role == "assistant"
    assert session.messages[2].tool_calls == [_raw_tc]


def test_add_tool_call_multiple_times():
    """Multiple long-running tool calls accumulate on the same assistant."""
    session = ChatSession.new(user_id="u")
    session.messages = [
        ChatMessage(role="user", content="hi"),
        ChatMessage(role="assistant", content="doing stuff"),
    ]
    session.add_tool_call_to_current_turn(_raw_tc)
    # Simulate a pending tool result in between (like _yield_tool_call does)
    session.messages.append(
        ChatMessage(role="tool", content="pending", tool_call_id="tc1")
    )
    session.add_tool_call_to_current_turn(_raw_tc2)

    assert len(session.messages) == 3  # user, assistant, tool — no extra assistant
    assert session.messages[1].tool_calls == [_raw_tc, _raw_tc2]


def test_to_openai_messages_merges_split_assistants():
    """End-to-end: session with split assistants produces valid OpenAI messages."""
    session = ChatSession.new(user_id="u")
    session.messages = [
        ChatMessage(role="user", content="build agent"),
        ChatMessage(role="assistant", content="Let me build that"),
        ChatMessage(
            role="assistant",
            content="",
            tool_calls=[
                {
                    "id": "tc1",
                    "type": "function",
                    "function": {"name": "create_agent", "arguments": "{}"},
                }
            ],
        ),
        ChatMessage(role="tool", content="done", tool_call_id="tc1"),
        ChatMessage(role="assistant", content="Saved!"),
        ChatMessage(role="user", content="show me an example run"),
    ]
    openai_msgs = session.to_openai_messages()

    # The two consecutive assistants at index 1,2 should be merged
    roles = [m["role"] for m in openai_msgs]
    assert roles == ["user", "assistant", "tool", "assistant", "user"]

    # The merged assistant should have both content and tool_calls
    merged = cast(ChatCompletionAssistantMessageParam, openai_msgs[1])
    assert merged.get("content") == "Let me build that"
    tc_list = merged.get("tool_calls")
    assert tc_list is not None and len(list(tc_list)) == 1
    assert list(tc_list)[0]["id"] == "tc1"
@@ -10,8 +10,6 @@ from typing import Any

from pydantic import BaseModel, Field

from backend.util.json import dumps as json_dumps


class ResponseType(str, Enum):
    """Types of streaming responses following AI SDK protocol."""
@@ -195,18 +193,6 @@ class StreamError(StreamBaseResponse):
        default=None, description="Additional error details"
    )

    def to_sse(self) -> str:
        """Convert to SSE format, only emitting fields required by AI SDK protocol.

        The AI SDK uses z.strictObject({type, errorText}) which rejects
        any extra fields like `code` or `details`.
        """
        data = {
            "type": self.type.value,
            "errorText": self.errorText,
        }
        return f"data: {json_dumps(data)}\n\n"


class StreamHeartbeat(StreamBaseResponse):
    """Heartbeat to keep SSE connection alive during long-running operations.
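Concretely, the reduced to_sse() payload comes out as a single SSE data line. A sketch, assuming the error response type serialises to the string "error" and that the project's json_dumps behaves like the standard library's json.dumps here:

    import json

    payload = {"type": "error", "errorText": "Upstream model timed out"}
    sse_event = f"data: {json.dumps(payload)}\n\n"
    # data: {"type": "error", "errorText": "Upstream model timed out"}
    # No `code` or `details` keys: z.strictObject({type, errorText}) would reject them.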
@@ -80,6 +80,19 @@ settings = Settings()
client = openai.AsyncOpenAI(api_key=config.api_key, base_url=config.base_url)


def _apply_thinking_config(extra_body: dict[str, Any], model: str) -> None:
    """Apply extended thinking configuration for Anthropic models via OpenRouter.

    OpenRouter's reasoning API expects either:
    - {"max_tokens": N} for explicit token budget
    - {"effort": "high"} for automatic budget

    See: https://openrouter.ai/docs/reasoning-tokens
    """
    if config.thinking_enabled and "anthropic" in model.lower():
        extra_body["reasoning"] = {"max_tokens": config.thinking_budget_tokens}


langfuse = get_client()

# Redis key prefix for tracking running long-running operations
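The later hunks in this file switch each call site to this helper. The intended pattern looks roughly like the following; the model slug is only an example, and extra_body is the OpenAI SDK's passthrough for provider-specific fields such as OpenRouter's reasoning object:

    extra_body: dict = {"session_id": "abc123"}

    _apply_thinking_config(extra_body, model="anthropic/claude-sonnet-4.5")
    # With thinking enabled and an Anthropic model, extra_body now holds:
    #   {"session_id": "abc123", "reasoning": {"max_tokens": 10000}}
    # For non-Anthropic models, or thinking_enabled=False, it is left unchanged.

    # stream = await client.chat.completions.create(
    #     model=model, messages=messages, stream=True, extra_body=extra_body,
    # )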
@@ -800,13 +813,9 @@ async def stream_chat_completion(
        # Build the messages list in the correct order
        messages_to_save: list[ChatMessage] = []

        # Add assistant message with tool_calls if any.
        # Use extend (not assign) to preserve tool_calls already added by
        # _yield_tool_call for long-running tools.
        # Add assistant message with tool_calls if any
        if accumulated_tool_calls:
            if not assistant_response.tool_calls:
                assistant_response.tool_calls = []
            assistant_response.tool_calls.extend(accumulated_tool_calls)
            assistant_response.tool_calls = accumulated_tool_calls
            logger.info(
                f"Added {len(accumulated_tool_calls)} tool calls to assistant message"
            )
@@ -1070,9 +1079,8 @@ async def _stream_chat_chunks(
            :128
        ]  # OpenRouter limit

    # Enable adaptive thinking for Anthropic models via OpenRouter
    if config.thinking_enabled and "anthropic" in model.lower():
        extra_body["reasoning"] = {"enabled": True}
    # Enable extended thinking for Anthropic models via OpenRouter
    _apply_thinking_config(extra_body, model)

    api_call_start = time_module.perf_counter()
    stream = await client.chat.completions.create(
@@ -1408,9 +1416,13 @@ async def _yield_tool_call(
        operation_id=operation_id,
    )

    # Attach the tool_call to the current turn's assistant message
    # (or create one if this is a tool-only response with no text).
    session.add_tool_call_to_current_turn(tool_calls[yield_idx])
    # Save assistant message with tool_call FIRST (required by LLM)
    assistant_message = ChatMessage(
        role="assistant",
        content="",
        tool_calls=[tool_calls[yield_idx]],
    )
    session.messages.append(assistant_message)

    # Then save pending tool result
    pending_message = ChatMessage(
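The two comments above encode an ordering rule: by the time the next LLM call is made, the session tail must contain the tool_call declaration before the still-pending tool result. Illustrative shape only:

    session_tail = [
        {"role": "assistant", "content": "",
         "tool_calls": [{"id": "tc1", "type": "function",
                         "function": {"name": "create_agent", "arguments": "{}"}}]},
        # Placeholder result saved immediately after, while the operation runs;
        # a tool result without the preceding tool_call would be rejected.
        {"role": "tool", "tool_call_id": "tc1", "content": "pending"},
    ]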
@@ -1833,9 +1845,8 @@ async def _generate_llm_continuation(
    if session_id:
        extra_body["session_id"] = session_id[:128]

    # Enable adaptive thinking for Anthropic models via OpenRouter
    if config.thinking_enabled and "anthropic" in config.model.lower():
        extra_body["reasoning"] = {"enabled": True}
    # Enable extended thinking for Anthropic models via OpenRouter
    _apply_thinking_config(extra_body, config.model)

    retry_count = 0
    last_error: Exception | None = None

@@ -1967,9 +1978,8 @@ async def _generate_llm_continuation_with_streaming(
    if session_id:
        extra_body["session_id"] = session_id[:128]

    # Enable adaptive thinking for Anthropic models via OpenRouter
    if config.thinking_enabled and "anthropic" in config.model.lower():
        extra_body["reasoning"] = {"enabled": True}
    # Enable extended thinking for Anthropic models via OpenRouter
    _apply_thinking_config(extra_body, config.model)

    # Make streaming LLM call (no tools - just text response)

from typing import cast
@@ -21,71 +21,43 @@ logger = logging.getLogger(__name__)

class HumanInTheLoopBlock(Block):
    """
    Pauses execution and waits for human approval or rejection of the data.
    This block pauses execution and waits for human approval or modification of the data.

    When executed, this block creates a pending review entry and sets the node execution
    status to REVIEW. The execution remains paused until a human user either approves
    or rejects the data.
    When executed, it creates a pending review entry and sets the node execution status
    to REVIEW. The execution will remain paused until a human user either:
    - Approves the data (with or without modifications)
    - Rejects the data

    **How it works:**
    - The input data is presented to a human reviewer
    - The reviewer can approve or reject (and optionally modify the data if editable)
    - On approval: the data flows out through the `approved_data` output pin
    - On rejection: the data flows out through the `rejected_data` output pin

    **Important:** The output pins yield the actual data itself, NOT status strings.
    The approval/rejection decision determines WHICH output pin fires, not the value.
    You do NOT need to compare the output to "APPROVED" or "REJECTED" - simply connect
    downstream blocks to the appropriate output pin for each case.

    **Example usage:**
    - Connect `approved_data` → next step in your workflow (data was approved)
    - Connect `rejected_data` → error handling or notification (data was rejected)
    This is useful for workflows that require human validation or intervention before
    proceeding to the next steps.
    """

    class Input(BlockSchemaInput):
        data: Any = SchemaField(
            description="The data to be reviewed by a human user. "
            "This exact data will be passed through to either approved_data or "
            "rejected_data output based on the reviewer's decision."
        )
        data: Any = SchemaField(description="The data to be reviewed by a human user")
        name: str = SchemaField(
            description="A descriptive name for what this data represents. "
            "This helps the reviewer understand what they are reviewing.",
            description="A descriptive name for what this data represents",
        )
        editable: bool = SchemaField(
            description="Whether the human reviewer can edit the data before "
            "approving or rejecting it",
            description="Whether the human reviewer can edit the data",
            default=True,
            advanced=True,
        )

    class Output(BlockSchemaOutput):
        approved_data: Any = SchemaField(
            description="Outputs the input data when the reviewer APPROVES it. "
            "The value is the actual data itself (not a status string like 'APPROVED'). "
            "If the reviewer edited the data, this contains the modified version. "
            "Connect downstream blocks here for the 'approved' workflow path."
            description="The data when approved (may be modified by reviewer)"
        )
        rejected_data: Any = SchemaField(
            description="Outputs the input data when the reviewer REJECTS it. "
            "The value is the actual data itself (not a status string like 'REJECTED'). "
            "If the reviewer edited the data, this contains the modified version. "
            "Connect downstream blocks here for the 'rejected' workflow path."
            description="The data when rejected (may be modified by reviewer)"
        )
        review_message: str = SchemaField(
            description="Optional message provided by the reviewer explaining their "
            "decision. Only outputs when the reviewer provides a message; "
            "this pin does not fire if no message was given.",
            default="",
            description="Any message provided by the reviewer", default=""
        )

    def __init__(self):
        super().__init__(
            id="8b2a7b3c-6e9d-4a5f-8c1b-2e3f4a5b6c7d",
            description="Pause execution for human review. Data flows through "
            "approved_data or rejected_data output based on the reviewer's decision. "
            "Outputs contain the actual data, not status strings.",
            description="Pause execution and wait for human approval or modification of data",
            categories={BlockCategory.BASIC},
            input_schema=HumanInTheLoopBlock.Input,
            output_schema=HumanInTheLoopBlock.Output,
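The routing rule spelled out in the block description reduces to "the decision selects the output pin; the pin carries the data". A hedged, illustrative restatement only, not the block's actual run() implementation, which is not part of this diff:

    def route_review(decision: str, data: dict) -> dict[str, dict]:
        """Approval picks WHICH pin fires; the value is the (possibly edited) data."""
        if decision == "approve":
            return {"approved_data": data}   # downstream "approved" path
        return {"rejected_data": data}       # downstream "rejected" path

    assert route_review("approve", {"x": 1}) == {"approved_data": {"x": 1}}
    assert route_review("reject", {"x": 1}) == {"rejected_data": {"x": 1}}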
@@ -743,11 +743,6 @@ class GraphModel(Graph, GraphMeta):
                # For invalid blocks, we still raise immediately as this is a structural issue
                raise ValueError(f"Invalid block {node.block_id} for node #{node.id}")

            if block.disabled:
                raise ValueError(
                    f"Block {node.block_id} is disabled and cannot be used in graphs"
                )

            node_input_mask = (
                nodes_input_masks.get(node.id, {}) if nodes_input_masks else {}
            )

@@ -213,9 +213,6 @@ async def execute_node(
        block_name=node_block.name,
    )

    if node_block.disabled:
        raise ValueError(f"Block {node_block.id} is disabled and cannot be executed")

    # Sanity check: validate the execution input.
    input_data, error = validate_exec(node, data.inputs, resolve_input=False)
    if input_data is None:
@@ -364,44 +364,6 @@ def _remove_orphan_tool_responses(
    return result


def validate_and_remove_orphan_tool_responses(
    messages: list[dict],
    log_warning: bool = True,
) -> list[dict]:
    """
    Validate tool_call/tool_response pairs and remove orphaned responses.

    Scans messages in order, tracking all tool_call IDs. Any tool response
    referencing an ID not seen in a preceding message is considered orphaned
    and removed. This prevents API errors like Anthropic's "unexpected tool_use_id".

    Args:
        messages: List of messages to validate (OpenAI or Anthropic format)
        log_warning: Whether to log a warning when orphans are found

    Returns:
        A new list with orphaned tool responses removed
    """
    available_ids: set[str] = set()
    orphan_ids: set[str] = set()

    for msg in messages:
        available_ids |= _extract_tool_call_ids_from_message(msg)
        for resp_id in _extract_tool_response_ids_from_message(msg):
            if resp_id not in available_ids:
                orphan_ids.add(resp_id)

    if not orphan_ids:
        return messages

    if log_warning:
        logger.warning(
            f"Removing {len(orphan_ids)} orphan tool response(s): {orphan_ids}"
        )

    return _remove_orphan_tool_responses(messages, orphan_ids)


def _ensure_tool_pairs_intact(
    recent_messages: list[dict],
    all_messages: list[dict],
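An illustrative input/output pair for the validator above, using plain OpenAI-style dicts; the _extract_* helpers' exact format handling is not shown in this diff:

    messages = [
        {"role": "assistant", "content": "",
         "tool_calls": [{"id": "tc1", "type": "function",
                         "function": {"name": "search", "arguments": "{}"}}]},
        {"role": "tool", "tool_call_id": "tc1", "content": "found 3 results"},
        # Orphan: its tool_call was dropped by an earlier compression step.
        {"role": "tool", "tool_call_id": "tc9", "content": "stale result"},
    ]
    # validate_and_remove_orphan_tool_responses(messages) keeps the first two
    # messages, drops the tc9 response, and logs a warning about 1 orphan.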
@@ -761,13 +723,6 @@ async def compress_context(

    # Filter out any None values that may have been introduced
    final_msgs: list[dict] = [m for m in msgs if m is not None]

    # ---- STEP 6: Final tool-pair validation ---------------------------------
    # After all compression steps, verify that every tool response has a
    # matching tool_call in a preceding assistant message. Remove orphans
    # to prevent API errors (e.g., Anthropic's "unexpected tool_use_id").
    final_msgs = validate_and_remove_orphan_tool_responses(final_msgs)

    final_count = sum(_msg_tokens(m, enc) for m in final_msgs)
    error = None
    if final_count + reserve > target_tokens:

autogpt_platform/backend/poetry.lock (generated, 10 changed lines)
@@ -46,14 +46,14 @@ pycares = ">=4.9.0,<5"

[[package]]
name = "aiofiles"
version = "25.1.0"
version = "24.1.0"
description = "File support for asyncio."
optional = false
python-versions = ">=3.9"
python-versions = ">=3.8"
groups = ["main"]
files = [
    {file = "aiofiles-25.1.0-py3-none-any.whl", hash = "sha256:abe311e527c862958650f9438e859c1fa7568a141b22abcd015e120e86a85695"},
    {file = "aiofiles-25.1.0.tar.gz", hash = "sha256:a8d728f0a29de45dc521f18f07297428d56992a742f0cd2701ba86e44d23d5b2"},
    {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"},
    {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"},
]

[[package]]

@@ -8440,4 +8440,4 @@ cffi = ["cffi (>=1.17,<2.0) ; platform_python_implementation != \"PyPy\" and pyt

[metadata]
lock-version = "2.1"
python-versions = ">=3.10,<3.14"
content-hash = "c06e96ad49388ba7a46786e9ea55ea2c1a57408e15613237b4bee40a592a12af"
content-hash = "fc135114e01de39c8adf70f6132045e7d44a19473c1279aee0978de65aad1655"

@@ -76,7 +76,7 @@ yt-dlp = "2025.12.08"
zerobouncesdk = "^1.1.2"
# NOTE: please insert new dependencies in their alphabetical location
pytest-snapshot = "^0.9.0"
aiofiles = "^25.1.0"
aiofiles = "^24.1.0"
tiktoken = "^0.12.0"
aioclamd = "^1.0.0"
setuptools = "^80.9.0"
@@ -4,7 +4,7 @@ import {
} from "@/app/api/__generated__/endpoints/graphs/graphs";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { parseAsInteger, parseAsString, useQueryStates } from "nuqs";
import { GraphExecutionMeta } from "@/app/api/__generated__/models/graphExecutionMeta";
import { GraphExecutionMeta } from "@/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/use-agent-runs";
import { useGraphStore } from "@/app/(platform)/build/stores/graphStore";
import { useShallow } from "zustand/react/shallow";
import { useEffect, useState } from "react";
@@ -0,0 +1,31 @@
"use client";

import { Tabs, TabsList, TabsTrigger } from "@/components/__legacy__/ui/tabs";

export type BuilderView = "old" | "new";

export function BuilderViewTabs({
  value,
  onChange,
}: {
  value: BuilderView;
  onChange: (value: BuilderView) => void;
}) {
  return (
    <div className="pointer-events-auto fixed right-4 top-20 z-50">
      <Tabs
        value={value}
        onValueChange={(v: string) => onChange(v as BuilderView)}
      >
        <TabsList className="w-fit bg-zinc-900">
          <TabsTrigger value="old" className="text-gray-100">
            Old
          </TabsTrigger>
          <TabsTrigger value="new" className="text-gray-100">
            New
          </TabsTrigger>
        </TabsList>
      </Tabs>
    </div>
  );
}
@@ -23,9 +23,6 @@ import { useCopyPaste } from "./useCopyPaste";
import { useFlow } from "./useFlow";
import { useFlowRealtime } from "./useFlowRealtime";

import "@xyflow/react/dist/style.css";
import "./flow.css";

export const Flow = () => {
  const [{ flowID, flowExecutionID }] = useQueryStates({
    flowID: parseAsString,
@@ -1,9 +0,0 @@
/* Reset default xyflow handle styles so custom Phosphor icon handles render correctly */
.react-flow__handle {
  background: transparent;
  width: auto;
  height: auto;
  border: 0;
  position: relative;
  transform: none;
}
@@ -0,0 +1,83 @@
import { useMemo } from "react";

import { Link } from "@/app/api/__generated__/models/link";
import { useEdgeStore } from "../stores/edgeStore";
import { useNodeStore } from "../stores/nodeStore";
import { scrollbarStyles } from "@/components/styles/scrollbars";
import { cn } from "@/lib/utils";
import { customEdgeToLink } from "./helper";

export const RightSidebar = () => {
  const edges = useEdgeStore((s) => s.edges);
  const nodes = useNodeStore((s) => s.nodes);

  const backendLinks: Link[] = useMemo(
    () => edges.map(customEdgeToLink),
    [edges],
  );

  return (
    <div
      className={cn(
        "flex h-full w-full flex-col border-l border-slate-200 bg-white p-4 dark:border-slate-700 dark:bg-slate-900",
        scrollbarStyles,
      )}
    >
      <div className="mb-4">
        <h2 className="text-lg font-semibold text-slate-800 dark:text-slate-200">
          Graph Debug Panel
        </h2>
      </div>

      <div className="flex-1 overflow-y-auto">
        <h3 className="mb-2 text-sm font-semibold text-slate-700 dark:text-slate-200">
          Nodes ({nodes.length})
        </h3>
        <div className="mb-6 space-y-3">
          {nodes.map((n) => (
            <div
              key={n.id}
              className="rounded border p-2 text-xs dark:border-slate-700"
            >
              <div className="mb-1 font-medium">
                #{n.id} {n.data?.title ? `– ${n.data.title}` : ""}
              </div>
              <div className="text-slate-500 dark:text-slate-400">
                hardcodedValues
              </div>
              <pre className="mt-1 max-h-40 overflow-auto rounded bg-slate-50 p-2 dark:bg-slate-800">
                {JSON.stringify(n.data?.hardcodedValues ?? {}, null, 2)}
              </pre>
            </div>
          ))}
        </div>

        <h3 className="mb-2 text-sm font-semibold text-slate-700 dark:text-slate-200">
          Links ({backendLinks.length})
        </h3>
        <div className="mb-6 space-y-3">
          {backendLinks.map((l) => (
            <div
              key={l.id}
              className="rounded border p-2 text-xs dark:border-slate-700"
            >
              <div className="font-medium">
                {l.source_id}[{l.source_name}] → {l.sink_id}[{l.sink_name}]
              </div>
              <div className="mt-1 text-slate-500 dark:text-slate-400">
                edge.id: {l.id}
              </div>
            </div>
          ))}
        </div>

        <h4 className="mb-2 text-xs font-semibold text-slate-600 dark:text-slate-300">
          Backend Links JSON
        </h4>
        <pre className="max-h-64 overflow-auto rounded bg-slate-50 p-2 text-[11px] dark:bg-slate-800">
          {JSON.stringify(backendLinks, null, 2)}
        </pre>
      </div>
    </div>
  );
};
@@ -1,6 +1,6 @@
import { useCallback } from "react";

import { AgentRunDraftView } from "@/app/(platform)/build/components/legacy-builder/agent-run-draft-view";
import { AgentRunDraftView } from "@/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/agent-run-draft-view";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import type {
  CredentialsMetaInput,

@@ -18,7 +18,7 @@ import {
import { useToast } from "@/components/molecules/Toast/use-toast";
import { useQueryClient } from "@tanstack/react-query";
import { getGetV2ListMySubmissionsQueryKey } from "@/app/api/__generated__/endpoints/store/store";
import { CronExpressionDialog } from "@/components/contextual/CronScheduler/cron-scheduler-dialog";
import { CronExpressionDialog } from "@/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/cron-scheduler-dialog";
import { humanizeCronExpression } from "@/lib/cron-expression-utils";
import { CalendarClockIcon } from "lucide-react";
@@ -1,13 +1,64 @@
"use client";
import { ReactFlowProvider } from "@xyflow/react";
import { Flow } from "./components/FlowEditor/Flow/Flow";

export default function BuilderPage() {
import FlowEditor from "@/app/(platform)/build/components/legacy-builder/Flow/Flow";
import { useOnboarding } from "@/providers/onboarding/onboarding-provider";
// import LoadingBox from "@/components/__legacy__/ui/loading";
import { GraphID } from "@/lib/autogpt-server-api/types";
import { ReactFlowProvider } from "@xyflow/react";
import { useSearchParams } from "next/navigation";
import { useEffect } from "react";
import { BuilderViewTabs } from "./components/BuilderViewTabs/BuilderViewTabs";
import { Flow } from "./components/FlowEditor/Flow/Flow";
import { useBuilderView } from "./useBuilderView";

function BuilderContent() {
  const query = useSearchParams();
  const { completeStep } = useOnboarding();

  useEffect(() => {
    completeStep("BUILDER_OPEN");
  }, [completeStep]);

  const _graphVersion = query.get("flowVersion");
  const graphVersion = _graphVersion ? parseInt(_graphVersion) : undefined;
  return (
    <div className="relative h-full w-full">
      <ReactFlowProvider>
        <Flow />
      </ReactFlowProvider>
    </div>
    <FlowEditor
      className="flex h-full w-full"
      flowID={(query.get("flowID") as GraphID | null) ?? undefined}
      flowVersion={graphVersion}
    />
  );
}

export default function BuilderPage() {
  const {
    isSwitchEnabled,
    selectedView,
    setSelectedView,
    isNewFlowEditorEnabled,
  } = useBuilderView();

  // Switch is temporary, we will remove it once our new flow editor is ready
  if (isSwitchEnabled) {
    return (
      <div className="relative h-full w-full">
        <BuilderViewTabs value={selectedView} onChange={setSelectedView} />
        {selectedView === "new" ? (
          <ReactFlowProvider>
            <Flow />
          </ReactFlowProvider>
        ) : (
          <BuilderContent />
        )}
      </div>
    );
  }

  return isNewFlowEditorEnabled ? (
    <ReactFlowProvider>
      <Flow />
    </ReactFlowProvider>
  ) : (
    <BuilderContent />
  );
}
@@ -0,0 +1,44 @@
import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
import { usePathname, useRouter, useSearchParams } from "next/navigation";
import { useEffect, useMemo } from "react";
import { BuilderView } from "./components/BuilderViewTabs/BuilderViewTabs";

export function useBuilderView() {
  const isNewFlowEditorEnabled = useGetFlag(Flag.NEW_FLOW_EDITOR);
  const isBuilderViewSwitchEnabled = useGetFlag(Flag.BUILDER_VIEW_SWITCH);

  const router = useRouter();
  const pathname = usePathname();
  const searchParams = useSearchParams();

  const currentView = searchParams.get("view");
  const defaultView = "old";
  const selectedView = useMemo<BuilderView>(() => {
    if (currentView === "new" || currentView === "old") return currentView;
    return defaultView;
  }, [currentView, defaultView]);

  useEffect(() => {
    if (isBuilderViewSwitchEnabled === true) {
      if (currentView !== "new" && currentView !== "old") {
        const params = new URLSearchParams(searchParams);
        params.set("view", defaultView);
        router.replace(`${pathname}?${params.toString()}`);
      }
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [isBuilderViewSwitchEnabled, defaultView, pathname, router, searchParams]);

  const setSelectedView = (value: BuilderView) => {
    const params = new URLSearchParams(searchParams);
    params.set("view", value);
    router.push(`${pathname}?${params.toString()}`);
  };

  return {
    isSwitchEnabled: isBuilderViewSwitchEnabled === true,
    selectedView,
    setSelectedView,
    isNewFlowEditorEnabled: Boolean(isNewFlowEditorEnabled),
  } as const;
}
@@ -1,11 +1,11 @@
"use client";

import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
import { SidebarProvider } from "@/components/ui/sidebar";
import { ChatContainer } from "./components/ChatContainer/ChatContainer";
import { ChatSidebar } from "./components/ChatSidebar/ChatSidebar";
import { MobileDrawer } from "./components/MobileDrawer/MobileDrawer";
import { MobileHeader } from "./components/MobileHeader/MobileHeader";
import { ScaleLoader } from "./components/ScaleLoader/ScaleLoader";
import { useCopilotPage } from "./useCopilotPage";

export function CopilotPage() {

@@ -34,11 +34,7 @@ export function CopilotPage() {
  } = useCopilotPage();

  if (isUserLoading || !isLoggedIn) {
    return (
      <div className="fixed inset-0 z-50 flex items-center justify-center bg-[#f8f8f9]">
        <ScaleLoader className="text-neutral-400" />
      </div>
    );
    return <LoadingSpinner size="large" cover />;
  }

  return (
@@ -10,9 +10,8 @@ import {
  MessageResponse,
} from "@/components/ai-elements/message";
import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
import { toast } from "@/components/molecules/Toast/use-toast";
import { ToolUIPart, UIDataTypes, UIMessage, UITools } from "ai";
import { useEffect, useRef, useState } from "react";
import { useEffect, useState } from "react";
import { CreateAgentTool } from "../../tools/CreateAgent/CreateAgent";
import { EditAgentTool } from "../../tools/EditAgent/EditAgent";
import { FindAgentsTool } from "../../tools/FindAgents/FindAgents";

@@ -122,7 +121,6 @@ export const ChatMessagesContainer = ({
  isLoading,
}: ChatMessagesContainerProps) => {
  const [thinkingPhrase, setThinkingPhrase] = useState(getRandomPhrase);
  const lastToastTimeRef = useRef(0);

  useEffect(() => {
    if (status === "submitted") {

@@ -130,20 +128,6 @@ export const ChatMessagesContainer = ({
    }
  }, [status]);

  // Show a toast when a new error occurs, debounced to avoid spam
  useEffect(() => {
    if (!error) return;
    const now = Date.now();
    if (now - lastToastTimeRef.current < 3_000) return;
    lastToastTimeRef.current = now;
    toast({
      variant: "destructive",
      title: "Something went wrong",
      description:
        "The assistant encountered an error. Please try sending your message again.",
    });
  }, [error]);

  const lastMessage = messages[messages.length - 1];
  const lastAssistantHasVisibleContent =
    lastMessage?.role === "assistant" &&

@@ -159,10 +143,10 @@ export const ChatMessagesContainer = ({

  return (
    <Conversation className="min-h-0 flex-1">
      <ConversationContent className="flex min-h-screen flex-1 flex-col gap-6 px-3 py-6">
      <ConversationContent className="gap-6 px-3 py-6">
        {isLoading && messages.length === 0 && (
          <div className="flex min-h-full flex-1 items-center justify-center">
            <LoadingSpinner className="text-neutral-600" />
          <div className="flex flex-1 items-center justify-center">
            <LoadingSpinner size="large" className="text-neutral-400" />
          </div>
        )}
        {messages.map((message, messageIndex) => {

@@ -279,12 +263,8 @@ export const ChatMessagesContainer = ({
          </Message>
        )}
        {error && (
          <div className="rounded-lg bg-red-50 p-4 text-sm text-red-700">
            <p className="font-medium">Something went wrong</p>
            <p className="mt-1 text-red-600">
              The assistant encountered an error. Please try sending your
              message again.
            </p>
          <div className="rounded-lg bg-red-50 p-3 text-red-600">
            Error: {error.message}
          </div>
        )}
      </ConversationContent>
@@ -121,8 +121,8 @@ export function ChatSidebar() {
        className="mt-4 flex flex-col gap-1"
      >
        {isLoadingSessions ? (
          <div className="flex min-h-[30rem] items-center justify-center py-4">
            <LoadingSpinner size="small" className="text-neutral-600" />
          <div className="flex items-center justify-center py-4">
            <LoadingSpinner size="small" className="text-neutral-400" />
          </div>
        ) : sessions.length === 0 ? (
          <p className="py-4 text-center text-sm text-neutral-500">
@@ -1,35 +0,0 @@
.loader {
  width: 48px;
  height: 48px;
  display: inline-block;
  position: relative;
}

.loader::after,
.loader::before {
  content: "";
  box-sizing: border-box;
  width: 100%;
  height: 100%;
  border-radius: 50%;
  background: currentColor;
  position: absolute;
  left: 0;
  top: 0;
  animation: animloader 2s linear infinite;
}

.loader::after {
  animation-delay: 1s;
}

@keyframes animloader {
  0% {
    transform: scale(0);
    opacity: 1;
  }
  100% {
    transform: scale(1);
    opacity: 0;
  }
}

@@ -1,16 +0,0 @@
import { cn } from "@/lib/utils";
import styles from "./ScaleLoader.module.css";

interface Props {
  size?: number;
  className?: string;
}

export function ScaleLoader({ size = 48, className }: Props) {
  return (
    <div
      className={cn(styles.loader, className)}
      style={{ width: size, height: size }}
    />
  );
}
@@ -30,7 +30,7 @@ export function ContentCard({
  return (
    <div
      className={cn(
        "min-w-0 rounded-lg bg-gradient-to-r from-purple-500/30 to-blue-500/30 p-[1px]",
        "rounded-lg bg-gradient-to-r from-purple-500/30 to-blue-500/30 p-[1px]",
        className,
      )}
    >
@@ -4,6 +4,7 @@ import { WarningDiamondIcon } from "@phosphor-icons/react";
import type { ToolUIPart } from "ai";
import { useCopilotChatActions } from "../../components/CopilotChatActionsProvider/useCopilotChatActions";
import { MorphingTextAnimation } from "../../components/MorphingTextAnimation/MorphingTextAnimation";
import { OrbitLoader } from "../../components/OrbitLoader/OrbitLoader";
import { ProgressBar } from "../../components/ProgressBar/ProgressBar";
import {
  ContentCardDescription,

@@ -48,7 +49,12 @@ interface Props {
  part: CreateAgentToolPart;
}

function getAccordionMeta(output: CreateAgentToolOutput) {
function getAccordionMeta(output: CreateAgentToolOutput): {
  icon: React.ReactNode;
  title: React.ReactNode;
  titleClassName?: string;
  description?: string;
} {
  const icon = <AccordionIcon />;

  if (isAgentSavedOutput(output)) {

@@ -67,7 +73,6 @@ function getAccordionMeta(output: CreateAgentToolOutput) {
      icon,
      title: "Needs clarification",
      description: `${questions.length} question${questions.length === 1 ? "" : "s"}`,
      expanded: true,
    };
  }
  if (

@@ -76,7 +81,7 @@ function getAccordionMeta(output: CreateAgentToolOutput) {
    isOperationInProgressOutput(output)
  ) {
    return {
      icon,
      icon: <OrbitLoader size={32} />,
      title: "Creating agent, this may take a few minutes. Sit back and relax.",
    };
  }

@@ -92,23 +97,18 @@ function getAccordionMeta(output: CreateAgentToolOutput) {
export function CreateAgentTool({ part }: Props) {
  const text = getAnimationText(part);
  const { onSend } = useCopilotChatActions();

  const isStreaming =
    part.state === "input-streaming" || part.state === "input-available";

  const output = getCreateAgentToolOutput(part);

  const isError =
    part.state === "output-error" || (!!output && isErrorOutput(output));

  const isOperating =
    !!output &&
    (isOperationStartedOutput(output) ||
      isOperationPendingOutput(output) ||
      isOperationInProgressOutput(output));

  const progress = useAsymptoticProgress(isOperating);

  const hasExpandableContent =
    part.state === "output-available" &&
    !!output &&

@@ -149,7 +149,10 @@ export function CreateAgentTool({ part }: Props) {
      </div>

      {hasExpandableContent && output && (
        <ToolAccordion {...getAccordionMeta(output)}>
        <ToolAccordion
          {...getAccordionMeta(output)}
          defaultExpanded={isOperating || isClarificationNeededOutput(output)}
        >
          {isOperating && (
            <ContentGrid>
              <ProgressBar value={progress} className="max-w-[280px]" />
@@ -146,7 +146,10 @@ export function EditAgentTool({ part }: Props) {
      </div>

      {hasExpandableContent && output && (
        <ToolAccordion {...getAccordionMeta(output)}>
        <ToolAccordion
          {...getAccordionMeta(output)}
          defaultExpanded={isOperating || isClarificationNeededOutput(output)}
        >
          {isOperating && (
            <ContentGrid>
              <ProgressBar value={progress} className="max-w-[280px]" />

@@ -61,7 +61,14 @@ export function RunAgentTool({ part }: Props) {
      </div>

      {hasExpandableContent && output && (
        <ToolAccordion {...getAccordionMeta(output)}>
        <ToolAccordion
          {...getAccordionMeta(output)}
          defaultExpanded={
            isRunAgentExecutionStartedOutput(output) ||
            isRunAgentSetupRequirementsOutput(output) ||
            isRunAgentAgentDetailsOutput(output)
          }
        >
          {isRunAgentExecutionStartedOutput(output) && (
            <ExecutionStartedCard output={output} />
          )}
@@ -10,7 +10,7 @@ import {
  WarningDiamondIcon,
} from "@phosphor-icons/react";
import type { ToolUIPart } from "ai";
import { OrbitLoader } from "../../components/OrbitLoader/OrbitLoader";
import { SpinnerLoader } from "../../components/SpinnerLoader/SpinnerLoader";

export interface RunAgentInput {
  username_agent_slug?: string;

@@ -171,7 +171,7 @@ export function ToolIcon({
    );
  }
  if (isStreaming) {
    return <OrbitLoader size={24} />;
    return <SpinnerLoader size={40} className="text-neutral-700" />;
  }
  return <PlayIcon size={14} weight="regular" className="text-neutral-400" />;
}

@@ -203,7 +203,7 @@ export function getAccordionMeta(output: RunAgentToolOutput): {
    ? output.status.trim()
    : "started";
  return {
    icon,
    icon: <SpinnerLoader size={28} className="text-neutral-700" />,
    title: output.graph_name,
    description: `Status: ${statusText}`,
  };
@@ -55,7 +55,13 @@ export function RunBlockTool({ part }: Props) {
      </div>

      {hasExpandableContent && output && (
        <ToolAccordion {...getAccordionMeta(output)}>
        <ToolAccordion
          {...getAccordionMeta(output)}
          defaultExpanded={
            isRunBlockBlockOutput(output) ||
            isRunBlockSetupRequirementsOutput(output)
          }
        >
          {isRunBlockBlockOutput(output) && <BlockOutputCard output={output} />}

          {isRunBlockSetupRequirementsOutput(output) && (
@@ -8,7 +8,7 @@ import {
   WarningDiamondIcon,
 } from "@phosphor-icons/react";
 import type { ToolUIPart } from "ai";
-import { OrbitLoader } from "../../components/OrbitLoader/OrbitLoader";
+import { SpinnerLoader } from "../../components/SpinnerLoader/SpinnerLoader";

 export interface RunBlockInput {
   block_id?: string;
@@ -120,7 +120,7 @@ export function ToolIcon({
     );
   }
   if (isStreaming) {
-    return <OrbitLoader size={24} />;
+    return <SpinnerLoader size={40} className="text-neutral-700" />;
   }
   return <PlayIcon size={14} weight="regular" className="text-neutral-400" />;
 }
@@ -149,7 +149,7 @@ export function getAccordionMeta(output: RunBlockToolOutput): {
   if (isRunBlockBlockOutput(output)) {
     const keys = Object.keys(output.outputs ?? {});
     return {
-      icon,
+      icon: <SpinnerLoader size={32} className="text-neutral-700" />,
       title: output.block_name,
       description:
         keys.length > 0
@@ -3,6 +3,7 @@ import { useBreakpoint } from "@/lib/hooks/useBreakpoint";
 import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
 import { useChat } from "@ai-sdk/react";
 import { DefaultChatTransport } from "ai";
+import { useRouter } from "next/navigation";
 import { useEffect, useMemo, useState } from "react";
 import { useChatSession } from "./useChatSession";

@@ -10,6 +11,7 @@ export function useCopilotPage() {
   const { isUserLoading, isLoggedIn } = useSupabase();
   const [isDrawerOpen, setIsDrawerOpen] = useState(false);
   const [pendingMessage, setPendingMessage] = useState<string | null>(null);
+  const router = useRouter();

   const {
     sessionId,
@@ -52,6 +54,10 @@ export function useCopilotPage() {
     transport: transport ?? undefined,
   });

+  useEffect(() => {
+    if (!isUserLoading && !isLoggedIn) router.replace("/login");
+  }, [isUserLoading, isLoggedIn]);
+
   useEffect(() => {
     if (!hydratedMessages || hydratedMessages.length === 0) return;
     setMessages((prev) => {

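Note (illustrative, not part of this diff): the tool components above drive their ProgressBar with useAsymptoticProgress(isOperating), whose implementation is not shown here. Below is a minimal sketch of how such a hook could work; the 150 ms tick, the 95% ceiling, and the 0.05 decay factor are assumed values chosen for illustration only.

import { useEffect, useRef, useState } from "react";

// Sketch: returns a 0-100 value that rises quickly and then levels off while
// `active` is true, and resets once the operation finishes.
export function useAsymptoticProgress(active: boolean): number {
  const [progress, setProgress] = useState(0);
  const timer = useRef<ReturnType<typeof setInterval> | null>(null);

  useEffect(() => {
    if (!active) {
      setProgress(0);
      return;
    }
    timer.current = setInterval(() => {
      // Move a fixed fraction of the remaining distance toward 95%,
      // so the bar keeps creeping forward without claiming completion.
      setProgress((p) => p + (95 - p) * 0.05);
    }, 150);
    return () => {
      if (timer.current) clearInterval(timer.current);
    };
  }, [active]);

  return progress;
}

The asymptotic curve keeps the bar moving during long create/edit operations without ever reaching 100% before the tool output actually arrives.
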
@@ -0,0 +1,631 @@
|
||||
"use client";
|
||||
import { useParams, useRouter } from "next/navigation";
|
||||
import { useQueryState } from "nuqs";
|
||||
import React, {
|
||||
useCallback,
|
||||
useEffect,
|
||||
useMemo,
|
||||
useRef,
|
||||
useState,
|
||||
} from "react";
|
||||
|
||||
import {
|
||||
Graph,
|
||||
GraphExecution,
|
||||
GraphExecutionID,
|
||||
GraphExecutionMeta,
|
||||
GraphID,
|
||||
LibraryAgent,
|
||||
LibraryAgentID,
|
||||
LibraryAgentPreset,
|
||||
LibraryAgentPresetID,
|
||||
Schedule,
|
||||
ScheduleID,
|
||||
} from "@/lib/autogpt-server-api";
|
||||
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
|
||||
import { exportAsJSONFile } from "@/lib/utils";
|
||||
|
||||
import DeleteConfirmDialog from "@/components/__legacy__/delete-confirm-dialog";
|
||||
import type { ButtonAction } from "@/components/__legacy__/types";
|
||||
import { Button } from "@/components/__legacy__/ui/button";
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
DialogDescription,
|
||||
DialogFooter,
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
} from "@/components/__legacy__/ui/dialog";
|
||||
import LoadingBox, { LoadingSpinner } from "@/components/__legacy__/ui/loading";
|
||||
import {
|
||||
useToast,
|
||||
useToastOnFail,
|
||||
} from "@/components/molecules/Toast/use-toast";
|
||||
import { AgentRunDetailsView } from "./components/agent-run-details-view";
|
||||
import { AgentRunDraftView } from "./components/agent-run-draft-view";
|
||||
import { CreatePresetDialog } from "./components/create-preset-dialog";
|
||||
import { useAgentRunsInfinite } from "./use-agent-runs";
|
||||
import { AgentRunsSelectorList } from "./components/agent-runs-selector-list";
|
||||
import { AgentScheduleDetailsView } from "./components/agent-schedule-details-view";
|
||||
|
||||
export function OldAgentLibraryView() {
|
||||
const { id: agentID }: { id: LibraryAgentID } = useParams();
|
||||
const [executionId, setExecutionId] = useQueryState("executionId");
|
||||
const toastOnFail = useToastOnFail();
|
||||
const { toast } = useToast();
|
||||
const router = useRouter();
|
||||
const api = useBackendAPI();
|
||||
|
||||
// ============================ STATE =============================
|
||||
|
||||
const [graph, setGraph] = useState<Graph | null>(null); // Graph version corresponding to LibraryAgent
|
||||
const [agent, setAgent] = useState<LibraryAgent | null>(null);
|
||||
const agentRunsQuery = useAgentRunsInfinite(graph?.id); // only runs once graph.id is known
|
||||
const agentRuns = agentRunsQuery.agentRuns;
|
||||
const [agentPresets, setAgentPresets] = useState<LibraryAgentPreset[]>([]);
|
||||
const [schedules, setSchedules] = useState<Schedule[]>([]);
|
||||
const [selectedView, selectView] = useState<
|
||||
| { type: "run"; id?: GraphExecutionID }
|
||||
| { type: "preset"; id: LibraryAgentPresetID }
|
||||
| { type: "schedule"; id: ScheduleID }
|
||||
>({ type: "run" });
|
||||
const [selectedRun, setSelectedRun] = useState<
|
||||
GraphExecution | GraphExecutionMeta | null
|
||||
>(null);
|
||||
const selectedSchedule =
|
||||
selectedView.type == "schedule"
|
||||
? schedules.find((s) => s.id == selectedView.id)
|
||||
: null;
|
||||
const [isFirstLoad, setIsFirstLoad] = useState<boolean>(true);
|
||||
const [agentDeleteDialogOpen, setAgentDeleteDialogOpen] =
|
||||
useState<boolean>(false);
|
||||
const [confirmingDeleteAgentRun, setConfirmingDeleteAgentRun] =
|
||||
useState<GraphExecutionMeta | null>(null);
|
||||
const [confirmingDeleteAgentPreset, setConfirmingDeleteAgentPreset] =
|
||||
useState<LibraryAgentPresetID | null>(null);
|
||||
const [copyAgentDialogOpen, setCopyAgentDialogOpen] = useState(false);
|
||||
const [creatingPresetFromExecutionID, setCreatingPresetFromExecutionID] =
|
||||
useState<GraphExecutionID | null>(null);
|
||||
|
||||
// Set page title with agent name
|
||||
useEffect(() => {
|
||||
if (agent) {
|
||||
document.title = `${agent.name} - Library - AutoGPT Platform`;
|
||||
}
|
||||
}, [agent]);
|
||||
|
||||
const openRunDraftView = useCallback(() => {
|
||||
selectView({ type: "run" });
|
||||
}, []);
|
||||
|
||||
const selectRun = useCallback((id: GraphExecutionID) => {
|
||||
selectView({ type: "run", id });
|
||||
}, []);
|
||||
|
||||
const selectPreset = useCallback((id: LibraryAgentPresetID) => {
|
||||
selectView({ type: "preset", id });
|
||||
}, []);
|
||||
|
||||
const selectSchedule = useCallback((id: ScheduleID) => {
|
||||
selectView({ type: "schedule", id });
|
||||
}, []);
|
||||
|
||||
const graphVersions = useRef<Record<number, Graph>>({});
|
||||
const loadingGraphVersions = useRef<Record<number, Promise<Graph>>>({});
|
||||
const getGraphVersion = useCallback(
|
||||
async (graphID: GraphID, version: number) => {
|
||||
if (version in graphVersions.current)
|
||||
return graphVersions.current[version];
|
||||
if (version in loadingGraphVersions.current)
|
||||
return loadingGraphVersions.current[version];
|
||||
|
||||
const pendingGraph = api.getGraph(graphID, version).then((graph) => {
|
||||
graphVersions.current[version] = graph;
|
||||
return graph;
|
||||
});
|
||||
// Cache promise as well to avoid duplicate requests
|
||||
loadingGraphVersions.current[version] = pendingGraph;
|
||||
return pendingGraph;
|
||||
},
|
||||
[api, graphVersions, loadingGraphVersions],
|
||||
);
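Note (illustrative, not part of this diff): getGraphVersion above caches both the resolved graph and the in-flight promise so that concurrent callers share one request. A standalone sketch of that pattern follows; the `fetcher` parameter stands in for api.getGraph and the Graph shape is a simplified assumption, not the real type from this codebase.

type Graph = { id: string; version: number; name?: string };

export function makeGraphVersionCache(
  fetcher: (id: string, version: number) => Promise<Graph>,
) {
  const resolved = new Map<number, Graph>();
  const inFlight = new Map<number, Promise<Graph>>();

  return async function getCachedGraphVersion(
    id: string,
    version: number,
  ): Promise<Graph> {
    const hit = resolved.get(version);
    if (hit) return hit;

    const pending = inFlight.get(version);
    if (pending) return pending;

    const request = fetcher(id, version).then((graph) => {
      resolved.set(version, graph); // value cache takes over from here
      inFlight.delete(version);
      return graph;
    });
    inFlight.set(version, request); // cache the promise too, to dedupe calls
    return request;
  };
}

Caching the promise as well as the value is what prevents duplicate requests when several runs that reference the same graph version load at once.
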
|
||||
|
||||
const lastRefresh = useRef<number>(0);
|
||||
const refreshPageData = useCallback(() => {
|
||||
if (Date.now() - lastRefresh.current < 2e3) return; // 2 second debounce
|
||||
lastRefresh.current = Date.now();
|
||||
|
||||
api.getLibraryAgent(agentID).then((agent) => {
|
||||
setAgent(agent);
|
||||
|
||||
getGraphVersion(agent.graph_id, agent.graph_version).then(
|
||||
(_graph) =>
|
||||
(graph && graph.version == _graph.version) || setGraph(_graph),
|
||||
);
|
||||
Promise.all([
|
||||
agentRunsQuery.refetchRuns(),
|
||||
api.listLibraryAgentPresets({
|
||||
graph_id: agent.graph_id,
|
||||
page_size: 100,
|
||||
}),
|
||||
]).then(([runsQueryResult, presets]) => {
|
||||
setAgentPresets(presets.presets);
|
||||
|
||||
const newestAgentRunsResponse = runsQueryResult.data?.pages[0];
|
||||
if (!newestAgentRunsResponse || newestAgentRunsResponse.status != 200)
|
||||
return;
|
||||
const newestAgentRuns = newestAgentRunsResponse.data.executions;
|
||||
// Preload the corresponding graph versions for the latest 10 runs
|
||||
new Set(
|
||||
newestAgentRuns.slice(0, 10).map((run) => run.graph_version),
|
||||
).forEach((version) => getGraphVersion(agent.graph_id, version));
|
||||
});
|
||||
});
|
||||
}, [api, agentID, getGraphVersion, graph]);
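Note (illustrative, not part of this diff): refreshPageData guards itself with a timestamp check (the "2 second debounce" comment) so bursts of WebSocket events trigger at most one refresh per window. A generic sketch of that leading-edge throttle, with the 2000 ms default as an assumed parameter:

function throttleLeading<A extends unknown[]>(
  fn: (...args: A) => void,
  minIntervalMs = 2000,
): (...args: A) => void {
  let last = 0;
  return (...args: A) => {
    const now = Date.now();
    // Drop calls that arrive within the window of the last accepted call.
    if (now - last < minIntervalMs) return;
    last = now;
    fn(...args);
  };
}

// Usage (illustrative): const refresh = throttleLeading(loadPageData, 2000);
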
|
||||
|
||||
// On first load: select the latest run
|
||||
useEffect(() => {
|
||||
// Only for first load or first execution
|
||||
if (selectedView.id || !isFirstLoad) return;
|
||||
if (agentRuns.length == 0 && agentPresets.length == 0) return;
|
||||
|
||||
setIsFirstLoad(false);
|
||||
if (agentRuns.length > 0) {
|
||||
// select latest run
|
||||
const latestRun = agentRuns.reduce((latest, current) => {
|
||||
if (!latest.started_at && !current.started_at) return latest;
|
||||
if (!latest.started_at) return current;
|
||||
if (!current.started_at) return latest;
|
||||
return latest.started_at > current.started_at ? latest : current;
|
||||
}, agentRuns[0]);
|
||||
selectRun(latestRun.id as GraphExecutionID);
|
||||
} else {
|
||||
// select top preset
|
||||
const latestPreset = agentPresets.toSorted(
|
||||
(a, b) => b.updated_at.getTime() - a.updated_at.getTime(),
|
||||
)[0];
|
||||
selectPreset(latestPreset.id);
|
||||
}
|
||||
}, [
|
||||
isFirstLoad,
|
||||
selectedView.id,
|
||||
agentRuns,
|
||||
agentPresets,
|
||||
selectRun,
|
||||
selectPreset,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
if (executionId) {
|
||||
selectRun(executionId as GraphExecutionID);
|
||||
setExecutionId(null);
|
||||
}
|
||||
}, [executionId, selectRun, setExecutionId]);
|
||||
|
||||
// Initial load
|
||||
useEffect(() => {
|
||||
refreshPageData();
|
||||
|
||||
// Show a toast when the WebSocket connection disconnects
|
||||
let connectionToast: ReturnType<typeof toast> | null = null;
|
||||
const cancelDisconnectHandler = api.onWebSocketDisconnect(() => {
|
||||
connectionToast ??= toast({
|
||||
title: "Connection to server was lost",
|
||||
variant: "destructive",
|
||||
description: (
|
||||
<div className="flex items-center">
|
||||
Trying to reconnect...
|
||||
<LoadingSpinner className="ml-1.5 size-3.5" />
|
||||
</div>
|
||||
),
|
||||
duration: Infinity,
|
||||
dismissable: true,
|
||||
});
|
||||
});
|
||||
const cancelConnectHandler = api.onWebSocketConnect(() => {
|
||||
if (connectionToast)
|
||||
connectionToast.update({
|
||||
id: connectionToast.id,
|
||||
title: "✅ Connection re-established",
|
||||
variant: "default",
|
||||
description: (
|
||||
<div className="flex items-center">
|
||||
Refreshing data...
|
||||
<LoadingSpinner className="ml-1.5 size-3.5" />
|
||||
</div>
|
||||
),
|
||||
duration: 2000,
|
||||
dismissable: true,
|
||||
});
|
||||
connectionToast = null;
|
||||
});
|
||||
return () => {
|
||||
cancelDisconnectHandler();
|
||||
cancelConnectHandler();
|
||||
};
|
||||
}, []);
|
||||
|
||||
// Subscribe to WebSocket updates for agent runs
|
||||
useEffect(() => {
|
||||
if (!agent?.graph_id) return;
|
||||
|
||||
return api.onWebSocketConnect(() => {
|
||||
refreshPageData(); // Sync up on (re)connect
|
||||
|
||||
// Subscribe to all executions for this agent
|
||||
api.subscribeToGraphExecutions(agent.graph_id);
|
||||
});
|
||||
}, [api, agent?.graph_id, refreshPageData]);
|
||||
|
||||
// Handle execution updates
|
||||
useEffect(() => {
|
||||
const detachExecUpdateHandler = api.onWebSocketMessage(
|
||||
"graph_execution_event",
|
||||
(data) => {
|
||||
if (data.graph_id != agent?.graph_id) return;
|
||||
|
||||
agentRunsQuery.upsertAgentRun(data);
|
||||
if (data.id === selectedView.id) {
|
||||
// Update currently viewed run
|
||||
setSelectedRun(data);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
return () => {
|
||||
detachExecUpdateHandler();
|
||||
};
|
||||
}, [api, agent?.graph_id, selectedView.id]);
|
||||
|
||||
// Pre-load selectedRun based on selectedView
|
||||
useEffect(() => {
|
||||
if (selectedView.type != "run" || !selectedView.id) return;
|
||||
|
||||
const newSelectedRun = agentRuns.find((run) => run.id == selectedView.id);
|
||||
if (selectedView.id !== selectedRun?.id) {
|
||||
// Pull partial data from "cache" while waiting for the rest to load
|
||||
setSelectedRun((newSelectedRun as GraphExecutionMeta) ?? null);
|
||||
}
|
||||
}, [api, selectedView, agentRuns, selectedRun?.id]);
|
||||
|
||||
// Load selectedRun based on selectedView; refresh on agent refresh
|
||||
useEffect(() => {
|
||||
if (selectedView.type != "run" || !selectedView.id || !agent) return;
|
||||
|
||||
api
|
||||
.getGraphExecutionInfo(agent.graph_id, selectedView.id)
|
||||
.then(async (run) => {
|
||||
// Ensure corresponding graph version is available before rendering I/O
|
||||
await getGraphVersion(run.graph_id, run.graph_version);
|
||||
setSelectedRun(run);
|
||||
});
|
||||
}, [api, selectedView, agent, getGraphVersion]);
|
||||
|
||||
const fetchSchedules = useCallback(async () => {
|
||||
if (!agent) return;
|
||||
|
||||
setSchedules(await api.listGraphExecutionSchedules(agent.graph_id));
|
||||
}, [api, agent?.graph_id]);
|
||||
|
||||
useEffect(() => {
|
||||
fetchSchedules();
|
||||
}, [fetchSchedules]);
|
||||
|
||||
// =========================== ACTIONS ============================
|
||||
|
||||
const deleteRun = useCallback(
|
||||
async (run: GraphExecutionMeta) => {
|
||||
if (run.status == "RUNNING" || run.status == "QUEUED") {
|
||||
await api.stopGraphExecution(run.graph_id, run.id);
|
||||
}
|
||||
await api.deleteGraphExecution(run.id);
|
||||
|
||||
setConfirmingDeleteAgentRun(null);
|
||||
if (selectedView.type == "run" && selectedView.id == run.id) {
|
||||
openRunDraftView();
|
||||
}
|
||||
agentRunsQuery.removeAgentRun(run.id);
|
||||
},
|
||||
[api, selectedView, openRunDraftView],
|
||||
);
|
||||
|
||||
const deletePreset = useCallback(
|
||||
async (presetID: LibraryAgentPresetID) => {
|
||||
await api.deleteLibraryAgentPreset(presetID);
|
||||
|
||||
setConfirmingDeleteAgentPreset(null);
|
||||
if (selectedView.type == "preset" && selectedView.id == presetID) {
|
||||
openRunDraftView();
|
||||
}
|
||||
setAgentPresets((presets) => presets.filter((p) => p.id !== presetID));
|
||||
},
|
||||
[api, selectedView, openRunDraftView],
|
||||
);
|
||||
|
||||
const deleteSchedule = useCallback(
|
||||
async (scheduleID: ScheduleID) => {
|
||||
const removedSchedule =
|
||||
await api.deleteGraphExecutionSchedule(scheduleID);
|
||||
|
||||
setSchedules((schedules) => {
|
||||
const newSchedules = schedules.filter(
|
||||
(s) => s.id !== removedSchedule.id,
|
||||
);
|
||||
if (
|
||||
selectedView.type == "schedule" &&
|
||||
selectedView.id == removedSchedule.id
|
||||
) {
|
||||
if (newSchedules.length > 0) {
|
||||
// Select next schedule if available
|
||||
selectSchedule(newSchedules[0].id);
|
||||
} else {
|
||||
// Reset to draft view if current schedule was deleted
|
||||
openRunDraftView();
|
||||
}
|
||||
}
|
||||
return newSchedules;
|
||||
});
|
||||
openRunDraftView();
|
||||
},
|
||||
[schedules, api],
|
||||
);
|
||||
|
||||
const handleCreatePresetFromRun = useCallback(
|
||||
async (name: string, description: string) => {
|
||||
if (!creatingPresetFromExecutionID) return;
|
||||
|
||||
await api
|
||||
.createLibraryAgentPreset({
|
||||
name,
|
||||
description,
|
||||
graph_execution_id: creatingPresetFromExecutionID,
|
||||
})
|
||||
.then((preset) => {
|
||||
setAgentPresets((prev) => [...prev, preset]);
|
||||
selectPreset(preset.id);
|
||||
setCreatingPresetFromExecutionID(null);
|
||||
})
|
||||
.catch(toastOnFail("create a preset"));
|
||||
},
|
||||
[api, creatingPresetFromExecutionID, selectPreset, toast],
|
||||
);
|
||||
|
||||
const downloadGraph = useCallback(
|
||||
async () =>
|
||||
agent &&
|
||||
// Export sanitized graph from backend
|
||||
api
|
||||
.getGraph(agent.graph_id, agent.graph_version, true)
|
||||
.then((graph) =>
|
||||
exportAsJSONFile(graph, `${graph.name}_v${graph.version}.json`),
|
||||
),
|
||||
[api, agent],
|
||||
);
|
||||
|
||||
const copyAgent = useCallback(async () => {
|
||||
setCopyAgentDialogOpen(false);
|
||||
api
|
||||
.forkLibraryAgent(agentID)
|
||||
.then((newAgent) => {
|
||||
router.push(`/library/agents/${newAgent.id}`);
|
||||
})
|
||||
.catch((error) => {
|
||||
console.error("Error copying agent:", error);
|
||||
toast({
|
||||
title: "Error copying agent",
|
||||
description: `An error occurred while copying the agent: ${error.message}`,
|
||||
variant: "destructive",
|
||||
});
|
||||
});
|
||||
}, [agentID, api, router, toast]);
|
||||
|
||||
const agentActions: ButtonAction[] = useMemo(
|
||||
() => [
|
||||
{
|
||||
label: "Customize agent",
|
||||
href: `/build?flowID=${agent?.graph_id}&flowVersion=${agent?.graph_version}`,
|
||||
disabled: !agent?.can_access_graph,
|
||||
},
|
||||
{ label: "Export agent to file", callback: downloadGraph },
|
||||
...(!agent?.can_access_graph
|
||||
? [
|
||||
{
|
||||
label: "Edit a copy",
|
||||
callback: () => setCopyAgentDialogOpen(true),
|
||||
},
|
||||
]
|
||||
: []),
|
||||
{
|
||||
label: "Delete agent",
|
||||
callback: () => setAgentDeleteDialogOpen(true),
|
||||
},
|
||||
],
|
||||
[agent, downloadGraph],
|
||||
);
|
||||
|
||||
const runGraph =
|
||||
graphVersions.current[selectedRun?.graph_version ?? 0] ?? graph;
|
||||
|
||||
const onCreateSchedule = useCallback(
|
||||
(schedule: Schedule) => {
|
||||
setSchedules((prev) => [...prev, schedule]);
|
||||
selectSchedule(schedule.id);
|
||||
},
|
||||
[selectView],
|
||||
);
|
||||
|
||||
const onCreatePreset = useCallback(
|
||||
(preset: LibraryAgentPreset) => {
|
||||
setAgentPresets((prev) => [...prev, preset]);
|
||||
selectPreset(preset.id);
|
||||
},
|
||||
[selectPreset],
|
||||
);
|
||||
|
||||
const onUpdatePreset = useCallback(
|
||||
(updated: LibraryAgentPreset) => {
|
||||
setAgentPresets((prev) =>
|
||||
prev.map((p) => (p.id === updated.id ? updated : p)),
|
||||
);
|
||||
selectPreset(updated.id);
|
||||
},
|
||||
[selectPreset],
|
||||
);
|
||||
|
||||
if (!agent || !graph) {
|
||||
return <LoadingBox className="h-[90vh]" />;
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="container justify-stretch p-0 pt-16 lg:flex">
|
||||
{/* Sidebar w/ list of runs */}
|
||||
{/* TODO: render this below header in sm and md layouts */}
|
||||
<AgentRunsSelectorList
|
||||
className="agpt-div w-full border-b pb-2 lg:w-auto lg:border-b-0 lg:border-r lg:pb-0"
|
||||
agent={agent}
|
||||
agentRunsQuery={agentRunsQuery}
|
||||
agentPresets={agentPresets}
|
||||
schedules={schedules}
|
||||
selectedView={selectedView}
|
||||
onSelectRun={selectRun}
|
||||
onSelectPreset={selectPreset}
|
||||
onSelectSchedule={selectSchedule}
|
||||
onSelectDraftNewRun={openRunDraftView}
|
||||
doDeleteRun={setConfirmingDeleteAgentRun}
|
||||
doDeletePreset={setConfirmingDeleteAgentPreset}
|
||||
doDeleteSchedule={deleteSchedule}
|
||||
doCreatePresetFromRun={setCreatingPresetFromExecutionID}
|
||||
/>
|
||||
|
||||
<div className="flex-1">
|
||||
{/* Header */}
|
||||
<div className="agpt-div w-full border-b">
|
||||
<h1
|
||||
data-testid="agent-title"
|
||||
className="font-poppins text-3xl font-medium"
|
||||
>
|
||||
{
|
||||
agent.name /* TODO: use dynamic/custom run title - https://github.com/Significant-Gravitas/AutoGPT/issues/9184 */
|
||||
}
|
||||
</h1>
|
||||
</div>
|
||||
|
||||
{/* Run / Schedule views */}
|
||||
{(selectedView.type == "run" && selectedView.id ? (
|
||||
selectedRun && runGraph ? (
|
||||
<AgentRunDetailsView
|
||||
agent={agent}
|
||||
graph={runGraph}
|
||||
run={selectedRun}
|
||||
agentActions={agentActions}
|
||||
onRun={selectRun}
|
||||
doDeleteRun={() => setConfirmingDeleteAgentRun(selectedRun)}
|
||||
doCreatePresetFromRun={() =>
|
||||
setCreatingPresetFromExecutionID(selectedRun.id)
|
||||
}
|
||||
/>
|
||||
) : null
|
||||
) : selectedView.type == "run" ? (
|
||||
/* Draft new runs / Create new presets */
|
||||
<AgentRunDraftView
|
||||
graph={graph}
|
||||
onRun={selectRun}
|
||||
onCreateSchedule={onCreateSchedule}
|
||||
onCreatePreset={onCreatePreset}
|
||||
agentActions={agentActions}
|
||||
recommendedScheduleCron={agent?.recommended_schedule_cron || null}
|
||||
/>
|
||||
) : selectedView.type == "preset" ? (
|
||||
/* Edit & update presets */
|
||||
<AgentRunDraftView
|
||||
graph={graph}
|
||||
agentPreset={
|
||||
agentPresets.find((preset) => preset.id == selectedView.id)!
|
||||
}
|
||||
onRun={selectRun}
|
||||
recommendedScheduleCron={agent?.recommended_schedule_cron || null}
|
||||
onCreateSchedule={onCreateSchedule}
|
||||
onUpdatePreset={onUpdatePreset}
|
||||
doDeletePreset={setConfirmingDeleteAgentPreset}
|
||||
agentActions={agentActions}
|
||||
/>
|
||||
) : selectedView.type == "schedule" ? (
|
||||
selectedSchedule &&
|
||||
graph && (
|
||||
<AgentScheduleDetailsView
|
||||
graph={graph}
|
||||
schedule={selectedSchedule}
|
||||
// agent={agent}
|
||||
agentActions={agentActions}
|
||||
onForcedRun={selectRun}
|
||||
doDeleteSchedule={deleteSchedule}
|
||||
/>
|
||||
)
|
||||
) : null) || <LoadingBox className="h-[70vh]" />}
|
||||
|
||||
<DeleteConfirmDialog
|
||||
entityType="agent"
|
||||
open={agentDeleteDialogOpen}
|
||||
onOpenChange={setAgentDeleteDialogOpen}
|
||||
onDoDelete={() =>
|
||||
agent &&
|
||||
api.deleteLibraryAgent(agent.id).then(() => router.push("/library"))
|
||||
}
|
||||
/>
|
||||
|
||||
<DeleteConfirmDialog
|
||||
entityType="agent run"
|
||||
open={!!confirmingDeleteAgentRun}
|
||||
onOpenChange={(open) => !open && setConfirmingDeleteAgentRun(null)}
|
||||
onDoDelete={() =>
|
||||
confirmingDeleteAgentRun && deleteRun(confirmingDeleteAgentRun)
|
||||
}
|
||||
/>
|
||||
<DeleteConfirmDialog
|
||||
entityType={agent.has_external_trigger ? "trigger" : "agent preset"}
|
||||
open={!!confirmingDeleteAgentPreset}
|
||||
onOpenChange={(open) => !open && setConfirmingDeleteAgentPreset(null)}
|
||||
onDoDelete={() =>
|
||||
confirmingDeleteAgentPreset &&
|
||||
deletePreset(confirmingDeleteAgentPreset)
|
||||
}
|
||||
/>
|
||||
{/* Copy agent confirmation dialog */}
|
||||
<Dialog
|
||||
onOpenChange={setCopyAgentDialogOpen}
|
||||
open={copyAgentDialogOpen}
|
||||
>
|
||||
<DialogContent>
|
||||
<DialogHeader>
|
||||
<DialogTitle>You're making an editable copy</DialogTitle>
|
||||
<DialogDescription className="pt-2">
|
||||
The original Marketplace agent stays the same and cannot be
|
||||
edited. We'll save a new version of this agent to your
|
||||
Library. From there, you can customize it however you'd
|
||||
like by clicking "Customize agent" — this will open
|
||||
the builder where you can see and modify the inner workings.
|
||||
</DialogDescription>
|
||||
</DialogHeader>
|
||||
<DialogFooter className="justify-end">
|
||||
<Button
|
||||
type="button"
|
||||
variant="outline"
|
||||
onClick={() => setCopyAgentDialogOpen(false)}
|
||||
>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button type="button" onClick={copyAgent}>
|
||||
Continue
|
||||
</Button>
|
||||
</DialogFooter>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
<CreatePresetDialog
|
||||
open={!!creatingPresetFromExecutionID}
|
||||
onOpenChange={() => setCreatingPresetFromExecutionID(null)}
|
||||
onConfirm={handleCreatePresetFromRun}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,445 @@
|
||||
"use client";
|
||||
import { format, formatDistanceToNow, formatDistanceStrict } from "date-fns";
|
||||
import React, { useCallback, useMemo, useEffect } from "react";
|
||||
|
||||
import {
|
||||
Graph,
|
||||
GraphExecution,
|
||||
GraphExecutionID,
|
||||
GraphExecutionMeta,
|
||||
LibraryAgent,
|
||||
} from "@/lib/autogpt-server-api";
|
||||
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
|
||||
|
||||
import ActionButtonGroup from "@/components/__legacy__/action-button-group";
|
||||
import type { ButtonAction } from "@/components/__legacy__/types";
|
||||
import {
|
||||
Card,
|
||||
CardContent,
|
||||
CardHeader,
|
||||
CardTitle,
|
||||
} from "@/components/__legacy__/ui/card";
|
||||
import {
|
||||
IconRefresh,
|
||||
IconSquare,
|
||||
IconCircleAlert,
|
||||
} from "@/components/__legacy__/ui/icons";
|
||||
import { Input } from "@/components/__legacy__/ui/input";
|
||||
import LoadingBox from "@/components/__legacy__/ui/loading";
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipContent,
|
||||
TooltipProvider,
|
||||
TooltipTrigger,
|
||||
} from "@/components/atoms/Tooltip/BaseTooltip";
|
||||
import { useToastOnFail } from "@/components/molecules/Toast/use-toast";
|
||||
|
||||
import { AgentRunStatus, agentRunStatusMap } from "./agent-run-status-chip";
|
||||
import useCredits from "@/hooks/useCredits";
|
||||
import { AgentRunOutputView } from "./agent-run-output-view";
|
||||
import { analytics } from "@/services/analytics";
|
||||
import { PendingReviewsList } from "@/components/organisms/PendingReviewsList/PendingReviewsList";
|
||||
import { usePendingReviewsForExecution } from "@/hooks/usePendingReviews";
|
||||
|
||||
export function AgentRunDetailsView({
|
||||
agent,
|
||||
graph,
|
||||
run,
|
||||
agentActions,
|
||||
onRun,
|
||||
doDeleteRun,
|
||||
doCreatePresetFromRun,
|
||||
}: {
|
||||
agent: LibraryAgent;
|
||||
graph: Graph;
|
||||
run: GraphExecution | GraphExecutionMeta;
|
||||
agentActions: ButtonAction[];
|
||||
onRun: (runID: GraphExecutionID) => void;
|
||||
doDeleteRun: () => void;
|
||||
doCreatePresetFromRun: () => void;
|
||||
}): React.ReactNode {
|
||||
const api = useBackendAPI();
|
||||
const { formatCredits } = useCredits();
|
||||
|
||||
const runStatus: AgentRunStatus = useMemo(
|
||||
() => agentRunStatusMap[run.status],
|
||||
[run],
|
||||
);
|
||||
|
||||
const {
|
||||
pendingReviews,
|
||||
isLoading: reviewsLoading,
|
||||
refetch: refetchReviews,
|
||||
} = usePendingReviewsForExecution(run.id);
|
||||
|
||||
const toastOnFail = useToastOnFail();
|
||||
|
||||
// Refetch pending reviews when execution status changes to REVIEW
|
||||
useEffect(() => {
|
||||
if (runStatus === "review" && run.id) {
|
||||
refetchReviews();
|
||||
}
|
||||
}, [runStatus, run.id, refetchReviews]);
|
||||
|
||||
const infoStats: { label: string; value: React.ReactNode }[] = useMemo(() => {
|
||||
if (!run) return [];
|
||||
return [
|
||||
{
|
||||
label: "Status",
|
||||
value: runStatus.charAt(0).toUpperCase() + runStatus.slice(1),
|
||||
},
|
||||
{
|
||||
label: "Started",
|
||||
value: run.started_at
|
||||
? `${formatDistanceToNow(run.started_at, { addSuffix: true })}, ${format(run.started_at, "HH:mm")}`
|
||||
: "—",
|
||||
},
|
||||
...(run.stats
|
||||
? [
|
||||
{
|
||||
label: "Duration",
|
||||
value: formatDistanceStrict(0, run.stats.duration * 1000),
|
||||
},
|
||||
{ label: "Steps", value: run.stats.node_exec_count },
|
||||
{ label: "Cost", value: formatCredits(run.stats.cost) },
|
||||
]
|
||||
: []),
|
||||
];
|
||||
}, [run, runStatus, formatCredits]);
|
||||
|
||||
const agentRunInputs:
|
||||
| Record<
|
||||
string,
|
||||
{
|
||||
title?: string;
|
||||
/* type: BlockIOSubType; */
|
||||
value: string | number | undefined;
|
||||
}
|
||||
>
|
||||
| undefined = useMemo(() => {
|
||||
if (!run.inputs) return undefined;
|
||||
// TODO: show (link to) preset - https://github.com/Significant-Gravitas/AutoGPT/issues/9168
|
||||
|
||||
// Add type info from agent input schema
|
||||
return Object.fromEntries(
|
||||
Object.entries(run.inputs).map(([k, v]) => [
|
||||
k,
|
||||
{
|
||||
title: graph.input_schema.properties[k]?.title,
|
||||
// type: graph.input_schema.properties[k].type, // TODO: implement typed graph inputs
|
||||
value: typeof v == "object" ? JSON.stringify(v, undefined, 2) : v,
|
||||
},
|
||||
]),
|
||||
);
|
||||
}, [graph, run]);
|
||||
|
||||
const runAgain = useCallback(() => {
|
||||
if (
|
||||
!run.inputs ||
|
||||
!(graph.credentials_input_schema?.required ?? []).every(
|
||||
(k) => k in (run.credential_inputs ?? {}),
|
||||
)
|
||||
)
|
||||
return;
|
||||
|
||||
if (run.preset_id) {
|
||||
return api
|
||||
.executeLibraryAgentPreset(
|
||||
run.preset_id,
|
||||
run.inputs!,
|
||||
run.credential_inputs!,
|
||||
)
|
||||
.then(({ id }) => {
|
||||
analytics.sendDatafastEvent("run_agent", {
|
||||
name: graph.name,
|
||||
id: graph.id,
|
||||
});
|
||||
onRun(id);
|
||||
})
|
||||
.catch(toastOnFail("execute agent preset"));
|
||||
}
|
||||
|
||||
return api
|
||||
.executeGraph(
|
||||
graph.id,
|
||||
graph.version,
|
||||
run.inputs!,
|
||||
run.credential_inputs!,
|
||||
"library",
|
||||
)
|
||||
.then(({ id }) => {
|
||||
analytics.sendDatafastEvent("run_agent", {
|
||||
name: graph.name,
|
||||
id: graph.id,
|
||||
});
|
||||
onRun(id);
|
||||
})
|
||||
.catch(toastOnFail("execute agent"));
|
||||
}, [api, graph, run, onRun, toastOnFail]);
|
||||
|
||||
const stopRun = useCallback(
|
||||
() => api.stopGraphExecution(graph.id, run.id),
|
||||
[api, graph.id, run.id],
|
||||
);
|
||||
|
||||
const agentRunOutputs:
|
||||
| Record<
|
||||
string,
|
||||
{
|
||||
title?: string;
|
||||
/* type: BlockIOSubType; */
|
||||
values: Array<React.ReactNode>;
|
||||
}
|
||||
>
|
||||
| null
|
||||
| undefined = useMemo(() => {
|
||||
if (!("outputs" in run)) return undefined;
|
||||
if (!["running", "success", "failed", "stopped"].includes(runStatus))
|
||||
return null;
|
||||
|
||||
// Add type info from agent input schema
|
||||
return Object.fromEntries(
|
||||
Object.entries(run.outputs).map(([k, vv]) => [
|
||||
k,
|
||||
{
|
||||
title: graph.output_schema.properties[k].title,
|
||||
/* type: agent.output_schema.properties[k].type */
|
||||
values: vv.map((v) =>
|
||||
typeof v == "object" ? JSON.stringify(v, undefined, 2) : v,
|
||||
),
|
||||
},
|
||||
]),
|
||||
);
|
||||
}, [graph, run, runStatus]);
|
||||
|
||||
const runActions: ButtonAction[] = useMemo(
|
||||
() => [
|
||||
...(["running", "queued"].includes(runStatus)
|
||||
? ([
|
||||
{
|
||||
label: (
|
||||
<>
|
||||
<IconSquare className="mr-2 size-4" />
|
||||
Stop run
|
||||
</>
|
||||
),
|
||||
variant: "secondary",
|
||||
callback: stopRun,
|
||||
},
|
||||
] satisfies ButtonAction[])
|
||||
: []),
|
||||
...(["success", "failed", "stopped"].includes(runStatus) &&
|
||||
!graph.has_external_trigger &&
|
||||
(graph.credentials_input_schema?.required ?? []).every(
|
||||
(k) => k in (run.credential_inputs ?? {}),
|
||||
)
|
||||
? [
|
||||
{
|
||||
label: (
|
||||
<>
|
||||
<IconRefresh className="mr-2 size-4" />
|
||||
Run again
|
||||
</>
|
||||
),
|
||||
callback: runAgain,
|
||||
dataTestId: "run-again-button",
|
||||
},
|
||||
]
|
||||
: []),
|
||||
...(agent.can_access_graph
|
||||
? [
|
||||
{
|
||||
label: "Open run in builder",
|
||||
href: `/build?flowID=${run.graph_id}&flowVersion=${run.graph_version}&flowExecutionID=${run.id}`,
|
||||
},
|
||||
]
|
||||
: []),
|
||||
{ label: "Create preset from run", callback: doCreatePresetFromRun },
|
||||
{ label: "Delete run", variant: "secondary", callback: doDeleteRun },
|
||||
],
|
||||
[
|
||||
runStatus,
|
||||
runAgain,
|
||||
stopRun,
|
||||
doDeleteRun,
|
||||
doCreatePresetFromRun,
|
||||
graph.has_external_trigger,
|
||||
graph.credentials_input_schema?.required,
|
||||
agent.can_access_graph,
|
||||
run.graph_id,
|
||||
run.graph_version,
|
||||
run.id,
|
||||
],
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="agpt-div flex gap-6">
|
||||
<div className="flex flex-1 flex-col gap-4">
|
||||
<Card className="agpt-box">
|
||||
<CardHeader>
|
||||
<CardTitle className="font-poppins text-lg">Info</CardTitle>
|
||||
</CardHeader>
|
||||
|
||||
<CardContent>
|
||||
<div className="flex justify-stretch gap-4">
|
||||
{infoStats.map(({ label, value }) => (
|
||||
<div key={label} className="flex-1">
|
||||
<p className="text-sm font-medium text-black">{label}</p>
|
||||
<p className="text-sm text-neutral-600">{value}</p>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
{run.status === "FAILED" && (
|
||||
<div className="mt-4 rounded-md border border-red-200 bg-red-50 p-3 dark:border-red-800 dark:bg-red-900/20">
|
||||
<p className="text-sm text-red-800 dark:text-red-200">
|
||||
<strong>Error:</strong>{" "}
|
||||
{run.stats?.error ||
|
||||
"The execution failed due to an internal error. You can re-run the agent to retry."}
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
</CardContent>
|
||||
</Card>
|
||||
|
||||
{/* Smart Agent Execution Summary */}
|
||||
{run.stats?.activity_status && (
|
||||
<Card className="agpt-box">
|
||||
<CardHeader>
|
||||
<CardTitle className="flex items-center gap-2 font-poppins text-lg">
|
||||
Task Summary
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<IconCircleAlert className="size-4 cursor-help text-neutral-500 hover:text-neutral-700" />
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p className="max-w-xs">
|
||||
This AI-generated summary describes how the agent
|
||||
handled your task. It’s an experimental feature and may
|
||||
occasionally be inaccurate.
|
||||
</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
</CardTitle>
|
||||
</CardHeader>
|
||||
<CardContent className="space-y-4">
|
||||
<p className="text-sm leading-relaxed text-neutral-700">
|
||||
{run.stats.activity_status}
|
||||
</p>
|
||||
|
||||
{/* Correctness Score */}
|
||||
{typeof run.stats.correctness_score === "number" && (
|
||||
<div className="flex items-center gap-3 rounded-lg bg-neutral-50 p-3">
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="text-sm font-medium text-neutral-600">
|
||||
Success Estimate:
|
||||
</span>
|
||||
<div className="flex items-center gap-2">
|
||||
<div className="relative h-2 w-16 overflow-hidden rounded-full bg-neutral-200">
|
||||
<div
|
||||
className={`h-full transition-all ${
|
||||
run.stats.correctness_score >= 0.8
|
||||
? "bg-green-500"
|
||||
: run.stats.correctness_score >= 0.6
|
||||
? "bg-yellow-500"
|
||||
: run.stats.correctness_score >= 0.4
|
||||
? "bg-orange-500"
|
||||
: "bg-red-500"
|
||||
}`}
|
||||
style={{
|
||||
width: `${Math.round(run.stats.correctness_score * 100)}%`,
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
<span className="text-sm font-medium">
|
||||
{Math.round(run.stats.correctness_score * 100)}%
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<IconCircleAlert className="size-4 cursor-help text-neutral-400 hover:text-neutral-600" />
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p className="max-w-xs">
|
||||
AI-generated estimate of how well this execution
|
||||
achieved its intended purpose. This score indicates
|
||||
{run.stats.correctness_score >= 0.8
|
||||
? " the agent was highly successful."
|
||||
: run.stats.correctness_score >= 0.6
|
||||
? " the agent was mostly successful with minor issues."
|
||||
: run.stats.correctness_score >= 0.4
|
||||
? " the agent was partially successful with some gaps."
|
||||
: " the agent had limited success with significant issues."}
|
||||
</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
</div>
|
||||
)}
|
||||
</CardContent>
|
||||
</Card>
|
||||
)}
|
||||
|
||||
{agentRunOutputs !== null && (
|
||||
<AgentRunOutputView agentRunOutputs={agentRunOutputs} />
|
||||
)}
|
||||
|
||||
{/* Pending Reviews Section */}
|
||||
{runStatus === "review" && (
|
||||
<Card className="agpt-box">
|
||||
<CardHeader>
|
||||
<CardTitle className="font-poppins text-lg">
|
||||
Pending Reviews ({pendingReviews.length})
|
||||
</CardTitle>
|
||||
</CardHeader>
|
||||
<CardContent>
|
||||
{reviewsLoading ? (
|
||||
<LoadingBox spinnerSize={12} className="h-24" />
|
||||
) : pendingReviews.length > 0 ? (
|
||||
<PendingReviewsList
|
||||
reviews={pendingReviews}
|
||||
onReviewComplete={refetchReviews}
|
||||
emptyMessage="No pending reviews for this execution"
|
||||
/>
|
||||
) : (
|
||||
<div className="py-4 text-neutral-600">
|
||||
No pending reviews for this execution
|
||||
</div>
|
||||
)}
|
||||
</CardContent>
|
||||
</Card>
|
||||
)}
|
||||
|
||||
<Card className="agpt-box">
|
||||
<CardHeader>
|
||||
<CardTitle className="font-poppins text-lg">Input</CardTitle>
|
||||
</CardHeader>
|
||||
<CardContent className="flex flex-col gap-4">
|
||||
{agentRunInputs !== undefined ? (
|
||||
Object.entries(agentRunInputs).map(([key, { title, value }]) => (
|
||||
<div key={key} className="flex flex-col gap-1.5">
|
||||
<label className="text-sm font-medium">{title || key}</label>
|
||||
<Input value={value} className="rounded-full" disabled />
|
||||
</div>
|
||||
))
|
||||
) : (
|
||||
<LoadingBox spinnerSize={12} className="h-24" />
|
||||
)}
|
||||
</CardContent>
|
||||
</Card>
|
||||
</div>
|
||||
|
||||
{/* Run / Agent Actions */}
|
||||
<aside className="w-48 xl:w-56">
|
||||
<div className="flex flex-col gap-8">
|
||||
<ActionButtonGroup title="Run actions" actions={runActions} />
|
||||
|
||||
<ActionButtonGroup title="Agent actions" actions={agentActions} />
|
||||
</div>
|
||||
</aside>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -20,7 +20,7 @@ import {
 import { useBackendAPI } from "@/lib/autogpt-server-api/context";

 import { RunAgentInputs } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/RunAgentInputs/RunAgentInputs";
-import { ScheduleTaskDialog } from "@/components/contextual/CronScheduler/cron-scheduler-dialog";
+import { ScheduleTaskDialog } from "@/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/cron-scheduler-dialog";
 import ActionButtonGroup from "@/components/__legacy__/action-button-group";
 import type { ButtonAction } from "@/components/__legacy__/types";
 import {
@@ -53,10 +53,7 @@ import { ClockIcon, CopyIcon, InfoIcon } from "@phosphor-icons/react";
 import { CalendarClockIcon, Trash2Icon } from "lucide-react";

 import { analytics } from "@/services/analytics";
-import {
-  AgentStatus,
-  AgentStatusChip,
-} from "@/app/(platform)/build/components/legacy-builder/agent-status-chip";
+import { AgentStatus, AgentStatusChip } from "./agent-status-chip";

 export function AgentRunDraftView({
   graph,
@@ -0,0 +1,178 @@
|
||||
"use client";
|
||||
|
||||
import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
|
||||
import React, { useMemo } from "react";
|
||||
|
||||
import {
|
||||
Card,
|
||||
CardContent,
|
||||
CardHeader,
|
||||
CardTitle,
|
||||
} from "@/components/__legacy__/ui/card";
|
||||
|
||||
import LoadingBox from "@/components/__legacy__/ui/loading";
|
||||
import type { OutputMetadata } from "../../../../../../../../components/contextual/OutputRenderers";
|
||||
import {
|
||||
globalRegistry,
|
||||
OutputActions,
|
||||
OutputItem,
|
||||
} from "../../../../../../../../components/contextual/OutputRenderers";
|
||||
|
||||
export function AgentRunOutputView({
|
||||
agentRunOutputs,
|
||||
}: {
|
||||
agentRunOutputs:
|
||||
| Record<
|
||||
string,
|
||||
{
|
||||
title?: string;
|
||||
/* type: BlockIOSubType; */
|
||||
values: Array<React.ReactNode>;
|
||||
}
|
||||
>
|
||||
| undefined;
|
||||
}) {
|
||||
const enableEnhancedOutputHandling = useGetFlag(
|
||||
Flag.ENABLE_ENHANCED_OUTPUT_HANDLING,
|
||||
);
|
||||
|
||||
// Prepare items for the renderer system
|
||||
const outputItems = useMemo(() => {
|
||||
if (!agentRunOutputs) return [];
|
||||
|
||||
const items: Array<{
|
||||
key: string;
|
||||
label: string;
|
||||
value: unknown;
|
||||
metadata?: OutputMetadata;
|
||||
renderer: any;
|
||||
}> = [];
|
||||
|
||||
Object.entries(agentRunOutputs).forEach(([key, { title, values }]) => {
|
||||
values.forEach((value, index) => {
|
||||
// Enhanced metadata extraction
|
||||
const metadata: OutputMetadata = {};
|
||||
|
||||
// Type guard to safely access properties
|
||||
if (
|
||||
typeof value === "object" &&
|
||||
value !== null &&
|
||||
!React.isValidElement(value)
|
||||
) {
|
||||
const objValue = value as any;
|
||||
if (objValue.type) metadata.type = objValue.type;
|
||||
if (objValue.mimeType) metadata.mimeType = objValue.mimeType;
|
||||
if (objValue.filename) metadata.filename = objValue.filename;
|
||||
}
|
||||
|
||||
const renderer = globalRegistry.getRenderer(value, metadata);
|
||||
if (renderer) {
|
||||
items.push({
|
||||
key: `${key}-${index}`,
|
||||
label: index === 0 ? title || key : "",
|
||||
value,
|
||||
metadata,
|
||||
renderer,
|
||||
});
|
||||
} else {
|
||||
const textRenderer = globalRegistry
|
||||
.getAllRenderers()
|
||||
.find((r) => r.name === "TextRenderer");
|
||||
if (textRenderer) {
|
||||
items.push({
|
||||
key: `${key}-${index}`,
|
||||
label: index === 0 ? title || key : "",
|
||||
value: JSON.stringify(value, null, 2),
|
||||
metadata,
|
||||
renderer: textRenderer,
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
return items;
|
||||
}, [agentRunOutputs]);
|
||||
|
||||
return (
|
||||
<>
|
||||
{enableEnhancedOutputHandling ? (
|
||||
<Card className="agpt-box" style={{ maxWidth: "950px" }}>
|
||||
<CardHeader>
|
||||
<div className="flex items-center justify-between">
|
||||
<CardTitle className="font-poppins text-lg">Output</CardTitle>
|
||||
{outputItems.length > 0 && (
|
||||
<OutputActions
|
||||
items={outputItems.map((item) => ({
|
||||
value: item.value,
|
||||
metadata: item.metadata,
|
||||
renderer: item.renderer,
|
||||
}))}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</CardHeader>
|
||||
|
||||
<CardContent
|
||||
className="flex flex-col gap-4"
|
||||
style={{ maxWidth: "660px" }}
|
||||
>
|
||||
{agentRunOutputs !== undefined ? (
|
||||
outputItems.length > 0 ? (
|
||||
outputItems.map((item) => (
|
||||
<OutputItem
|
||||
key={item.key}
|
||||
value={item.value}
|
||||
metadata={item.metadata}
|
||||
renderer={item.renderer}
|
||||
label={item.label}
|
||||
/>
|
||||
))
|
||||
) : (
|
||||
<p className="text-sm text-muted-foreground">
|
||||
No outputs to display
|
||||
</p>
|
||||
)
|
||||
) : (
|
||||
<LoadingBox spinnerSize={12} className="h-24" />
|
||||
)}
|
||||
</CardContent>
|
||||
</Card>
|
||||
) : (
|
||||
<Card className="agpt-box" style={{ maxWidth: "950px" }}>
|
||||
<CardHeader>
|
||||
<CardTitle className="font-poppins text-lg">Output</CardTitle>
|
||||
</CardHeader>
|
||||
|
||||
<CardContent
|
||||
className="flex flex-col gap-4"
|
||||
style={{ maxWidth: "660px" }}
|
||||
>
|
||||
{agentRunOutputs !== undefined ? (
|
||||
Object.entries(agentRunOutputs).map(
|
||||
([key, { title, values }]) => (
|
||||
<div key={key} className="flex flex-col gap-1.5">
|
||||
<label className="text-sm font-medium">
|
||||
{title || key}
|
||||
</label>
|
||||
{values.map((value, i) => (
|
||||
<p
|
||||
className="resize-none overflow-x-auto whitespace-pre-wrap break-words border-none text-sm text-neutral-700 disabled:cursor-not-allowed"
|
||||
key={i}
|
||||
>
|
||||
{value}
|
||||
</p>
|
||||
))}
|
||||
{/* TODO: pretty type-dependent rendering */}
|
||||
</div>
|
||||
),
|
||||
)
|
||||
) : (
|
||||
<LoadingBox spinnerSize={12} className="h-24" />
|
||||
)}
|
||||
</CardContent>
|
||||
</Card>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,68 @@
import React from "react";

import { Badge } from "@/components/__legacy__/ui/badge";

import { GraphExecutionMeta } from "@/lib/autogpt-server-api/types";

export type AgentRunStatus =
  | "success"
  | "failed"
  | "queued"
  | "running"
  | "stopped"
  | "scheduled"
  | "draft"
  | "review";

export const agentRunStatusMap: Record<
  GraphExecutionMeta["status"],
  AgentRunStatus
> = {
  INCOMPLETE: "draft",
  COMPLETED: "success",
  FAILED: "failed",
  QUEUED: "queued",
  RUNNING: "running",
  TERMINATED: "stopped",
  REVIEW: "review",
};

const statusData: Record<
  AgentRunStatus,
  { label: string; variant: keyof typeof statusStyles }
> = {
  success: { label: "Success", variant: "success" },
  running: { label: "Running", variant: "info" },
  failed: { label: "Failed", variant: "destructive" },
  queued: { label: "Queued", variant: "warning" },
  draft: { label: "Draft", variant: "secondary" },
  stopped: { label: "Stopped", variant: "secondary" },
  scheduled: { label: "Scheduled", variant: "secondary" },
  review: { label: "In Review", variant: "warning" },
};

const statusStyles = {
  success:
    "bg-green-100 text-green-800 hover:bg-green-100 hover:text-green-800",
  destructive: "bg-red-100 text-red-800 hover:bg-red-100 hover:text-red-800",
  warning:
    "bg-yellow-100 text-yellow-800 hover:bg-yellow-100 hover:text-yellow-800",
  info: "bg-blue-100 text-blue-800 hover:bg-blue-100 hover:text-blue-800",
  secondary:
    "bg-slate-100 text-slate-800 hover:bg-slate-100 hover:text-slate-800",
};

export function AgentRunStatusChip({
  status,
}: {
  status: AgentRunStatus;
}): React.ReactElement {
  return (
    <Badge
      variant="secondary"
      className={`text-xs font-medium ${statusStyles[statusData[status]?.variant]} rounded-[45px] px-[9px] py-[3px]`}
    >
      {statusData[status]?.label}
    </Badge>
  );
}
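Note (illustrative, not part of this diff): a minimal usage sketch for the chip added above, mapping a backend execution status onto the chip's status union. RunStatusCell and its `execution` prop are hypothetical names introduced only for this example.

import { agentRunStatusMap, AgentRunStatusChip } from "./agent-run-status-chip";
import type { GraphExecutionMeta } from "@/lib/autogpt-server-api/types";

export function RunStatusCell({ execution }: { execution: GraphExecutionMeta }) {
  // Translate the backend enum (e.g. "RUNNING") into the UI status ("running").
  const status = agentRunStatusMap[execution.status];
  return <AgentRunStatusChip status={status} />;
}
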
@@ -0,0 +1,130 @@
|
||||
import React from "react";
|
||||
import { formatDistanceToNow, isPast } from "date-fns";
|
||||
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
import { Link2Icon, Link2OffIcon, MoreVertical } from "lucide-react";
|
||||
import { Card, CardContent } from "@/components/__legacy__/ui/card";
|
||||
import { Button } from "@/components/__legacy__/ui/button";
|
||||
import {
|
||||
DropdownMenu,
|
||||
DropdownMenuContent,
|
||||
DropdownMenuItem,
|
||||
DropdownMenuTrigger,
|
||||
} from "@/components/__legacy__/ui/dropdown-menu";
|
||||
|
||||
import { AgentStatus, AgentStatusChip } from "./agent-status-chip";
|
||||
import { AgentRunStatus, AgentRunStatusChip } from "./agent-run-status-chip";
|
||||
import { PushPinSimpleIcon } from "@phosphor-icons/react";
|
||||
|
||||
export type AgentRunSummaryProps = (
|
||||
| {
|
||||
type: "run";
|
||||
status: AgentRunStatus;
|
||||
}
|
||||
| {
|
||||
type: "preset";
|
||||
status?: undefined;
|
||||
}
|
||||
| {
|
||||
type: "preset.triggered";
|
||||
status: AgentStatus;
|
||||
}
|
||||
| {
|
||||
type: "schedule";
|
||||
status: "scheduled";
|
||||
}
|
||||
) & {
|
||||
title: string;
|
||||
timestamp?: number | Date;
|
||||
selected?: boolean;
|
||||
onClick?: () => void;
|
||||
// onRename: () => void;
|
||||
onDelete: () => void;
|
||||
onPinAsPreset?: () => void;
|
||||
className?: string;
|
||||
};
|
||||
|
||||
export function AgentRunSummaryCard({
|
||||
type,
|
||||
status,
|
||||
title,
|
||||
timestamp,
|
||||
selected = false,
|
||||
onClick,
|
||||
// onRename,
|
||||
onDelete,
|
||||
onPinAsPreset,
|
||||
className,
|
||||
}: AgentRunSummaryProps): React.ReactElement {
|
||||
return (
|
||||
<Card
|
||||
className={cn(
|
||||
"agpt-rounded-card cursor-pointer border-zinc-300",
|
||||
selected ? "agpt-card-selected" : "",
|
||||
className,
|
||||
)}
|
||||
onClick={onClick}
|
||||
>
|
||||
<CardContent className="relative p-2.5 lg:p-4">
|
||||
{(type == "run" || type == "schedule") && (
|
||||
<AgentRunStatusChip status={status} />
|
||||
)}
|
||||
{type == "preset" && (
|
||||
<div className="flex items-center text-sm font-medium text-neutral-700">
|
||||
<PushPinSimpleIcon className="mr-1 size-4 text-foreground" /> Preset
|
||||
</div>
|
||||
)}
|
||||
{type == "preset.triggered" && (
|
||||
<div className="flex items-center justify-between">
|
||||
<AgentStatusChip status={status} />
|
||||
|
||||
<div className="flex items-center text-sm font-medium text-neutral-700">
|
||||
{status == "inactive" ? (
|
||||
<Link2OffIcon className="mr-1 size-4 text-foreground" />
|
||||
) : (
|
||||
<Link2Icon className="mr-1 size-4 text-foreground" />
|
||||
)}{" "}
|
||||
Trigger
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className="mt-5 flex items-center justify-between">
|
||||
<h3 className="truncate pr-2 text-base font-medium text-neutral-900">
|
||||
{title}
|
||||
</h3>
|
||||
|
||||
<DropdownMenu>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<Button variant="ghost" className="h-5 w-5 p-0">
|
||||
<MoreVertical className="h-5 w-5" />
|
||||
</Button>
|
||||
</DropdownMenuTrigger>
|
||||
<DropdownMenuContent>
|
||||
{onPinAsPreset && (
|
||||
<DropdownMenuItem onClick={onPinAsPreset}>
|
||||
Pin as a preset
|
||||
</DropdownMenuItem>
|
||||
)}
|
||||
|
||||
{/* <DropdownMenuItem onClick={onRename}>Rename</DropdownMenuItem> */}
|
||||
|
||||
<DropdownMenuItem onClick={onDelete}>Delete</DropdownMenuItem>
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
</div>
|
||||
|
||||
{timestamp && (
|
||||
<p
|
||||
className="mt-1 text-sm font-normal text-neutral-500"
|
||||
title={new Date(timestamp).toString()}
|
||||
>
|
||||
{isPast(timestamp) ? "Ran" : "Runs in"}{" "}
|
||||
{formatDistanceToNow(timestamp, { addSuffix: true })}
|
||||
</p>
|
||||
)}
|
||||
</CardContent>
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,237 @@
|
||||
"use client";
|
||||
import { Plus } from "lucide-react";
|
||||
import React, { useEffect, useState } from "react";
|
||||
|
||||
import {
|
||||
GraphExecutionID,
|
||||
GraphExecutionMeta,
|
||||
LibraryAgent,
|
||||
LibraryAgentPreset,
|
||||
LibraryAgentPresetID,
|
||||
Schedule,
|
||||
ScheduleID,
|
||||
} from "@/lib/autogpt-server-api";
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
import { Badge } from "@/components/__legacy__/ui/badge";
|
||||
import { Button } from "@/components/atoms/Button/Button";
|
||||
import LoadingBox, { LoadingSpinner } from "@/components/__legacy__/ui/loading";
|
||||
import { Separator } from "@/components/__legacy__/ui/separator";
|
||||
import { ScrollArea } from "@/components/__legacy__/ui/scroll-area";
|
||||
import { InfiniteScroll } from "@/components/contextual/InfiniteScroll/InfiniteScroll";
|
||||
import { AgentRunsQuery } from "../use-agent-runs";
|
||||
import { agentRunStatusMap } from "./agent-run-status-chip";
|
||||
import { AgentRunSummaryCard } from "./agent-run-summary-card";
|
||||
|
||||
interface AgentRunsSelectorListProps {
|
||||
agent: LibraryAgent;
|
||||
agentRunsQuery: AgentRunsQuery;
|
||||
agentPresets: LibraryAgentPreset[];
|
||||
schedules: Schedule[];
|
||||
selectedView: { type: "run" | "preset" | "schedule"; id?: string };
|
||||
allowDraftNewRun?: boolean;
|
||||
onSelectRun: (id: GraphExecutionID) => void;
|
||||
onSelectPreset: (preset: LibraryAgentPresetID) => void;
|
||||
onSelectSchedule: (id: ScheduleID) => void;
|
||||
onSelectDraftNewRun: () => void;
|
||||
doDeleteRun: (id: GraphExecutionMeta) => void;
|
||||
doDeletePreset: (id: LibraryAgentPresetID) => void;
|
||||
doDeleteSchedule: (id: ScheduleID) => void;
|
||||
doCreatePresetFromRun?: (id: GraphExecutionID) => void;
|
||||
className?: string;
|
||||
}
|
||||
|
||||
export function AgentRunsSelectorList({
|
||||
agent,
|
||||
agentRunsQuery: {
|
||||
agentRuns,
|
||||
agentRunCount,
|
||||
agentRunsLoading,
|
||||
hasMoreRuns,
|
||||
fetchMoreRuns,
|
||||
isFetchingMoreRuns,
|
||||
},
|
||||
agentPresets,
|
||||
schedules,
|
||||
selectedView,
|
||||
allowDraftNewRun = true,
|
||||
onSelectRun,
|
||||
onSelectPreset,
|
||||
onSelectSchedule,
|
||||
onSelectDraftNewRun,
|
||||
doDeleteRun,
|
||||
doDeletePreset,
|
||||
doDeleteSchedule,
|
||||
doCreatePresetFromRun,
|
||||
className,
|
||||
}: AgentRunsSelectorListProps): React.ReactElement {
|
||||
const [activeListTab, setActiveListTab] = useState<"runs" | "scheduled">(
|
||||
"runs",
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
if (selectedView.type === "schedule") {
|
||||
setActiveListTab("scheduled");
|
||||
} else {
|
||||
setActiveListTab("runs");
|
||||
}
|
||||
}, [selectedView]);
|
||||
|
||||
const listItemClasses = "h-28 w-72 lg:w-full lg:h-32";
|
||||
|
||||
return (
|
||||
<aside className={cn("flex flex-col gap-4", className)}>
|
||||
{allowDraftNewRun ? (
|
||||
<Button
|
||||
className={"mb-4 hidden lg:flex"}
|
||||
onClick={onSelectDraftNewRun}
|
||||
leftIcon={<Plus className="h-6 w-6" />}
|
||||
>
|
||||
New {agent.has_external_trigger ? "trigger" : "run"}
|
||||
</Button>
|
||||
) : null}
|
||||
|
||||
<div className="flex gap-2">
|
||||
<Badge
|
variant={activeListTab === "runs" ? "secondary" : "outline"}
className="cursor-pointer gap-2 rounded-full text-base"
onClick={() => setActiveListTab("runs")}
>
<span>Runs</span>
<span className="text-neutral-600">
{agentRunCount ?? <LoadingSpinner className="size-4" />}
</span>
</Badge>

<Badge
variant={activeListTab === "scheduled" ? "secondary" : "outline"}
className="cursor-pointer gap-2 rounded-full text-base"
onClick={() => setActiveListTab("scheduled")}
>
<span>Scheduled</span>
<span className="text-neutral-600">{schedules.length}</span>
</Badge>
</div>

{/* Runs / Schedules list */}
{agentRunsLoading && activeListTab === "runs" ? (
<LoadingBox className="h-28 w-full lg:h-[calc(100vh-300px)] lg:w-72 xl:w-80" />
) : (
<ScrollArea
className="w-full lg:h-[calc(100vh-300px)] lg:w-72 xl:w-80"
orientation={window.innerWidth >= 1024 ? "vertical" : "horizontal"}
>
<InfiniteScroll
direction={window.innerWidth >= 1024 ? "vertical" : "horizontal"}
hasNextPage={hasMoreRuns}
fetchNextPage={fetchMoreRuns}
isFetchingNextPage={isFetchingMoreRuns}
>
<div className="flex items-center gap-2 lg:flex-col">
{/* New Run button - only in small layouts */}
{allowDraftNewRun && (
<Button
size="large"
className={
"flex h-12 w-40 items-center gap-2 py-6 lg:hidden " +
(selectedView.type == "run" && !selectedView.id
? "agpt-card-selected text-accent"
: "")
}
onClick={onSelectDraftNewRun}
leftIcon={<Plus className="h-6 w-6" />}
>
New {agent.has_external_trigger ? "trigger" : "run"}
</Button>
)}

{activeListTab === "runs" ? (
<>
{agentPresets
.filter((preset) => preset.webhook) // Triggers
.toSorted(
(a, b) => b.updated_at.getTime() - a.updated_at.getTime(),
)
.map((preset) => (
<AgentRunSummaryCard
className={cn(listItemClasses, "lg:h-auto")}
key={preset.id}
type="preset.triggered"
status={preset.is_active ? "active" : "inactive"}
title={preset.name}
// timestamp={preset.last_run_time} // TODO: implement this
selected={selectedView.id === preset.id}
onClick={() => onSelectPreset(preset.id)}
onDelete={() => doDeletePreset(preset.id)}
/>
))}
{agentPresets
.filter((preset) => !preset.webhook) // Presets
.toSorted(
(a, b) => b.updated_at.getTime() - a.updated_at.getTime(),
)
.map((preset) => (
<AgentRunSummaryCard
className={cn(listItemClasses, "lg:h-auto")}
key={preset.id}
type="preset"
title={preset.name}
// timestamp={preset.last_run_time} // TODO: implement this
selected={selectedView.id === preset.id}
onClick={() => onSelectPreset(preset.id)}
onDelete={() => doDeletePreset(preset.id)}
/>
))}
{agentPresets.length > 0 && <Separator className="my-1" />}
{agentRuns
.toSorted((a, b) => {
const aTime = a.started_at?.getTime() ?? 0;
const bTime = b.started_at?.getTime() ?? 0;
return bTime - aTime;
})
.map((run) => (
<AgentRunSummaryCard
className={listItemClasses}
key={run.id}
type="run"
status={agentRunStatusMap[run.status]}
title={
(run.preset_id
? agentPresets.find((p) => p.id == run.preset_id)
?.name
: null) ?? agent.name
}
timestamp={run.started_at ?? undefined}
selected={selectedView.id === run.id}
onClick={() => onSelectRun(run.id)}
onDelete={() => doDeleteRun(run as GraphExecutionMeta)}
onPinAsPreset={
doCreatePresetFromRun
? () => doCreatePresetFromRun(run.id)
: undefined
}
/>
))}
</>
) : (
schedules.map((schedule) => (
<AgentRunSummaryCard
className={listItemClasses}
key={schedule.id}
type="schedule"
status="scheduled" // TODO: implement active/inactive status for schedules
title={schedule.name}
timestamp={schedule.next_run_time}
selected={selectedView.id === schedule.id}
onClick={() => onSelectSchedule(schedule.id)}
onDelete={() => doDeleteSchedule(schedule.id)}
/>
))
)}
</div>
</InfiniteScroll>
</ScrollArea>
)}
</aside>
);
}
@@ -0,0 +1,180 @@
"use client";
import React, { useCallback, useMemo } from "react";

import {
Graph,
GraphExecutionID,
Schedule,
ScheduleID,
} from "@/lib/autogpt-server-api";
import { useBackendAPI } from "@/lib/autogpt-server-api/context";

import ActionButtonGroup from "@/components/__legacy__/action-button-group";
import type { ButtonAction } from "@/components/__legacy__/types";
import {
Card,
CardContent,
CardHeader,
CardTitle,
} from "@/components/__legacy__/ui/card";
import { IconCross } from "@/components/__legacy__/ui/icons";
import { Input } from "@/components/__legacy__/ui/input";
import LoadingBox from "@/components/__legacy__/ui/loading";
import { useToastOnFail } from "@/components/molecules/Toast/use-toast";
import { humanizeCronExpression } from "@/lib/cron-expression-utils";
import { formatScheduleTime } from "@/lib/timezone-utils";
import { useUserTimezone } from "@/lib/hooks/useUserTimezone";
import { PlayIcon } from "lucide-react";

import { AgentRunStatus } from "./agent-run-status-chip";

export function AgentScheduleDetailsView({
graph,
schedule,
agentActions,
onForcedRun,
doDeleteSchedule,
}: {
graph: Graph;
schedule: Schedule;
agentActions: ButtonAction[];
onForcedRun: (runID: GraphExecutionID) => void;
doDeleteSchedule: (scheduleID: ScheduleID) => void;
}): React.ReactNode {
const api = useBackendAPI();

const selectedRunStatus: AgentRunStatus = "scheduled";

const toastOnFail = useToastOnFail();

// Get user's timezone for displaying schedule times
const userTimezone = useUserTimezone();

const infoStats: { label: string; value: React.ReactNode }[] = useMemo(() => {
return [
{
label: "Status",
value:
selectedRunStatus.charAt(0).toUpperCase() +
selectedRunStatus.slice(1),
},
{
label: "Schedule",
value: humanizeCronExpression(schedule.cron),
},
{
label: "Next run",
value: formatScheduleTime(schedule.next_run_time, userTimezone),
},
];
}, [schedule, selectedRunStatus, userTimezone]);

const agentRunInputs: Record<
string,
{ title?: string; /* type: BlockIOSubType; */ value: any }
> = useMemo(() => {
// TODO: show (link to) preset - https://github.com/Significant-Gravitas/AutoGPT/issues/9168

// Add type info from agent input schema
return Object.fromEntries(
Object.entries(schedule.input_data).map(([k, v]) => [
k,
{
title: graph.input_schema.properties[k].title,
/* TODO: type: agent.input_schema.properties[k].type */
value: v,
},
]),
);
}, [graph, schedule]);

const runNow = useCallback(
() =>
api
.executeGraph(
graph.id,
graph.version,
schedule.input_data,
schedule.input_credentials,
"library",
)
.then((run) => onForcedRun(run.id))
.catch(toastOnFail("execute agent")),
[api, graph, schedule, onForcedRun, toastOnFail],
);

const runActions: ButtonAction[] = useMemo(
() => [
{
label: (
<>
<PlayIcon className="mr-2 size-4" />
Run now
</>
),
callback: runNow,
},
{
label: (
<>
<IconCross className="mr-2 size-4 px-0.5" />
Delete schedule
</>
),
callback: () => doDeleteSchedule(schedule.id),
variant: "destructive",
},
],
[runNow],
);

return (
<div className="agpt-div flex gap-6">
<div className="flex flex-1 flex-col gap-4">
<Card className="agpt-box">
<CardHeader>
<CardTitle className="font-poppins text-lg">Info</CardTitle>
</CardHeader>

<CardContent>
<div className="flex justify-stretch gap-4">
{infoStats.map(({ label, value }) => (
<div key={label} className="flex-1">
<p className="text-sm font-medium text-black">{label}</p>
<p className="text-sm text-neutral-600">{value}</p>
</div>
))}
</div>
</CardContent>
</Card>

<Card className="agpt-box">
<CardHeader>
<CardTitle className="font-poppins text-lg">Input</CardTitle>
</CardHeader>
<CardContent className="flex flex-col gap-4">
{agentRunInputs !== undefined ? (
Object.entries(agentRunInputs).map(([key, { title, value }]) => (
<div key={key} className="flex flex-col gap-1.5">
<label className="text-sm font-medium">{title || key}</label>
<Input value={value} className="rounded-full" disabled />
</div>
))
) : (
<LoadingBox spinnerSize={12} className="h-24" />
)}
</CardContent>
</Card>
</div>

{/* Run / Agent Actions */}
<aside className="w-48 xl:w-56">
<div className="flex flex-col gap-8">
<ActionButtonGroup title="Run actions" actions={runActions} />

<ActionButtonGroup title="Agent actions" actions={agentActions} />
</div>
</aside>
</div>
);
}
@@ -0,0 +1,100 @@
"use client";

import React, { useState } from "react";
import { Button } from "@/components/__legacy__/ui/button";
import {
Dialog,
DialogContent,
DialogDescription,
DialogFooter,
DialogHeader,
DialogTitle,
} from "@/components/__legacy__/ui/dialog";
import { Input } from "@/components/__legacy__/ui/input";
import { Textarea } from "@/components/__legacy__/ui/textarea";

interface CreatePresetDialogProps {
open: boolean;
onOpenChange: (open: boolean) => void;
onConfirm: (name: string, description: string) => Promise<void> | void;
}

export function CreatePresetDialog({
open,
onOpenChange,
onConfirm,
}: CreatePresetDialogProps) {
const [name, setName] = useState("");
const [description, setDescription] = useState("");

const handleSubmit = async () => {
if (name.trim()) {
await onConfirm(name.trim(), description.trim());
setName("");
setDescription("");
onOpenChange(false);
}
};

const handleCancel = () => {
setName("");
setDescription("");
onOpenChange(false);
};

const handleKeyDown = (e: React.KeyboardEvent) => {
if (e.key === "Enter" && (e.metaKey || e.ctrlKey)) {
e.preventDefault();
handleSubmit();
}
};

return (
<Dialog open={open} onOpenChange={onOpenChange}>
<DialogContent className="sm:max-w-[425px]">
<DialogHeader>
<DialogTitle>Create Preset</DialogTitle>
<DialogDescription>
Give your preset a name and description to help identify it later.
</DialogDescription>
</DialogHeader>
<div className="grid gap-4 py-4">
<div className="grid gap-2">
<label htmlFor="preset-name" className="text-sm font-medium">
Name *
</label>
<Input
id="preset-name"
placeholder="Enter preset name"
value={name}
onChange={(e) => setName(e.target.value)}
onKeyDown={handleKeyDown}
autoFocus
/>
</div>
<div className="grid gap-2">
<label htmlFor="preset-description" className="text-sm font-medium">
Description
</label>
<Textarea
id="preset-description"
placeholder="Optional description"
value={description}
onChange={(e) => setDescription(e.target.value)}
onKeyDown={handleKeyDown}
rows={3}
/>
</div>
</div>
<DialogFooter>
<Button variant="outline" onClick={handleCancel}>
Cancel
</Button>
<Button onClick={handleSubmit} disabled={!name.trim()}>
Create Preset
</Button>
</DialogFooter>
</DialogContent>
</Dialog>
);
}
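Not part of the changeset: a minimal usage sketch for the CreatePresetDialog added above, assuming a hypothetical parent component and an onCreatePreset callback that actually persists the preset.

```tsx
// Illustrative only: shows the open / onOpenChange / onConfirm contract of CreatePresetDialog.
import { useState } from "react";
import { CreatePresetDialog } from "./CreatePresetDialog"; // assumed path

export function PresetToolbar({
  onCreatePreset, // assumed callback that saves the preset on the backend
}: {
  onCreatePreset: (name: string, description: string) => Promise<void>;
}) {
  const [dialogOpen, setDialogOpen] = useState(false);

  return (
    <>
      <button onClick={() => setDialogOpen(true)}>Pin as preset</button>
      <CreatePresetDialog
        open={dialogOpen}
        onOpenChange={setDialogOpen}
        // onConfirm may return a promise; the dialog awaits it before clearing and closing
        onConfirm={(name, description) => onCreatePreset(name, description)}
      />
    </>
  );
}
```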
@@ -2,7 +2,7 @@ import { useEffect, useState } from "react";
import { Input } from "@/components/__legacy__/ui/input";
import { Button } from "@/components/__legacy__/ui/button";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { CronScheduler } from "@/components/contextual/CronScheduler/cron-scheduler";
import { CronScheduler } from "@/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/cron-scheduler";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import { getTimezoneDisplayName } from "@/lib/timezone-utils";
import { useUserTimezone } from "@/lib/hooks/useUserTimezone";
@@ -0,0 +1,210 @@
import {
GraphExecutionMeta as LegacyGraphExecutionMeta,
GraphID,
GraphExecutionID,
} from "@/lib/autogpt-server-api";
import { getQueryClient } from "@/lib/react-query/queryClient";
import {
getPaginatedTotalCount,
getPaginationNextPageNumber,
unpaginate,
} from "@/app/api/helpers";
import {
getV1ListGraphExecutionsResponse,
getV1ListGraphExecutionsResponse200,
useGetV1ListGraphExecutionsInfinite,
} from "@/app/api/__generated__/endpoints/graphs/graphs";
import { GraphExecutionsPaginated } from "@/app/api/__generated__/models/graphExecutionsPaginated";
import { GraphExecutionMeta as RawGraphExecutionMeta } from "@/app/api/__generated__/models/graphExecutionMeta";

export type GraphExecutionMeta = Omit<
RawGraphExecutionMeta,
"id" | "user_id" | "graph_id" | "preset_id" | "stats"
> &
Pick<
LegacyGraphExecutionMeta,
"id" | "user_id" | "graph_id" | "preset_id" | "stats"
>;

/** Hook to fetch runs for a specific graph, with support for infinite scroll.
*
* @param graphID - The ID of the graph to fetch agent runs for. This parameter is
* optional in the sense that the hook doesn't run unless it is passed.
* This way, it can be used in components where the graph ID is not
* immediately available.
*/
export const useAgentRunsInfinite = (graphID?: GraphID) => {
const queryClient = getQueryClient();
const {
data: queryResults,
refetch: refetchRuns,
isPending: agentRunsLoading,
isRefetching: agentRunsReloading,
hasNextPage: hasMoreRuns,
fetchNextPage: fetchMoreRuns,
isFetchingNextPage: isFetchingMoreRuns,
queryKey,
} = useGetV1ListGraphExecutionsInfinite(
graphID!,
{ page: 1, page_size: 20 },
{
query: {
getNextPageParam: getPaginationNextPageNumber,

// Prevent query from running if graphID is not available (yet)
...(!graphID
? {
enabled: false,
queryFn: () =>
// Fake empty response if graphID is not available (yet)
Promise.resolve({
status: 200,
data: {
executions: [],
pagination: {
current_page: 1,
page_size: 20,
total_items: 0,
total_pages: 0,
},
},
headers: new Headers(),
} satisfies getV1ListGraphExecutionsResponse),
}
: {}),
},
},
queryClient,
);

const agentRuns = queryResults ? unpaginate(queryResults, "executions") : [];
const agentRunCount = getPaginatedTotalCount(queryResults);

const upsertAgentRun = (newAgentRun: GraphExecutionMeta) => {
queryClient.setQueryData(
queryKey,
(currentQueryData: typeof queryResults) => {
if (!currentQueryData?.pages || agentRunCount === undefined)
return currentQueryData;

const exists = currentQueryData.pages.some((page) => {
if (page.status !== 200) return false;

const response = page.data;
return response.executions.some((run) => run.id === newAgentRun.id);
});
if (exists) {
// If the run already exists, we update it
return {
...currentQueryData,
pages: currentQueryData.pages.map((page) => {
if (page.status !== 200) return page;
const response = page.data;
const executions = response.executions;

const index = executions.findIndex(
(run) => run.id === newAgentRun.id,
);
if (index === -1) return page;

const newExecutions = [...executions];
newExecutions[index] = newAgentRun;

return {
...page,
data: {
...response,
executions: newExecutions,
},
} satisfies getV1ListGraphExecutionsResponse;
}),
};
}

// If the run does not exist, we add it to the first page
const page = currentQueryData
.pages[0] as getV1ListGraphExecutionsResponse200 & {
headers: Headers;
};
const updatedExecutions = [newAgentRun, ...page.data.executions];
const updatedPage = {
...page,
data: {
...page.data,
executions: updatedExecutions,
},
} satisfies getV1ListGraphExecutionsResponse;
const updatedPages = [updatedPage, ...currentQueryData.pages.slice(1)];
return {
...currentQueryData,
pages: updatedPages.map(
// Increment the total runs count in the pagination info of all pages
(page) =>
page.status === 200
? {
...page,
data: {
...page.data,
pagination: {
...page.data.pagination,
total_items: agentRunCount + 1,
},
},
}
: page,
),
};
},
);
};

const removeAgentRun = (runID: GraphExecutionID) => {
queryClient.setQueryData(
[queryKey, { page: 1, page_size: 20 }],
(currentQueryData: typeof queryResults) => {
if (!currentQueryData?.pages) return currentQueryData;

let found = false;
return {
...currentQueryData,
pages: currentQueryData.pages.map((page) => {
const response = page.data as GraphExecutionsPaginated;
const filteredExecutions = response.executions.filter(
(run) => run.id !== runID,
);
if (filteredExecutions.length < response.executions.length) {
found = true;
}

return {
...page,
data: {
...response,
executions: filteredExecutions,
pagination: {
...response.pagination,
total_items:
response.pagination.total_items - (found ? 1 : 0),
},
},
};
}),
};
},
);
};

return {
agentRuns: agentRuns as GraphExecutionMeta[],
refetchRuns,
agentRunCount,
agentRunsLoading: agentRunsLoading || agentRunsReloading,
hasMoreRuns,
fetchMoreRuns,
isFetchingMoreRuns,
upsertAgentRun,
removeAgentRun,
};
};

export type AgentRunsQuery = ReturnType<typeof useAgentRunsInfinite>;
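Not part of the changeset: a rough usage sketch for the useAgentRunsInfinite hook above, assuming it is exported from a local hooks module (the import path and component are illustrative).

```tsx
import { GraphID } from "@/lib/autogpt-server-api";
import { useAgentRunsInfinite } from "./useAgentRunsInfinite"; // assumed path

export function AgentRunCounter({ graphID }: { graphID?: GraphID }) {
  // While graphID is undefined the query stays disabled and serves the fake empty page.
  const {
    agentRuns,
    agentRunCount,
    agentRunsLoading,
    hasMoreRuns,
    fetchMoreRuns,
  } = useAgentRunsInfinite(graphID);

  if (agentRunsLoading) return <p>Loading runs…</p>;

  return (
    <div>
      <p>
        {agentRunCount ?? 0} runs total, {agentRuns.length} loaded
      </p>
      {hasMoreRuns && (
        <button onClick={() => fetchMoreRuns()}>Load more</button>
      )}
    </div>
  );
}
```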
@@ -0,0 +1,7 @@
"use client";

import { OldAgentLibraryView } from "../../agents/[id]/components/OldAgentLibraryView/OldAgentLibraryView";

export default function OldAgentLibraryPage() {
return <OldAgentLibraryView />;
}
@@ -1,8 +1,11 @@
import { environment } from "@/services/environment";
import { getServerAuthToken } from "@/lib/autogpt-server-api/helpers";
import { NextRequest } from "next/server";
import { normalizeSSEStream, SSE_HEADERS } from "../../../sse-helpers";

/**
* SSE Proxy for chat streaming.
* Supports POST with context (page content + URL) in the request body.
*/
export async function POST(
request: NextRequest,
{ params }: { params: Promise<{ sessionId: string }> },
@@ -20,14 +23,17 @@ export async function POST(
);
}

// Get auth token from server-side session
const token = await getServerAuthToken();

// Build backend URL
const backendUrl = environment.getAGPTServerBaseUrl();
const streamUrl = new URL(
`/api/chat/sessions/${sessionId}/stream`,
backendUrl,
);

// Forward request to backend with auth header
const headers: Record<string, string> = {
"Content-Type": "application/json",
Accept: "text/event-stream",
@@ -57,15 +63,14 @@ export async function POST(
});
}

if (!response.body) {
return new Response(
JSON.stringify({ error: "Empty response from chat service" }),
{ status: 502, headers: { "Content-Type": "application/json" } },
);
}

return new Response(normalizeSSEStream(response.body), {
headers: SSE_HEADERS,
// Return the SSE stream directly
return new Response(response.body, {
headers: {
"Content-Type": "text/event-stream",
"Cache-Control": "no-cache, no-transform",
Connection: "keep-alive",
"X-Accel-Buffering": "no",
},
});
} catch (error) {
console.error("SSE proxy error:", error);
@@ -82,6 +87,13 @@ export async function POST(
}
}

/**
* Resume an active stream for a session.
*
* Called by the AI SDK's `useChat(resume: true)` on page load.
* Proxies to the backend which checks for an active stream and either
* replays it (200 + SSE) or returns 204 No Content.
*/
export async function GET(
_request: NextRequest,
{ params }: { params: Promise<{ sessionId: string }> },
@@ -112,6 +124,7 @@ export async function GET(
headers,
});

// 204 = no active stream to resume
if (response.status === 204) {
return new Response(null, { status: 204 });
}
@@ -124,13 +137,12 @@ export async function GET(
});
}

if (!response.body) {
return new Response(null, { status: 204 });
}

return new Response(normalizeSSEStream(response.body), {
return new Response(response.body, {
headers: {
...SSE_HEADERS,
"Content-Type": "text/event-stream",
"Cache-Control": "no-cache, no-transform",
Connection: "keep-alive",
"X-Accel-Buffering": "no",
"x-vercel-ai-ui-message-stream": "v1",
},
});
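Not part of the changeset: a hedged sketch of how a client could call the resume endpoint above and distinguish "nothing to resume" (204) from a replayed SSE stream (200). In practice the AI SDK's useChat handles this, so the fetch-based reader below is only an illustration of the contract; sessionId and the logging are assumptions.

```ts
// Illustrative only: mirrors the 200-with-SSE vs. 204 behaviour of the GET handler above.
async function resumeChatStream(sessionId: string): Promise<void> {
  const response = await fetch(`/api/chat/sessions/${sessionId}/stream`, {
    headers: { Accept: "text/event-stream" },
  });

  if (response.status === 204 || !response.body) {
    return; // no active stream to resume
  }

  const reader = response.body
    .pipeThrough(new TextDecoderStream())
    .getReader();
  for (;;) {
    const { value, done } = await reader.read();
    if (done) break;
    console.log("SSE chunk:", value); // a real client would hand this to the chat UI
  }
}
```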
@@ -1,72 +0,0 @@
export const SSE_HEADERS = {
"Content-Type": "text/event-stream",
"Cache-Control": "no-cache, no-transform",
Connection: "keep-alive",
"X-Accel-Buffering": "no",
} as const;

export function normalizeSSEStream(
input: ReadableStream<Uint8Array>,
): ReadableStream<Uint8Array> {
const decoder = new TextDecoder();
const encoder = new TextEncoder();
let buffer = "";

return input.pipeThrough(
new TransformStream<Uint8Array, Uint8Array>({
transform(chunk, controller) {
buffer += decoder.decode(chunk, { stream: true });

const parts = buffer.split("\n\n");
buffer = parts.pop() ?? "";

for (const part of parts) {
const normalized = normalizeSSEEvent(part);
controller.enqueue(encoder.encode(normalized + "\n\n"));
}
},
flush(controller) {
if (buffer.trim()) {
const normalized = normalizeSSEEvent(buffer);
controller.enqueue(encoder.encode(normalized + "\n\n"));
}
},
}),
);
}

function normalizeSSEEvent(event: string): string {
const lines = event.split("\n");
const dataLines: string[] = [];
const otherLines: string[] = [];

for (const line of lines) {
if (line.startsWith("data: ")) {
dataLines.push(line.slice(6));
} else {
otherLines.push(line);
}
}

if (dataLines.length === 0) return event;

const dataStr = dataLines.join("\n");
try {
const parsed = JSON.parse(dataStr) as Record<string, unknown>;
if (parsed.type === "error") {
const normalized = {
type: "error",
errorText:
typeof parsed.errorText === "string"
? parsed.errorText
: "An unexpected error occurred",
};
const newData = `data: ${JSON.stringify(normalized)}`;
return [...otherLines.filter((l) => l.length > 0), newData].join("\n");
}
} catch {
// Not valid JSON — pass through as-is
}

return event;
}
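Not part of the changeset: a small sketch of what the normalizeSSEStream helper above does to an SSE error event, runnable in Node 18+ or a browser if the helper is kept in scope (this comparison removes the file, so keeping it is an assumption).

```ts
// Assumes normalizeSSEStream (above) is in scope; demonstrates the error-event rewrite.
async function demoNormalize(): Promise<void> {
  const encoder = new TextEncoder();
  const raw = new ReadableStream<Uint8Array>({
    start(controller) {
      // An error event whose errorText is not a string gets the fallback message.
      controller.enqueue(
        encoder.encode('data: {"type":"error","errorText":42}\n\n'),
      );
      controller.close();
    },
  });

  const reader = normalizeSSEStream(raw)
    .pipeThrough(new TextDecoderStream())
    .getReader();
  const { value } = await reader.read();
  console.log(value);
  // → data: {"type":"error","errorText":"An unexpected error occurred"}
}
```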
@@ -1,8 +1,20 @@
import { environment } from "@/services/environment";
import { getServerAuthToken } from "@/lib/autogpt-server-api/helpers";
import { NextRequest } from "next/server";
import { normalizeSSEStream, SSE_HEADERS } from "../../../sse-helpers";

/**
* SSE Proxy for task stream reconnection.
*
* This endpoint allows clients to reconnect to an ongoing or recently completed
* background task's stream. It replays missed messages from Redis Streams and
* subscribes to live updates if the task is still running.
*
* Client contract:
* 1. When receiving an operation_started event, store the task_id
* 2. To reconnect: GET /api/chat/tasks/{taskId}/stream?last_message_id={idx}
* 3. Messages are replayed from the last_message_id position
* 4. Stream ends when "finish" event is received
*/
export async function GET(
request: NextRequest,
{ params }: { params: Promise<{ taskId: string }> },
@@ -12,12 +24,15 @@ export async function GET(
const lastMessageId = searchParams.get("last_message_id") || "0-0";

try {
// Get auth token from server-side session
const token = await getServerAuthToken();

// Build backend URL
const backendUrl = environment.getAGPTServerBaseUrl();
const streamUrl = new URL(`/api/chat/tasks/${taskId}/stream`, backendUrl);
streamUrl.searchParams.set("last_message_id", lastMessageId);

// Forward request to backend with auth header
const headers: Record<string, string> = {
Accept: "text/event-stream",
"Cache-Control": "no-cache",
@@ -41,12 +56,14 @@ export async function GET(
});
}

if (!response.body) {
return new Response(null, { status: 204 });
}

return new Response(normalizeSSEStream(response.body), {
headers: SSE_HEADERS,
// Return the SSE stream directly
return new Response(response.body, {
headers: {
"Content-Type": "text/event-stream",
"Cache-Control": "no-cache, no-transform",
Connection: "keep-alive",
"X-Accel-Buffering": "no",
},
});
} catch (error) {
console.error("Task stream proxy error:", error);
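Not part of the changeset: a hedged client-side sketch of the reconnection contract documented in the comment above. The operation_started and finish event names and the last_message_id query parameter come from that contract; the payload shapes and the fetch-based SSE parsing are assumptions.

```ts
// Reconnect to a background task stream and replay from the last seen message id.
// taskId comes from a previously received operation_started event (see contract above).
async function reconnectTaskStream(
  taskId: string,
  lastMessageId = "0-0",
): Promise<void> {
  const url =
    `/api/chat/tasks/${taskId}/stream` +
    `?last_message_id=${encodeURIComponent(lastMessageId)}`;
  const response = await fetch(url, {
    headers: { Accept: "text/event-stream" },
  });
  if (!response.ok || !response.body) return;

  const reader = response.body
    .pipeThrough(new TextDecoderStream())
    .getReader();
  let buffer = "";
  for (;;) {
    const { value, done } = await reader.read();
    if (done) break;
    buffer += value;
    const events = buffer.split("\n\n");
    buffer = events.pop() ?? "";
    for (const event of events) {
      // "finish" is the documented end-of-stream event; its exact payload is assumed.
      if (event.includes("finish")) return;
      console.log("replayed/live event:", event);
    }
  }
}
```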
@@ -1,6 +1,6 @@
|
||||
"use client";
|
||||
|
||||
import { CronExpressionDialog } from "@/components/contextual/CronScheduler/cron-scheduler-dialog";
|
||||
import { CronExpressionDialog } from "@/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/cron-scheduler-dialog";
|
||||
import { Form, FormField } from "@/components/__legacy__/ui/form";
|
||||
import { Button } from "@/components/atoms/Button/Button";
|
||||
import { Input } from "@/components/atoms/Input/Input";
|
||||
|
||||
@@ -6,7 +6,6 @@ import { SupabaseClient } from "@supabase/supabase-js";
|
||||
export const PROTECTED_PAGES = [
|
||||
"/auth/authorize",
|
||||
"/auth/integrations",
|
||||
"/copilot",
|
||||
"/monitor",
|
||||
"/build",
|
||||
"/onboarding",
|
||||
|
||||
@@ -7,8 +7,11 @@ import { useFlags } from "launchdarkly-react-client-sdk";
|
||||
export enum Flag {
|
||||
BETA_BLOCKS = "beta-blocks",
|
||||
NEW_BLOCK_MENU = "new-block-menu",
|
||||
NEW_AGENT_RUNS = "new-agent-runs",
|
||||
GRAPH_SEARCH = "graph-search",
|
||||
ENABLE_ENHANCED_OUTPUT_HANDLING = "enable-enhanced-output-handling",
|
||||
NEW_FLOW_EDITOR = "new-flow-editor",
|
||||
BUILDER_VIEW_SWITCH = "builder-view-switch",
|
||||
SHARE_EXECUTION_RESULTS = "share-execution-results",
|
||||
AGENT_FAVORITING = "agent-favoriting",
|
||||
MARKETPLACE_SEARCH_TERMS = "marketplace-search-terms",
|
||||
@@ -21,8 +24,11 @@ const isPwMockEnabled = process.env.NEXT_PUBLIC_PW_TEST === "true";
|
||||
const defaultFlags = {
|
||||
[Flag.BETA_BLOCKS]: [],
|
||||
[Flag.NEW_BLOCK_MENU]: false,
|
||||
[Flag.NEW_AGENT_RUNS]: false,
|
||||
[Flag.GRAPH_SEARCH]: false,
|
||||
[Flag.ENABLE_ENHANCED_OUTPUT_HANDLING]: false,
|
||||
[Flag.NEW_FLOW_EDITOR]: false,
|
||||
[Flag.BUILDER_VIEW_SWITCH]: false,
|
||||
[Flag.SHARE_EXECUTION_RESULTS]: false,
|
||||
[Flag.AGENT_FAVORITING]: false,
|
||||
[Flag.MARKETPLACE_SEARCH_TERMS]: DEFAULT_SEARCH_TERMS,
|
||||
|
||||
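Not part of the changeset: a purely illustrative helper showing how the defaultFlags map above can back a flag lookup when the remote LaunchDarkly payload has no value yet. It assumes Flag and defaultFlags are in scope; the real codebase may wrap useFlags differently.

```ts
// Picks the client-side default for a flag when the remote payload omits it.
function resolveFlag<K extends keyof typeof defaultFlags>(
  remoteFlags: Partial<typeof defaultFlags>,
  flag: K,
): (typeof defaultFlags)[K] {
  return remoteFlags[flag] ?? defaultFlags[flag];
}

// e.g. resolveFlag(remoteFlags, Flag.NEW_AGENT_RUNS) stays false until the flag is served.
```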
@@ -11,18 +11,24 @@ test.beforeEach(async ({ page }) => {
|
||||
const buildPage = new BuildPage(page);
|
||||
const testUser = await getTestUser();
|
||||
|
||||
const { getId } = getSelectors(page);
|
||||
|
||||
await page.goto("/login");
|
||||
await loginPage.login(testUser.email, testUser.password);
|
||||
await hasUrl(page, "/marketplace");
|
||||
|
||||
await page.goto("/build");
|
||||
await buildPage.closeTutorial();
|
||||
await buildPage.openBlocksPanel();
|
||||
|
||||
const [dictionaryBlock] = await buildPage.getFilteredBlocksFromAPI(
|
||||
(block) => block.name === "AddToDictionaryBlock",
|
||||
);
|
||||
|
||||
await buildPage.addBlock(dictionaryBlock);
|
||||
const blockCard = getId(`block-name-${dictionaryBlock.id}`);
|
||||
await blockCard.click();
|
||||
const blockInEditor = getId(dictionaryBlock.id).first();
|
||||
expect(blockInEditor).toBeAttached();
|
||||
|
||||
await buildPage.saveAgent("Test Agent", "Test Description");
|
||||
await test
|
||||
|
||||
@@ -1,6 +1,3 @@
|
||||
// TODO: These tests were written for the old (legacy) builder.
|
||||
// They need to be updated to work with the new flow editor.
|
||||
|
||||
// Note: all the comments with //(number)! are for the docs
|
||||
//ignore them when reading the code, but if you change something,
|
||||
//make sure to update the docs! Your autoformmater will break this page,
|
||||
@@ -15,7 +12,7 @@ import { getTestUser } from "./utils/auth";
|
||||
|
||||
// Reason Ignore: admonishment is in the wrong place visually with correct prettier rules
|
||||
// prettier-ignore
|
||||
test.describe.skip("Build", () => { //(1)!
|
||||
test.describe("Build", () => { //(1)!
|
||||
let buildPage: BuildPage; //(2)!
|
||||
|
||||
// Reason Ignore: admonishment is in the wrong place visually with correct prettier rules
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { Locator, Page } from "@playwright/test";
|
||||
import { expect, Locator, Page } from "@playwright/test";
|
||||
import { Block as APIBlock } from "../../lib/autogpt-server-api/types";
|
||||
import { beautifyString } from "../../lib/utils";
|
||||
import { isVisible } from "../utils/assertion";
|
||||
import { BasePage } from "./base.page";
|
||||
|
||||
export interface Block {
|
||||
@@ -26,39 +27,32 @@ export class BuildPage extends BasePage {
|
||||
try {
|
||||
await this.page
|
||||
.getByRole("button", { name: "Skip Tutorial", exact: true })
|
||||
.click({ timeout: 3000 });
|
||||
} catch (_error) {
|
||||
console.info("Tutorial not shown or already dismissed");
|
||||
.click();
|
||||
} catch (error) {
|
||||
console.info("Error closing tutorial:", error);
|
||||
}
|
||||
}
|
||||
|
||||
async openBlocksPanel(): Promise<void> {
|
||||
const popoverContent = this.page.locator(
|
||||
'[data-id="blocks-control-popover-content"]',
|
||||
);
|
||||
const isPanelOpen = await popoverContent.isVisible();
|
||||
const isPanelOpen = await this.page
|
||||
.getByTestId("blocks-control-blocks-label")
|
||||
.isVisible();
|
||||
|
||||
if (!isPanelOpen) {
|
||||
await this.page.getByTestId("blocks-control-blocks-button").click();
|
||||
await popoverContent.waitFor({ state: "visible", timeout: 5000 });
|
||||
}
|
||||
}
|
||||
|
||||
async closeBlocksPanel(): Promise<void> {
|
||||
const popoverContent = this.page.locator(
|
||||
'[data-id="blocks-control-popover-content"]',
|
||||
);
|
||||
if (await popoverContent.isVisible()) {
|
||||
await this.page.getByTestId("blocks-control-blocks-button").click();
|
||||
}
|
||||
await this.page.getByTestId("profile-popout-menu-trigger").click();
|
||||
}
|
||||
|
||||
async saveAgent(
|
||||
name: string = "Test Agent",
|
||||
description: string = "",
|
||||
): Promise<void> {
|
||||
console.log(`Saving agent '${name}' with description '${description}'`);
|
||||
await this.page.getByTestId("save-control-save-button").click();
|
||||
console.log(`💾 Saving agent '${name}' with description '${description}'`);
|
||||
await this.page.getByTestId("blocks-control-save-button").click();
|
||||
await this.page.getByTestId("save-control-name-input").fill(name);
|
||||
await this.page
|
||||
.getByTestId("save-control-description-input")
|
||||
@@ -113,34 +107,32 @@ export class BuildPage extends BasePage {
|
||||
await this.openBlocksPanel();
|
||||
|
||||
const searchInput = this.page.locator(
|
||||
'[data-id="blocks-control-search-bar"] input[type="text"]',
|
||||
'[data-id="blocks-control-search-input"]',
|
||||
);
|
||||
|
||||
const displayName = this.getDisplayName(block.name);
|
||||
await searchInput.clear();
|
||||
await searchInput.fill(displayName);
|
||||
|
||||
const blockCardId = block.id.replace(/[^a-zA-Z0-9]/g, "");
|
||||
const blockCard = this.page.locator(
|
||||
`[data-id="block-card-${blockCardId}"]`,
|
||||
);
|
||||
const blockCard = this.page.getByTestId(`block-name-${block.id}`);
|
||||
|
||||
try {
|
||||
// Wait for the block card to be visible with a reasonable timeout
|
||||
await blockCard.waitFor({ state: "visible", timeout: 10000 });
|
||||
await blockCard.click();
|
||||
const blockInEditor = this.page.getByTestId(block.id).first();
|
||||
expect(blockInEditor).toBeAttached();
|
||||
} catch (error) {
|
||||
console.log(
|
||||
`Block ${block.name} (display: ${displayName}) returned from the API but not found in block list`,
|
||||
`❌ ❌ Block ${block.name} (display: ${displayName}) returned from the API but not found in block list`,
|
||||
);
|
||||
console.log(`Error: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
async hasBlock(_block: Block) {
|
||||
// In the new flow editor, verify a node exists on the canvas
|
||||
const node = this.page.locator('[data-id^="custom-node-"]').first();
|
||||
await node.isVisible();
|
||||
async hasBlock(block: Block) {
|
||||
const blockInEditor = this.page.getByTestId(block.id).first();
|
||||
await blockInEditor.isVisible();
|
||||
}
|
||||
|
||||
async getBlockInputs(blockId: string): Promise<string[]> {
|
||||
@@ -167,7 +159,7 @@ export class BuildPage extends BasePage {
|
||||
|
||||
// Clear any existing search to ensure we see all blocks in the category
|
||||
const searchInput = this.page.locator(
|
||||
'[data-id="blocks-control-search-bar"] input[type="text"]',
|
||||
'[data-id="blocks-control-search-input"]',
|
||||
);
|
||||
await searchInput.clear();
|
||||
|
||||
@@ -399,13 +391,13 @@ export class BuildPage extends BasePage {
|
||||
|
||||
async isRunButtonEnabled(): Promise<boolean> {
|
||||
console.log(`checking if run button is enabled`);
|
||||
const runButton = this.page.locator('[data-id="run-graph-button"]');
|
||||
const runButton = this.page.getByTestId("primary-action-run-agent");
|
||||
return await runButton.isEnabled();
|
||||
}
|
||||
|
||||
async runAgent(): Promise<void> {
|
||||
console.log(`clicking run button`);
|
||||
const runButton = this.page.locator('[data-id="run-graph-button"]');
|
||||
const runButton = this.page.getByTestId("primary-action-run-agent");
|
||||
await runButton.click();
|
||||
await this.page.waitForTimeout(1000);
|
||||
await runButton.click();
|
||||
@@ -432,7 +424,7 @@ export class BuildPage extends BasePage {
|
||||
async waitForSaveButton(): Promise<void> {
|
||||
console.log(`waiting for save button`);
|
||||
await this.page.waitForSelector(
|
||||
'[data-testid="save-control-save-button"]:not([disabled])',
|
||||
'[data-testid="blocks-control-save-button"]:not([disabled])',
|
||||
);
|
||||
}
|
||||
|
||||
@@ -534,22 +526,27 @@ export class BuildPage extends BasePage {
|
||||
async createDummyAgent() {
|
||||
await this.closeTutorial();
|
||||
await this.openBlocksPanel();
|
||||
const dictionaryBlock = await this.getDictionaryBlockDetails();
|
||||
|
||||
const searchInput = this.page.locator(
|
||||
'[data-id="blocks-control-search-bar"] input[type="text"]',
|
||||
'[data-id="blocks-control-search-input"]',
|
||||
);
|
||||
|
||||
const displayName = this.getDisplayName(dictionaryBlock.name);
|
||||
await searchInput.clear();
|
||||
await searchInput.fill("Add to Dictionary");
|
||||
|
||||
const blockCard = this.page.locator('[data-id^="block-card-"]').first();
|
||||
try {
|
||||
await blockCard.waitFor({ state: "visible", timeout: 10000 });
|
||||
await isVisible(this.page.getByText("Output"));
|
||||
|
||||
await searchInput.fill(displayName);
|
||||
|
||||
const blockCard = this.page.getByTestId(`block-name-${dictionaryBlock.id}`);
|
||||
if (await blockCard.isVisible()) {
|
||||
await blockCard.click();
|
||||
} catch (error) {
|
||||
console.log("Could not find Add to Dictionary block:", error);
|
||||
const blockInEditor = this.page.getByTestId(dictionaryBlock.id).first();
|
||||
expect(blockInEditor).toBeAttached();
|
||||
}
|
||||
|
||||
await this.saveAgent("Test Agent", "Test Description");
|
||||
await expect(this.isRunButtonEnabled()).resolves.toBeTruthy();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -61,7 +61,7 @@ Below is a comprehensive list of all available blocks, categorized by their prim
|
||||
| [Get List Item](block-integrations/basic.md#get-list-item) | Returns the element at the given index |
|
||||
| [Get Store Agent Details](block-integrations/system/store_operations.md#get-store-agent-details) | Get detailed information about an agent from the store |
|
||||
| [Get Weather Information](block-integrations/basic.md#get-weather-information) | Retrieves weather information for a specified location using OpenWeatherMap API |
|
||||
| [Human In The Loop](block-integrations/basic.md#human-in-the-loop) | Pause execution for human review |
|
||||
| [Human In The Loop](block-integrations/basic.md#human-in-the-loop) | Pause execution and wait for human approval or modification of data |
|
||||
| [List Is Empty](block-integrations/basic.md#list-is-empty) | Checks if a list is empty |
|
||||
| [List Library Agents](block-integrations/system/library_operations.md#list-library-agents) | List all agents in your personal library |
|
||||
| [Note](block-integrations/basic.md#note) | A visual annotation block that displays a sticky note in the workflow editor for documentation and organization purposes |
|
||||
|
||||
@@ -975,7 +975,7 @@ A travel planning application could use this block to provide users with current
|
||||
## Human In The Loop
|
||||
|
||||
### What it is
|
||||
Pause execution for human review. Data flows through approved_data or rejected_data output based on the reviewer's decision. Outputs contain the actual data, not status strings.
|
||||
Pause execution and wait for human approval or modification of data
|
||||
|
||||
### How it works
|
||||
<!-- MANUAL: how_it_works -->
|
||||
@@ -988,18 +988,18 @@ This enables human oversight at critical points in automated workflows, ensuring
|
||||
|
||||
| Input | Description | Type | Required |
|
||||
|-------|-------------|------|----------|
|
||||
| data | The data to be reviewed by a human user. This exact data will be passed through to either approved_data or rejected_data output based on the reviewer's decision. | Data | Yes |
|
||||
| name | A descriptive name for what this data represents. This helps the reviewer understand what they are reviewing. | str | Yes |
|
||||
| editable | Whether the human reviewer can edit the data before approving or rejecting it | bool | No |
|
||||
| data | The data to be reviewed by a human user | Data | Yes |
|
||||
| name | A descriptive name for what this data represents | str | Yes |
|
||||
| editable | Whether the human reviewer can edit the data | bool | No |
|
||||
|
||||
### Outputs
|
||||
|
||||
| Output | Description | Type |
|
||||
|--------|-------------|------|
|
||||
| error | Error message if the operation failed | str |
|
||||
| approved_data | Outputs the input data when the reviewer APPROVES it. The value is the actual data itself (not a status string like 'APPROVED'). If the reviewer edited the data, this contains the modified version. Connect downstream blocks here for the 'approved' workflow path. | Approved Data |
|
||||
| rejected_data | Outputs the input data when the reviewer REJECTS it. The value is the actual data itself (not a status string like 'REJECTED'). If the reviewer edited the data, this contains the modified version. Connect downstream blocks here for the 'rejected' workflow path. | Rejected Data |
|
||||
| review_message | Optional message provided by the reviewer explaining their decision. Only outputs when the reviewer provides a message; this pin does not fire if no message was given. | str |
|
||||
| approved_data | The data when approved (may be modified by reviewer) | Approved Data |
|
||||
| rejected_data | The data when rejected (may be modified by reviewer) | Rejected Data |
|
||||
| review_message | Any message provided by the reviewer | str |
|
||||
|
||||
### Possible use case
|
||||
<!-- MANUAL: use_case -->
|
||||
|
||||