mirror of
https://github.com/Significant-Gravitas/AutoGPT.git
synced 2026-01-13 00:58:16 -05:00
Compare commits
7 Commits
fix/chat-f
...
swiftyos/d
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a86d750cf5 | ||
|
|
13bd648731 | ||
|
|
3d7ee7cc29 | ||
|
|
1ea52934cd | ||
|
|
7b6db6e260 | ||
|
|
2c9563353e | ||
|
|
fb2a70e2d8 |
@@ -44,7 +44,7 @@ test-data:
|
||||
cd backend && poetry run python test/test_data_creator.py
|
||||
|
||||
load-store-agents:
|
||||
cd backend && poetry run load-store-agents
|
||||
cd backend && poetry run python test/load_store_agents.py
|
||||
|
||||
help:
|
||||
@echo "Usage: make <target>"
|
||||
|
||||
@@ -1371,7 +1371,7 @@ async def create_base(
|
||||
if tables:
|
||||
params["tables"] = tables
|
||||
|
||||
logger.debug(f"Creating Airtable base with params: {params}")
|
||||
print(params)
|
||||
|
||||
response = await Requests().post(
|
||||
"https://api.airtable.com/v0/meta/bases",
|
||||
|
||||
@@ -1,108 +0,0 @@
|
||||
{
|
||||
"action": "created",
|
||||
"discussion": {
|
||||
"repository_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT",
|
||||
"category": {
|
||||
"id": 12345678,
|
||||
"node_id": "DIC_kwDOJKSTjM4CXXXX",
|
||||
"repository_id": 614765452,
|
||||
"emoji": ":pray:",
|
||||
"name": "Q&A",
|
||||
"description": "Ask the community for help",
|
||||
"created_at": "2023-03-16T09:21:07Z",
|
||||
"updated_at": "2023-03-16T09:21:07Z",
|
||||
"slug": "q-a",
|
||||
"is_answerable": true
|
||||
},
|
||||
"answer_html_url": null,
|
||||
"answer_chosen_at": null,
|
||||
"answer_chosen_by": null,
|
||||
"html_url": "https://github.com/Significant-Gravitas/AutoGPT/discussions/9999",
|
||||
"id": 5000000001,
|
||||
"node_id": "D_kwDOJKSTjM4AYYYY",
|
||||
"number": 9999,
|
||||
"title": "How do I configure custom blocks?",
|
||||
"user": {
|
||||
"login": "curious-user",
|
||||
"id": 22222222,
|
||||
"node_id": "MDQ6VXNlcjIyMjIyMjIy",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/22222222?v=4",
|
||||
"url": "https://api.github.com/users/curious-user",
|
||||
"html_url": "https://github.com/curious-user",
|
||||
"type": "User",
|
||||
"site_admin": false
|
||||
},
|
||||
"state": "open",
|
||||
"state_reason": null,
|
||||
"locked": false,
|
||||
"comments": 0,
|
||||
"created_at": "2024-12-01T17:00:00Z",
|
||||
"updated_at": "2024-12-01T17:00:00Z",
|
||||
"author_association": "NONE",
|
||||
"active_lock_reason": null,
|
||||
"body": "## Question\n\nI'm trying to create a custom block for my specific use case. I've read the documentation but I'm not sure how to:\n\n1. Define the input/output schema\n2. Handle authentication\n3. Test my block locally\n\nCan someone point me to examples or provide guidance?\n\n## Environment\n\n- AutoGPT Platform version: latest\n- Python: 3.11",
|
||||
"reactions": {
|
||||
"url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/discussions/9999/reactions",
|
||||
"total_count": 0,
|
||||
"+1": 0,
|
||||
"-1": 0,
|
||||
"laugh": 0,
|
||||
"hooray": 0,
|
||||
"confused": 0,
|
||||
"heart": 0,
|
||||
"rocket": 0,
|
||||
"eyes": 0
|
||||
},
|
||||
"timeline_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/discussions/9999/timeline"
|
||||
},
|
||||
"repository": {
|
||||
"id": 614765452,
|
||||
"node_id": "R_kgDOJKSTjA",
|
||||
"name": "AutoGPT",
|
||||
"full_name": "Significant-Gravitas/AutoGPT",
|
||||
"private": false,
|
||||
"owner": {
|
||||
"login": "Significant-Gravitas",
|
||||
"id": 130738209,
|
||||
"node_id": "O_kgDOB8roIQ",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
|
||||
"url": "https://api.github.com/users/Significant-Gravitas",
|
||||
"html_url": "https://github.com/Significant-Gravitas",
|
||||
"type": "Organization",
|
||||
"site_admin": false
|
||||
},
|
||||
"html_url": "https://github.com/Significant-Gravitas/AutoGPT",
|
||||
"description": "AutoGPT is the vision of accessible AI for everyone, to use and to build on.",
|
||||
"fork": false,
|
||||
"url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT",
|
||||
"created_at": "2023-03-16T09:21:07Z",
|
||||
"updated_at": "2024-12-01T17:00:00Z",
|
||||
"pushed_at": "2024-12-01T12:00:00Z",
|
||||
"stargazers_count": 170000,
|
||||
"watchers_count": 170000,
|
||||
"language": "Python",
|
||||
"has_discussions": true,
|
||||
"forks_count": 45000,
|
||||
"visibility": "public",
|
||||
"default_branch": "master"
|
||||
},
|
||||
"organization": {
|
||||
"login": "Significant-Gravitas",
|
||||
"id": 130738209,
|
||||
"node_id": "O_kgDOB8roIQ",
|
||||
"url": "https://api.github.com/orgs/Significant-Gravitas",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
|
||||
"description": ""
|
||||
},
|
||||
"sender": {
|
||||
"login": "curious-user",
|
||||
"id": 22222222,
|
||||
"node_id": "MDQ6VXNlcjIyMjIyMjIy",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/22222222?v=4",
|
||||
"gravatar_id": "",
|
||||
"url": "https://api.github.com/users/curious-user",
|
||||
"html_url": "https://github.com/curious-user",
|
||||
"type": "User",
|
||||
"site_admin": false
|
||||
}
|
||||
}
|
||||
@@ -1,112 +0,0 @@
|
||||
{
|
||||
"action": "opened",
|
||||
"issue": {
|
||||
"url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/12345",
|
||||
"repository_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT",
|
||||
"labels_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/12345/labels{/name}",
|
||||
"comments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/12345/comments",
|
||||
"events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/12345/events",
|
||||
"html_url": "https://github.com/Significant-Gravitas/AutoGPT/issues/12345",
|
||||
"id": 2000000001,
|
||||
"node_id": "I_kwDOJKSTjM5wXXXX",
|
||||
"number": 12345,
|
||||
"title": "Bug: Application crashes when processing large files",
|
||||
"user": {
|
||||
"login": "bug-reporter",
|
||||
"id": 11111111,
|
||||
"node_id": "MDQ6VXNlcjExMTExMTEx",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/11111111?v=4",
|
||||
"url": "https://api.github.com/users/bug-reporter",
|
||||
"html_url": "https://github.com/bug-reporter",
|
||||
"type": "User",
|
||||
"site_admin": false
|
||||
},
|
||||
"labels": [
|
||||
{
|
||||
"id": 5272676214,
|
||||
"node_id": "LA_kwDOJKSTjM8AAAABOkandg",
|
||||
"url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/bug",
|
||||
"name": "bug",
|
||||
"color": "d73a4a",
|
||||
"default": true,
|
||||
"description": "Something isn't working"
|
||||
}
|
||||
],
|
||||
"state": "open",
|
||||
"locked": false,
|
||||
"assignee": null,
|
||||
"assignees": [],
|
||||
"milestone": null,
|
||||
"comments": 0,
|
||||
"created_at": "2024-12-01T16:00:00Z",
|
||||
"updated_at": "2024-12-01T16:00:00Z",
|
||||
"closed_at": null,
|
||||
"author_association": "NONE",
|
||||
"active_lock_reason": null,
|
||||
"body": "## Description\n\nWhen I try to process a file larger than 100MB, the application crashes with an out of memory error.\n\n## Steps to Reproduce\n\n1. Open the application\n2. Select a file larger than 100MB\n3. Click 'Process'\n4. Application crashes\n\n## Expected Behavior\n\nThe application should handle large files gracefully.\n\n## Environment\n\n- OS: Ubuntu 22.04\n- Python: 3.11\n- AutoGPT Version: 1.0.0",
|
||||
"reactions": {
|
||||
"url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/12345/reactions",
|
||||
"total_count": 0,
|
||||
"+1": 0,
|
||||
"-1": 0,
|
||||
"laugh": 0,
|
||||
"hooray": 0,
|
||||
"confused": 0,
|
||||
"heart": 0,
|
||||
"rocket": 0,
|
||||
"eyes": 0
|
||||
},
|
||||
"timeline_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/12345/timeline",
|
||||
"state_reason": null
|
||||
},
|
||||
"repository": {
|
||||
"id": 614765452,
|
||||
"node_id": "R_kgDOJKSTjA",
|
||||
"name": "AutoGPT",
|
||||
"full_name": "Significant-Gravitas/AutoGPT",
|
||||
"private": false,
|
||||
"owner": {
|
||||
"login": "Significant-Gravitas",
|
||||
"id": 130738209,
|
||||
"node_id": "O_kgDOB8roIQ",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
|
||||
"url": "https://api.github.com/users/Significant-Gravitas",
|
||||
"html_url": "https://github.com/Significant-Gravitas",
|
||||
"type": "Organization",
|
||||
"site_admin": false
|
||||
},
|
||||
"html_url": "https://github.com/Significant-Gravitas/AutoGPT",
|
||||
"description": "AutoGPT is the vision of accessible AI for everyone, to use and to build on.",
|
||||
"fork": false,
|
||||
"url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT",
|
||||
"created_at": "2023-03-16T09:21:07Z",
|
||||
"updated_at": "2024-12-01T16:00:00Z",
|
||||
"pushed_at": "2024-12-01T12:00:00Z",
|
||||
"stargazers_count": 170000,
|
||||
"watchers_count": 170000,
|
||||
"language": "Python",
|
||||
"forks_count": 45000,
|
||||
"open_issues_count": 190,
|
||||
"visibility": "public",
|
||||
"default_branch": "master"
|
||||
},
|
||||
"organization": {
|
||||
"login": "Significant-Gravitas",
|
||||
"id": 130738209,
|
||||
"node_id": "O_kgDOB8roIQ",
|
||||
"url": "https://api.github.com/orgs/Significant-Gravitas",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
|
||||
"description": ""
|
||||
},
|
||||
"sender": {
|
||||
"login": "bug-reporter",
|
||||
"id": 11111111,
|
||||
"node_id": "MDQ6VXNlcjExMTExMTEx",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/11111111?v=4",
|
||||
"gravatar_id": "",
|
||||
"url": "https://api.github.com/users/bug-reporter",
|
||||
"html_url": "https://github.com/bug-reporter",
|
||||
"type": "User",
|
||||
"site_admin": false
|
||||
}
|
||||
}
|
||||
@@ -1,97 +0,0 @@
|
||||
{
|
||||
"action": "published",
|
||||
"release": {
|
||||
"url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/releases/123456789",
|
||||
"assets_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/releases/123456789/assets",
|
||||
"upload_url": "https://uploads.github.com/repos/Significant-Gravitas/AutoGPT/releases/123456789/assets{?name,label}",
|
||||
"html_url": "https://github.com/Significant-Gravitas/AutoGPT/releases/tag/v1.0.0",
|
||||
"id": 123456789,
|
||||
"author": {
|
||||
"login": "ntindle",
|
||||
"id": 12345678,
|
||||
"node_id": "MDQ6VXNlcjEyMzQ1Njc4",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/12345678?v=4",
|
||||
"gravatar_id": "",
|
||||
"url": "https://api.github.com/users/ntindle",
|
||||
"html_url": "https://github.com/ntindle",
|
||||
"type": "User",
|
||||
"site_admin": false
|
||||
},
|
||||
"node_id": "RE_kwDOJKSTjM4HWwAA",
|
||||
"tag_name": "v1.0.0",
|
||||
"target_commitish": "master",
|
||||
"name": "AutoGPT Platform v1.0.0",
|
||||
"draft": false,
|
||||
"prerelease": false,
|
||||
"created_at": "2024-12-01T10:00:00Z",
|
||||
"published_at": "2024-12-01T12:00:00Z",
|
||||
"assets": [
|
||||
{
|
||||
"url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/releases/assets/987654321",
|
||||
"id": 987654321,
|
||||
"node_id": "RA_kwDOJKSTjM4HWwBB",
|
||||
"name": "autogpt-v1.0.0.zip",
|
||||
"label": "Release Package",
|
||||
"content_type": "application/zip",
|
||||
"state": "uploaded",
|
||||
"size": 52428800,
|
||||
"download_count": 0,
|
||||
"created_at": "2024-12-01T11:30:00Z",
|
||||
"updated_at": "2024-12-01T11:35:00Z",
|
||||
"browser_download_url": "https://github.com/Significant-Gravitas/AutoGPT/releases/download/v1.0.0/autogpt-v1.0.0.zip"
|
||||
}
|
||||
],
|
||||
"tarball_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/tarball/v1.0.0",
|
||||
"zipball_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/zipball/v1.0.0",
|
||||
"body": "## What's New\n\n- Feature 1: Amazing new capability\n- Feature 2: Performance improvements\n- Bug fixes and stability improvements\n\n## Breaking Changes\n\nNone\n\n## Contributors\n\nThanks to all our contributors!"
|
||||
},
|
||||
"repository": {
|
||||
"id": 614765452,
|
||||
"node_id": "R_kgDOJKSTjA",
|
||||
"name": "AutoGPT",
|
||||
"full_name": "Significant-Gravitas/AutoGPT",
|
||||
"private": false,
|
||||
"owner": {
|
||||
"login": "Significant-Gravitas",
|
||||
"id": 130738209,
|
||||
"node_id": "O_kgDOB8roIQ",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
|
||||
"url": "https://api.github.com/users/Significant-Gravitas",
|
||||
"html_url": "https://github.com/Significant-Gravitas",
|
||||
"type": "Organization",
|
||||
"site_admin": false
|
||||
},
|
||||
"html_url": "https://github.com/Significant-Gravitas/AutoGPT",
|
||||
"description": "AutoGPT is the vision of accessible AI for everyone, to use and to build on.",
|
||||
"fork": false,
|
||||
"url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT",
|
||||
"created_at": "2023-03-16T09:21:07Z",
|
||||
"updated_at": "2024-12-01T12:00:00Z",
|
||||
"pushed_at": "2024-12-01T12:00:00Z",
|
||||
"stargazers_count": 170000,
|
||||
"watchers_count": 170000,
|
||||
"language": "Python",
|
||||
"forks_count": 45000,
|
||||
"visibility": "public",
|
||||
"default_branch": "master"
|
||||
},
|
||||
"organization": {
|
||||
"login": "Significant-Gravitas",
|
||||
"id": 130738209,
|
||||
"node_id": "O_kgDOB8roIQ",
|
||||
"url": "https://api.github.com/orgs/Significant-Gravitas",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
|
||||
"description": ""
|
||||
},
|
||||
"sender": {
|
||||
"login": "ntindle",
|
||||
"id": 12345678,
|
||||
"node_id": "MDQ6VXNlcjEyMzQ1Njc4",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/12345678?v=4",
|
||||
"gravatar_id": "",
|
||||
"url": "https://api.github.com/users/ntindle",
|
||||
"html_url": "https://github.com/ntindle",
|
||||
"type": "User",
|
||||
"site_admin": false
|
||||
}
|
||||
}
|
||||
@@ -1,53 +0,0 @@
|
||||
{
|
||||
"action": "created",
|
||||
"starred_at": "2024-12-01T15:30:00Z",
|
||||
"repository": {
|
||||
"id": 614765452,
|
||||
"node_id": "R_kgDOJKSTjA",
|
||||
"name": "AutoGPT",
|
||||
"full_name": "Significant-Gravitas/AutoGPT",
|
||||
"private": false,
|
||||
"owner": {
|
||||
"login": "Significant-Gravitas",
|
||||
"id": 130738209,
|
||||
"node_id": "O_kgDOB8roIQ",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
|
||||
"url": "https://api.github.com/users/Significant-Gravitas",
|
||||
"html_url": "https://github.com/Significant-Gravitas",
|
||||
"type": "Organization",
|
||||
"site_admin": false
|
||||
},
|
||||
"html_url": "https://github.com/Significant-Gravitas/AutoGPT",
|
||||
"description": "AutoGPT is the vision of accessible AI for everyone, to use and to build on.",
|
||||
"fork": false,
|
||||
"url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT",
|
||||
"created_at": "2023-03-16T09:21:07Z",
|
||||
"updated_at": "2024-12-01T15:30:00Z",
|
||||
"pushed_at": "2024-12-01T12:00:00Z",
|
||||
"stargazers_count": 170001,
|
||||
"watchers_count": 170001,
|
||||
"language": "Python",
|
||||
"forks_count": 45000,
|
||||
"visibility": "public",
|
||||
"default_branch": "master"
|
||||
},
|
||||
"organization": {
|
||||
"login": "Significant-Gravitas",
|
||||
"id": 130738209,
|
||||
"node_id": "O_kgDOB8roIQ",
|
||||
"url": "https://api.github.com/orgs/Significant-Gravitas",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
|
||||
"description": ""
|
||||
},
|
||||
"sender": {
|
||||
"login": "awesome-contributor",
|
||||
"id": 98765432,
|
||||
"node_id": "MDQ6VXNlcjk4NzY1NDMy",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/98765432?v=4",
|
||||
"gravatar_id": "",
|
||||
"url": "https://api.github.com/users/awesome-contributor",
|
||||
"html_url": "https://github.com/awesome-contributor",
|
||||
"type": "User",
|
||||
"site_admin": false
|
||||
}
|
||||
}
|
||||
@@ -159,391 +159,3 @@ class GithubPullRequestTriggerBlock(GitHubTriggerBase, Block):
|
||||
|
||||
|
||||
# --8<-- [end:GithubTriggerExample]
|
||||
|
||||
|
||||
class GithubStarTriggerBlock(GitHubTriggerBase, Block):
|
||||
"""Trigger block for GitHub star events - useful for milestone celebrations."""
|
||||
|
||||
EXAMPLE_PAYLOAD_FILE = (
|
||||
Path(__file__).parent / "example_payloads" / "star.created.json"
|
||||
)
|
||||
|
||||
class Input(GitHubTriggerBase.Input):
|
||||
class EventsFilter(BaseModel):
|
||||
"""
|
||||
https://docs.github.com/en/webhooks/webhook-events-and-payloads#star
|
||||
"""
|
||||
|
||||
created: bool = False
|
||||
deleted: bool = False
|
||||
|
||||
events: EventsFilter = SchemaField(
|
||||
title="Events", description="The star events to subscribe to"
|
||||
)
|
||||
|
||||
class Output(GitHubTriggerBase.Output):
|
||||
event: str = SchemaField(
|
||||
description="The star event that triggered the webhook ('created' or 'deleted')"
|
||||
)
|
||||
starred_at: str = SchemaField(
|
||||
description="ISO timestamp when the repo was starred (empty if deleted)"
|
||||
)
|
||||
stargazers_count: int = SchemaField(
|
||||
description="Current number of stars on the repository"
|
||||
)
|
||||
repository_name: str = SchemaField(
|
||||
description="Full name of the repository (owner/repo)"
|
||||
)
|
||||
repository_url: str = SchemaField(description="URL to the repository")
|
||||
|
||||
def __init__(self):
|
||||
from backend.integrations.webhooks.github import GithubWebhookType
|
||||
|
||||
example_payload = json.loads(
|
||||
self.EXAMPLE_PAYLOAD_FILE.read_text(encoding="utf-8")
|
||||
)
|
||||
|
||||
super().__init__(
|
||||
id="551e0a35-100b-49b7-89b8-3031322239b6",
|
||||
description="This block triggers on GitHub star events. "
|
||||
"Useful for celebrating milestones (e.g., 1k, 10k stars) or tracking engagement.",
|
||||
categories={BlockCategory.DEVELOPER_TOOLS, BlockCategory.INPUT},
|
||||
input_schema=GithubStarTriggerBlock.Input,
|
||||
output_schema=GithubStarTriggerBlock.Output,
|
||||
webhook_config=BlockWebhookConfig(
|
||||
provider=ProviderName.GITHUB,
|
||||
webhook_type=GithubWebhookType.REPO,
|
||||
resource_format="{repo}",
|
||||
event_filter_input="events",
|
||||
event_format="star.{event}",
|
||||
),
|
||||
test_input={
|
||||
"repo": "Significant-Gravitas/AutoGPT",
|
||||
"events": {"created": True},
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"payload": example_payload,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("payload", example_payload),
|
||||
("triggered_by_user", example_payload["sender"]),
|
||||
("event", example_payload["action"]),
|
||||
("starred_at", example_payload.get("starred_at", "")),
|
||||
("stargazers_count", example_payload["repository"]["stargazers_count"]),
|
||||
("repository_name", example_payload["repository"]["full_name"]),
|
||||
("repository_url", example_payload["repository"]["html_url"]),
|
||||
],
|
||||
)
|
||||
|
||||
async def run(self, input_data: Input, **kwargs) -> BlockOutput: # type: ignore
|
||||
async for name, value in super().run(input_data, **kwargs):
|
||||
yield name, value
|
||||
yield "event", input_data.payload["action"]
|
||||
yield "starred_at", input_data.payload.get("starred_at", "")
|
||||
yield "stargazers_count", input_data.payload["repository"]["stargazers_count"]
|
||||
yield "repository_name", input_data.payload["repository"]["full_name"]
|
||||
yield "repository_url", input_data.payload["repository"]["html_url"]
|
||||
|
||||
|
||||
class GithubReleaseTriggerBlock(GitHubTriggerBase, Block):
|
||||
"""Trigger block for GitHub release events - ideal for announcing new versions."""
|
||||
|
||||
EXAMPLE_PAYLOAD_FILE = (
|
||||
Path(__file__).parent / "example_payloads" / "release.published.json"
|
||||
)
|
||||
|
||||
class Input(GitHubTriggerBase.Input):
|
||||
class EventsFilter(BaseModel):
|
||||
"""
|
||||
https://docs.github.com/en/webhooks/webhook-events-and-payloads#release
|
||||
"""
|
||||
|
||||
published: bool = False
|
||||
unpublished: bool = False
|
||||
created: bool = False
|
||||
edited: bool = False
|
||||
deleted: bool = False
|
||||
prereleased: bool = False
|
||||
released: bool = False
|
||||
|
||||
events: EventsFilter = SchemaField(
|
||||
title="Events", description="The release events to subscribe to"
|
||||
)
|
||||
|
||||
class Output(GitHubTriggerBase.Output):
|
||||
event: str = SchemaField(
|
||||
description="The release event that triggered the webhook (e.g., 'published')"
|
||||
)
|
||||
release: dict = SchemaField(description="The full release object")
|
||||
release_url: str = SchemaField(description="URL to the release page")
|
||||
tag_name: str = SchemaField(description="The release tag name (e.g., 'v1.0.0')")
|
||||
release_name: str = SchemaField(description="Human-readable release name")
|
||||
body: str = SchemaField(description="Release notes/description")
|
||||
prerelease: bool = SchemaField(description="Whether this is a prerelease")
|
||||
draft: bool = SchemaField(description="Whether this is a draft release")
|
||||
assets: list = SchemaField(description="List of release assets/files")
|
||||
|
||||
def __init__(self):
|
||||
from backend.integrations.webhooks.github import GithubWebhookType
|
||||
|
||||
example_payload = json.loads(
|
||||
self.EXAMPLE_PAYLOAD_FILE.read_text(encoding="utf-8")
|
||||
)
|
||||
|
||||
super().__init__(
|
||||
id="2052dd1b-74e1-46ac-9c87-c7a0e057b60b",
|
||||
description="This block triggers on GitHub release events. "
|
||||
"Perfect for automating announcements to Discord, Twitter, or other platforms.",
|
||||
categories={BlockCategory.DEVELOPER_TOOLS, BlockCategory.INPUT},
|
||||
input_schema=GithubReleaseTriggerBlock.Input,
|
||||
output_schema=GithubReleaseTriggerBlock.Output,
|
||||
webhook_config=BlockWebhookConfig(
|
||||
provider=ProviderName.GITHUB,
|
||||
webhook_type=GithubWebhookType.REPO,
|
||||
resource_format="{repo}",
|
||||
event_filter_input="events",
|
||||
event_format="release.{event}",
|
||||
),
|
||||
test_input={
|
||||
"repo": "Significant-Gravitas/AutoGPT",
|
||||
"events": {"published": True},
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"payload": example_payload,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("payload", example_payload),
|
||||
("triggered_by_user", example_payload["sender"]),
|
||||
("event", example_payload["action"]),
|
||||
("release", example_payload["release"]),
|
||||
("release_url", example_payload["release"]["html_url"]),
|
||||
("tag_name", example_payload["release"]["tag_name"]),
|
||||
("release_name", example_payload["release"]["name"]),
|
||||
("body", example_payload["release"]["body"]),
|
||||
("prerelease", example_payload["release"]["prerelease"]),
|
||||
("draft", example_payload["release"]["draft"]),
|
||||
("assets", example_payload["release"]["assets"]),
|
||||
],
|
||||
)
|
||||
|
||||
async def run(self, input_data: Input, **kwargs) -> BlockOutput: # type: ignore
|
||||
async for name, value in super().run(input_data, **kwargs):
|
||||
yield name, value
|
||||
release = input_data.payload["release"]
|
||||
yield "event", input_data.payload["action"]
|
||||
yield "release", release
|
||||
yield "release_url", release["html_url"]
|
||||
yield "tag_name", release["tag_name"]
|
||||
yield "release_name", release.get("name", "")
|
||||
yield "body", release.get("body", "")
|
||||
yield "prerelease", release["prerelease"]
|
||||
yield "draft", release["draft"]
|
||||
yield "assets", release["assets"]
|
||||
|
||||
|
||||
class GithubIssuesTriggerBlock(GitHubTriggerBase, Block):
|
||||
"""Trigger block for GitHub issues events - great for triage and notifications."""
|
||||
|
||||
EXAMPLE_PAYLOAD_FILE = (
|
||||
Path(__file__).parent / "example_payloads" / "issues.opened.json"
|
||||
)
|
||||
|
||||
class Input(GitHubTriggerBase.Input):
|
||||
class EventsFilter(BaseModel):
|
||||
"""
|
||||
https://docs.github.com/en/webhooks/webhook-events-and-payloads#issues
|
||||
"""
|
||||
|
||||
opened: bool = False
|
||||
edited: bool = False
|
||||
deleted: bool = False
|
||||
closed: bool = False
|
||||
reopened: bool = False
|
||||
assigned: bool = False
|
||||
unassigned: bool = False
|
||||
labeled: bool = False
|
||||
unlabeled: bool = False
|
||||
locked: bool = False
|
||||
unlocked: bool = False
|
||||
transferred: bool = False
|
||||
milestoned: bool = False
|
||||
demilestoned: bool = False
|
||||
pinned: bool = False
|
||||
unpinned: bool = False
|
||||
|
||||
events: EventsFilter = SchemaField(
|
||||
title="Events", description="The issue events to subscribe to"
|
||||
)
|
||||
|
||||
class Output(GitHubTriggerBase.Output):
|
||||
event: str = SchemaField(
|
||||
description="The issue event that triggered the webhook (e.g., 'opened')"
|
||||
)
|
||||
number: int = SchemaField(description="The issue number")
|
||||
issue: dict = SchemaField(description="The full issue object")
|
||||
issue_url: str = SchemaField(description="URL to the issue")
|
||||
issue_title: str = SchemaField(description="The issue title")
|
||||
issue_body: str = SchemaField(description="The issue body/description")
|
||||
labels: list = SchemaField(description="List of labels on the issue")
|
||||
assignees: list = SchemaField(description="List of assignees")
|
||||
state: str = SchemaField(description="Issue state ('open' or 'closed')")
|
||||
|
||||
def __init__(self):
|
||||
from backend.integrations.webhooks.github import GithubWebhookType
|
||||
|
||||
example_payload = json.loads(
|
||||
self.EXAMPLE_PAYLOAD_FILE.read_text(encoding="utf-8")
|
||||
)
|
||||
|
||||
super().__init__(
|
||||
id="b2605464-e486-4bf4-aad3-d8a213c8a48a",
|
||||
description="This block triggers on GitHub issues events. "
|
||||
"Useful for automated triage, notifications, and welcoming first-time contributors.",
|
||||
categories={BlockCategory.DEVELOPER_TOOLS, BlockCategory.INPUT},
|
||||
input_schema=GithubIssuesTriggerBlock.Input,
|
||||
output_schema=GithubIssuesTriggerBlock.Output,
|
||||
webhook_config=BlockWebhookConfig(
|
||||
provider=ProviderName.GITHUB,
|
||||
webhook_type=GithubWebhookType.REPO,
|
||||
resource_format="{repo}",
|
||||
event_filter_input="events",
|
||||
event_format="issues.{event}",
|
||||
),
|
||||
test_input={
|
||||
"repo": "Significant-Gravitas/AutoGPT",
|
||||
"events": {"opened": True},
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"payload": example_payload,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("payload", example_payload),
|
||||
("triggered_by_user", example_payload["sender"]),
|
||||
("event", example_payload["action"]),
|
||||
("number", example_payload["issue"]["number"]),
|
||||
("issue", example_payload["issue"]),
|
||||
("issue_url", example_payload["issue"]["html_url"]),
|
||||
("issue_title", example_payload["issue"]["title"]),
|
||||
("issue_body", example_payload["issue"]["body"]),
|
||||
("labels", example_payload["issue"]["labels"]),
|
||||
("assignees", example_payload["issue"]["assignees"]),
|
||||
("state", example_payload["issue"]["state"]),
|
||||
],
|
||||
)
|
||||
|
||||
async def run(self, input_data: Input, **kwargs) -> BlockOutput: # type: ignore
|
||||
async for name, value in super().run(input_data, **kwargs):
|
||||
yield name, value
|
||||
issue = input_data.payload["issue"]
|
||||
yield "event", input_data.payload["action"]
|
||||
yield "number", issue["number"]
|
||||
yield "issue", issue
|
||||
yield "issue_url", issue["html_url"]
|
||||
yield "issue_title", issue["title"]
|
||||
yield "issue_body", issue.get("body") or ""
|
||||
yield "labels", issue["labels"]
|
||||
yield "assignees", issue["assignees"]
|
||||
yield "state", issue["state"]
|
||||
|
||||
|
||||
class GithubDiscussionTriggerBlock(GitHubTriggerBase, Block):
|
||||
"""Trigger block for GitHub discussion events - perfect for community Q&A sync."""
|
||||
|
||||
EXAMPLE_PAYLOAD_FILE = (
|
||||
Path(__file__).parent / "example_payloads" / "discussion.created.json"
|
||||
)
|
||||
|
||||
class Input(GitHubTriggerBase.Input):
|
||||
class EventsFilter(BaseModel):
|
||||
"""
|
||||
https://docs.github.com/en/webhooks/webhook-events-and-payloads#discussion
|
||||
"""
|
||||
|
||||
created: bool = False
|
||||
edited: bool = False
|
||||
deleted: bool = False
|
||||
answered: bool = False
|
||||
unanswered: bool = False
|
||||
labeled: bool = False
|
||||
unlabeled: bool = False
|
||||
locked: bool = False
|
||||
unlocked: bool = False
|
||||
category_changed: bool = False
|
||||
transferred: bool = False
|
||||
pinned: bool = False
|
||||
unpinned: bool = False
|
||||
|
||||
events: EventsFilter = SchemaField(
|
||||
title="Events", description="The discussion events to subscribe to"
|
||||
)
|
||||
|
||||
class Output(GitHubTriggerBase.Output):
|
||||
event: str = SchemaField(
|
||||
description="The discussion event that triggered the webhook"
|
||||
)
|
||||
number: int = SchemaField(description="The discussion number")
|
||||
discussion: dict = SchemaField(description="The full discussion object")
|
||||
discussion_url: str = SchemaField(description="URL to the discussion")
|
||||
title: str = SchemaField(description="The discussion title")
|
||||
body: str = SchemaField(description="The discussion body")
|
||||
category: dict = SchemaField(description="The discussion category object")
|
||||
category_name: str = SchemaField(description="Name of the category")
|
||||
state: str = SchemaField(description="Discussion state")
|
||||
|
||||
def __init__(self):
|
||||
from backend.integrations.webhooks.github import GithubWebhookType
|
||||
|
||||
example_payload = json.loads(
|
||||
self.EXAMPLE_PAYLOAD_FILE.read_text(encoding="utf-8")
|
||||
)
|
||||
|
||||
super().__init__(
|
||||
id="87f847b3-d81a-424e-8e89-acadb5c9d52b",
|
||||
description="This block triggers on GitHub Discussions events. "
|
||||
"Great for syncing Q&A to Discord or auto-responding to common questions. "
|
||||
"Note: Discussions must be enabled on the repository.",
|
||||
categories={BlockCategory.DEVELOPER_TOOLS, BlockCategory.INPUT},
|
||||
input_schema=GithubDiscussionTriggerBlock.Input,
|
||||
output_schema=GithubDiscussionTriggerBlock.Output,
|
||||
webhook_config=BlockWebhookConfig(
|
||||
provider=ProviderName.GITHUB,
|
||||
webhook_type=GithubWebhookType.REPO,
|
||||
resource_format="{repo}",
|
||||
event_filter_input="events",
|
||||
event_format="discussion.{event}",
|
||||
),
|
||||
test_input={
|
||||
"repo": "Significant-Gravitas/AutoGPT",
|
||||
"events": {"created": True},
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"payload": example_payload,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("payload", example_payload),
|
||||
("triggered_by_user", example_payload["sender"]),
|
||||
("event", example_payload["action"]),
|
||||
("number", example_payload["discussion"]["number"]),
|
||||
("discussion", example_payload["discussion"]),
|
||||
("discussion_url", example_payload["discussion"]["html_url"]),
|
||||
("title", example_payload["discussion"]["title"]),
|
||||
("body", example_payload["discussion"]["body"]),
|
||||
("category", example_payload["discussion"]["category"]),
|
||||
("category_name", example_payload["discussion"]["category"]["name"]),
|
||||
("state", example_payload["discussion"]["state"]),
|
||||
],
|
||||
)
|
||||
|
||||
async def run(self, input_data: Input, **kwargs) -> BlockOutput: # type: ignore
|
||||
async for name, value in super().run(input_data, **kwargs):
|
||||
yield name, value
|
||||
discussion = input_data.payload["discussion"]
|
||||
yield "event", input_data.payload["action"]
|
||||
yield "number", discussion["number"]
|
||||
yield "discussion", discussion
|
||||
yield "discussion_url", discussion["html_url"]
|
||||
yield "title", discussion["title"]
|
||||
yield "body", discussion.get("body") or ""
|
||||
yield "category", discussion["category"]
|
||||
yield "category_name", discussion["category"]["name"]
|
||||
yield "state", discussion["state"]
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,4 +1,3 @@
|
||||
import logging
|
||||
from datetime import datetime, timezone
|
||||
from typing import Iterator, Literal
|
||||
|
||||
@@ -65,7 +64,6 @@ class RedditComment(BaseModel):
|
||||
|
||||
|
||||
settings = Settings()
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_praw(creds: RedditCredentials) -> praw.Reddit:
|
||||
@@ -79,7 +77,7 @@ def get_praw(creds: RedditCredentials) -> praw.Reddit:
|
||||
me = client.user.me()
|
||||
if not me:
|
||||
raise ValueError("Invalid Reddit credentials.")
|
||||
logger.info(f"Logged in as Reddit user: {me.name}")
|
||||
print(f"Logged in as Reddit user: {me.name}")
|
||||
return client
|
||||
|
||||
|
||||
|
||||
@@ -1,45 +1,12 @@
|
||||
import logging
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Optional
|
||||
|
||||
import prisma.types
|
||||
from pydantic import BaseModel
|
||||
|
||||
from backend.data.db import query_raw_with_schema
|
||||
from backend.util.json import SafeJson
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AccuracyAlertData(BaseModel):
|
||||
"""Alert data when accuracy drops significantly."""
|
||||
|
||||
graph_id: str
|
||||
user_id: Optional[str]
|
||||
drop_percent: float
|
||||
three_day_avg: float
|
||||
seven_day_avg: float
|
||||
detected_at: datetime
|
||||
|
||||
|
||||
class AccuracyLatestData(BaseModel):
|
||||
"""Latest execution accuracy data point."""
|
||||
|
||||
date: datetime
|
||||
daily_score: Optional[float]
|
||||
three_day_avg: Optional[float]
|
||||
seven_day_avg: Optional[float]
|
||||
fourteen_day_avg: Optional[float]
|
||||
|
||||
|
||||
class AccuracyTrendsResponse(BaseModel):
|
||||
"""Response model for accuracy trends and alerts."""
|
||||
|
||||
latest_data: AccuracyLatestData
|
||||
alert: Optional[AccuracyAlertData]
|
||||
historical_data: Optional[list[AccuracyLatestData]] = None
|
||||
|
||||
|
||||
async def log_raw_analytics(
|
||||
user_id: str,
|
||||
type: str,
|
||||
@@ -76,217 +43,3 @@ async def log_raw_metric(
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
async def get_accuracy_trends_and_alerts(
|
||||
graph_id: str,
|
||||
days_back: int = 30,
|
||||
user_id: Optional[str] = None,
|
||||
drop_threshold: float = 10.0,
|
||||
include_historical: bool = False,
|
||||
) -> AccuracyTrendsResponse:
|
||||
"""Get accuracy trends and detect alerts for a specific graph."""
|
||||
query_template = """
|
||||
WITH daily_scores AS (
|
||||
SELECT
|
||||
DATE(e."createdAt") as execution_date,
|
||||
AVG(CASE
|
||||
WHEN e.stats IS NOT NULL
|
||||
AND e.stats::json->>'correctness_score' IS NOT NULL
|
||||
AND e.stats::json->>'correctness_score' != 'null'
|
||||
THEN (e.stats::json->>'correctness_score')::float * 100
|
||||
ELSE NULL
|
||||
END) as daily_score
|
||||
FROM {schema_prefix}"AgentGraphExecution" e
|
||||
WHERE e."agentGraphId" = $1::text
|
||||
AND e."isDeleted" = false
|
||||
AND e."createdAt" >= $2::timestamp
|
||||
AND e."executionStatus" IN ('COMPLETED', 'FAILED', 'TERMINATED')
|
||||
{user_filter}
|
||||
GROUP BY DATE(e."createdAt")
|
||||
HAVING COUNT(*) >= 3 -- Need at least 3 executions per day
|
||||
),
|
||||
trends AS (
|
||||
SELECT
|
||||
execution_date,
|
||||
daily_score,
|
||||
AVG(daily_score) OVER (
|
||||
ORDER BY execution_date
|
||||
ROWS BETWEEN 2 PRECEDING AND CURRENT ROW
|
||||
) as three_day_avg,
|
||||
AVG(daily_score) OVER (
|
||||
ORDER BY execution_date
|
||||
ROWS BETWEEN 6 PRECEDING AND CURRENT ROW
|
||||
) as seven_day_avg,
|
||||
AVG(daily_score) OVER (
|
||||
ORDER BY execution_date
|
||||
ROWS BETWEEN 13 PRECEDING AND CURRENT ROW
|
||||
) as fourteen_day_avg
|
||||
FROM daily_scores
|
||||
)
|
||||
SELECT *,
|
||||
CASE
|
||||
WHEN three_day_avg IS NOT NULL AND seven_day_avg IS NOT NULL AND seven_day_avg > 0
|
||||
THEN ((seven_day_avg - three_day_avg) / seven_day_avg * 100)
|
||||
ELSE NULL
|
||||
END as drop_percent
|
||||
FROM trends
|
||||
ORDER BY execution_date DESC
|
||||
{limit_clause}
|
||||
"""
|
||||
|
||||
start_date = datetime.now(timezone.utc) - timedelta(days=days_back)
|
||||
params = [graph_id, start_date]
|
||||
user_filter = ""
|
||||
if user_id:
|
||||
user_filter = 'AND e."userId" = $3::text'
|
||||
params.append(user_id)
|
||||
|
||||
# Determine limit clause
|
||||
limit_clause = "" if include_historical else "LIMIT 1"
|
||||
|
||||
final_query = query_template.format(
|
||||
schema_prefix="{schema_prefix}",
|
||||
user_filter=user_filter,
|
||||
limit_clause=limit_clause,
|
||||
)
|
||||
|
||||
result = await query_raw_with_schema(final_query, *params)
|
||||
|
||||
if not result:
|
||||
return AccuracyTrendsResponse(
|
||||
latest_data=AccuracyLatestData(
|
||||
date=datetime.now(timezone.utc),
|
||||
daily_score=None,
|
||||
three_day_avg=None,
|
||||
seven_day_avg=None,
|
||||
fourteen_day_avg=None,
|
||||
),
|
||||
alert=None,
|
||||
)
|
||||
|
||||
latest = result[0]
|
||||
|
||||
alert = None
|
||||
if (
|
||||
latest["drop_percent"] is not None
|
||||
and latest["drop_percent"] >= drop_threshold
|
||||
and latest["three_day_avg"] is not None
|
||||
and latest["seven_day_avg"] is not None
|
||||
):
|
||||
alert = AccuracyAlertData(
|
||||
graph_id=graph_id,
|
||||
user_id=user_id,
|
||||
drop_percent=float(latest["drop_percent"]),
|
||||
three_day_avg=float(latest["three_day_avg"]),
|
||||
seven_day_avg=float(latest["seven_day_avg"]),
|
||||
detected_at=datetime.now(timezone.utc),
|
||||
)
|
||||
|
||||
# Prepare historical data if requested
|
||||
historical_data = None
|
||||
if include_historical:
|
||||
historical_data = []
|
||||
for row in result:
|
||||
historical_data.append(
|
||||
AccuracyLatestData(
|
||||
date=row["execution_date"],
|
||||
daily_score=(
|
||||
float(row["daily_score"])
|
||||
if row["daily_score"] is not None
|
||||
else None
|
||||
),
|
||||
three_day_avg=(
|
||||
float(row["three_day_avg"])
|
||||
if row["three_day_avg"] is not None
|
||||
else None
|
||||
),
|
||||
seven_day_avg=(
|
||||
float(row["seven_day_avg"])
|
||||
if row["seven_day_avg"] is not None
|
||||
else None
|
||||
),
|
||||
fourteen_day_avg=(
|
||||
float(row["fourteen_day_avg"])
|
||||
if row["fourteen_day_avg"] is not None
|
||||
else None
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
return AccuracyTrendsResponse(
|
||||
latest_data=AccuracyLatestData(
|
||||
date=latest["execution_date"],
|
||||
daily_score=(
|
||||
float(latest["daily_score"])
|
||||
if latest["daily_score"] is not None
|
||||
else None
|
||||
),
|
||||
three_day_avg=(
|
||||
float(latest["three_day_avg"])
|
||||
if latest["three_day_avg"] is not None
|
||||
else None
|
||||
),
|
||||
seven_day_avg=(
|
||||
float(latest["seven_day_avg"])
|
||||
if latest["seven_day_avg"] is not None
|
||||
else None
|
||||
),
|
||||
fourteen_day_avg=(
|
||||
float(latest["fourteen_day_avg"])
|
||||
if latest["fourteen_day_avg"] is not None
|
||||
else None
|
||||
),
|
||||
),
|
||||
alert=alert,
|
||||
historical_data=historical_data,
|
||||
)
|
||||
|
||||
|
||||
class MarketplaceGraphData(BaseModel):
|
||||
"""Data structure for marketplace graph monitoring."""
|
||||
|
||||
graph_id: str
|
||||
user_id: Optional[str]
|
||||
execution_count: int
|
||||
|
||||
|
||||
async def get_marketplace_graphs_for_monitoring(
|
||||
days_back: int = 30,
|
||||
min_executions: int = 10,
|
||||
) -> list[MarketplaceGraphData]:
|
||||
"""Get published marketplace graphs with recent executions for monitoring."""
|
||||
query_template = """
|
||||
WITH marketplace_graphs AS (
|
||||
SELECT DISTINCT
|
||||
slv."agentGraphId" as graph_id,
|
||||
slv."agentGraphVersion" as graph_version
|
||||
FROM {schema_prefix}"StoreListing" sl
|
||||
JOIN {schema_prefix}"StoreListingVersion" slv ON sl."activeVersionId" = slv."id"
|
||||
WHERE sl."hasApprovedVersion" = true
|
||||
AND sl."isDeleted" = false
|
||||
)
|
||||
SELECT DISTINCT
|
||||
mg.graph_id,
|
||||
NULL as user_id, -- Marketplace graphs don't have a specific user_id for monitoring
|
||||
COUNT(*) as execution_count
|
||||
FROM marketplace_graphs mg
|
||||
JOIN {schema_prefix}"AgentGraphExecution" e ON e."agentGraphId" = mg.graph_id
|
||||
WHERE e."createdAt" >= $1::timestamp
|
||||
AND e."isDeleted" = false
|
||||
AND e."executionStatus" IN ('COMPLETED', 'FAILED', 'TERMINATED')
|
||||
GROUP BY mg.graph_id
|
||||
HAVING COUNT(*) >= $2
|
||||
ORDER BY execution_count DESC
|
||||
"""
|
||||
start_date = datetime.now(timezone.utc) - timedelta(days=days_back)
|
||||
result = await query_raw_with_schema(query_template, start_date, min_executions)
|
||||
|
||||
return [
|
||||
MarketplaceGraphData(
|
||||
graph_id=row["graph_id"],
|
||||
user_id=row["user_id"],
|
||||
execution_count=int(row["execution_count"]),
|
||||
)
|
||||
for row in result
|
||||
]
|
||||
|
||||
@@ -601,18 +601,14 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
|
||||
async for output_name, output_data in self._execute(input_data, **kwargs):
|
||||
yield output_name, output_data
|
||||
except Exception as ex:
|
||||
if isinstance(ex, BlockError):
|
||||
raise ex
|
||||
else:
|
||||
raise (
|
||||
BlockExecutionError
|
||||
if isinstance(ex, ValueError)
|
||||
else BlockUnknownError
|
||||
)(
|
||||
if not isinstance(ex, BlockError):
|
||||
raise BlockUnknownError(
|
||||
message=str(ex),
|
||||
block_name=self.name,
|
||||
block_id=self.id,
|
||||
) from ex
|
||||
else:
|
||||
raise ex
|
||||
|
||||
async def _execute(self, input_data: BlockInput, **kwargs) -> BlockOutput:
|
||||
if error := self.input_schema.validate_data(input_data):
|
||||
|
||||
@@ -1465,35 +1465,3 @@ async def get_graph_execution_by_share_token(
|
||||
created_at=execution.createdAt,
|
||||
outputs=outputs,
|
||||
)
|
||||
|
||||
|
||||
async def get_frequently_executed_graphs(
|
||||
days_back: int = 30,
|
||||
min_executions: int = 10,
|
||||
) -> list[dict]:
|
||||
"""Get graphs that have been frequently executed for monitoring."""
|
||||
query_template = """
|
||||
SELECT DISTINCT
|
||||
e."agentGraphId" as graph_id,
|
||||
e."userId" as user_id,
|
||||
COUNT(*) as execution_count
|
||||
FROM {schema_prefix}"AgentGraphExecution" e
|
||||
WHERE e."createdAt" >= $1::timestamp
|
||||
AND e."isDeleted" = false
|
||||
AND e."executionStatus" IN ('COMPLETED', 'FAILED', 'TERMINATED')
|
||||
GROUP BY e."agentGraphId", e."userId"
|
||||
HAVING COUNT(*) >= $2
|
||||
ORDER BY execution_count DESC
|
||||
"""
|
||||
|
||||
start_date = datetime.now(timezone.utc) - timedelta(days=days_back)
|
||||
result = await query_raw_with_schema(query_template, start_date, min_executions)
|
||||
|
||||
return [
|
||||
{
|
||||
"graph_id": row["graph_id"],
|
||||
"user_id": row["user_id"],
|
||||
"execution_count": int(row["execution_count"]),
|
||||
}
|
||||
for row in result
|
||||
]
|
||||
|
||||
@@ -22,7 +22,7 @@ from typing import (
|
||||
from urllib.parse import urlparse
|
||||
from uuid import uuid4
|
||||
|
||||
from prisma.enums import CreditTransactionType, OnboardingStep
|
||||
from prisma.enums import CreditTransactionType
|
||||
from pydantic import (
|
||||
BaseModel,
|
||||
ConfigDict,
|
||||
@@ -868,20 +868,3 @@ class UserExecutionSummaryStats(BaseModel):
|
||||
total_execution_time: float = Field(default=0)
|
||||
average_execution_time: float = Field(default=0)
|
||||
cost_breakdown: dict[str, float] = Field(default_factory=dict)
|
||||
|
||||
|
||||
class UserOnboarding(BaseModel):
|
||||
userId: str
|
||||
completedSteps: list[OnboardingStep]
|
||||
walletShown: bool
|
||||
notified: list[OnboardingStep]
|
||||
rewardedFor: list[OnboardingStep]
|
||||
usageReason: Optional[str]
|
||||
integrations: list[str]
|
||||
otherIntegrations: Optional[str]
|
||||
selectedStoreListingVersionId: Optional[str]
|
||||
agentInput: Optional[dict[str, Any]]
|
||||
onboardingAgentExecutionId: Optional[str]
|
||||
agentRuns: int
|
||||
lastRunAt: Optional[datetime]
|
||||
consecutiveRunDays: int
|
||||
|
||||
@@ -2,7 +2,7 @@ from __future__ import annotations
|
||||
|
||||
from typing import AsyncGenerator
|
||||
|
||||
from pydantic import BaseModel, field_serializer
|
||||
from pydantic import BaseModel
|
||||
|
||||
from backend.data.event_bus import AsyncRedisEventBus
|
||||
from backend.server.model import NotificationPayload
|
||||
@@ -15,11 +15,6 @@ class NotificationEvent(BaseModel):
|
||||
user_id: str
|
||||
payload: NotificationPayload
|
||||
|
||||
@field_serializer("payload")
|
||||
def serialize_payload(self, payload: NotificationPayload):
|
||||
"""Ensure extra fields survive Redis serialization."""
|
||||
return payload.model_dump()
|
||||
|
||||
|
||||
class AsyncRedisNotificationEventBus(AsyncRedisEventBus[NotificationEvent]):
|
||||
Model = NotificationEvent # type: ignore
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import re
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Any, Literal, Optional
|
||||
from zoneinfo import ZoneInfo
|
||||
from datetime import datetime
|
||||
from typing import Any, Optional
|
||||
|
||||
import prisma
|
||||
import pydantic
|
||||
@@ -9,18 +8,17 @@ from prisma.enums import OnboardingStep
|
||||
from prisma.models import UserOnboarding
|
||||
from prisma.types import UserOnboardingCreateInput, UserOnboardingUpdateInput
|
||||
|
||||
from backend.data import execution as execution_db
|
||||
from backend.data.block import get_blocks
|
||||
from backend.data.credit import get_user_credit_model
|
||||
from backend.data.model import CredentialsMetaInput
|
||||
from backend.data.notification_bus import (
|
||||
AsyncRedisNotificationEventBus,
|
||||
NotificationEvent,
|
||||
)
|
||||
from backend.data.user import get_user_by_id
|
||||
from backend.server.model import OnboardingNotificationPayload
|
||||
from backend.server.v2.store.model import StoreAgentDetails
|
||||
from backend.util.cache import cached
|
||||
from backend.util.json import SafeJson
|
||||
from backend.util.timezone_utils import get_user_timezone_or_utc
|
||||
|
||||
# Mapping from user reason id to categories to search for when choosing agent to show
|
||||
REASON_MAPPING: dict[str, list[str]] = {
|
||||
@@ -33,20 +31,9 @@ REASON_MAPPING: dict[str, list[str]] = {
|
||||
POINTS_AGENT_COUNT = 50 # Number of agents to calculate points for
|
||||
MIN_AGENT_COUNT = 2 # Minimum number of marketplace agents to enable onboarding
|
||||
|
||||
FrontendOnboardingStep = Literal[
|
||||
OnboardingStep.WELCOME,
|
||||
OnboardingStep.USAGE_REASON,
|
||||
OnboardingStep.INTEGRATIONS,
|
||||
OnboardingStep.AGENT_CHOICE,
|
||||
OnboardingStep.AGENT_NEW_RUN,
|
||||
OnboardingStep.AGENT_INPUT,
|
||||
OnboardingStep.CONGRATS,
|
||||
OnboardingStep.MARKETPLACE_VISIT,
|
||||
OnboardingStep.BUILDER_OPEN,
|
||||
]
|
||||
|
||||
|
||||
class UserOnboardingUpdate(pydantic.BaseModel):
|
||||
completedSteps: Optional[list[OnboardingStep]] = None
|
||||
walletShown: Optional[bool] = None
|
||||
notified: Optional[list[OnboardingStep]] = None
|
||||
usageReason: Optional[str] = None
|
||||
@@ -55,6 +42,9 @@ class UserOnboardingUpdate(pydantic.BaseModel):
|
||||
selectedStoreListingVersionId: Optional[str] = None
|
||||
agentInput: Optional[dict[str, Any]] = None
|
||||
onboardingAgentExecutionId: Optional[str] = None
|
||||
agentRuns: Optional[int] = None
|
||||
lastRunAt: Optional[datetime] = None
|
||||
consecutiveRunDays: Optional[int] = None
|
||||
|
||||
|
||||
async def get_user_onboarding(user_id: str):
|
||||
@@ -93,6 +83,14 @@ async def reset_user_onboarding(user_id: str):
|
||||
async def update_user_onboarding(user_id: str, data: UserOnboardingUpdate):
|
||||
update: UserOnboardingUpdateInput = {}
|
||||
onboarding = await get_user_onboarding(user_id)
|
||||
if data.completedSteps is not None:
|
||||
update["completedSteps"] = list(
|
||||
set(data.completedSteps + onboarding.completedSteps)
|
||||
)
|
||||
for step in data.completedSteps:
|
||||
if step not in onboarding.completedSteps:
|
||||
await _reward_user(user_id, onboarding, step)
|
||||
await _send_onboarding_notification(user_id, step)
|
||||
if data.walletShown:
|
||||
update["walletShown"] = data.walletShown
|
||||
if data.notified is not None:
|
||||
@@ -109,6 +107,12 @@ async def update_user_onboarding(user_id: str, data: UserOnboardingUpdate):
|
||||
update["agentInput"] = SafeJson(data.agentInput)
|
||||
if data.onboardingAgentExecutionId is not None:
|
||||
update["onboardingAgentExecutionId"] = data.onboardingAgentExecutionId
|
||||
if data.agentRuns is not None and data.agentRuns > onboarding.agentRuns:
|
||||
update["agentRuns"] = data.agentRuns
|
||||
if data.lastRunAt is not None:
|
||||
update["lastRunAt"] = data.lastRunAt
|
||||
if data.consecutiveRunDays is not None:
|
||||
update["consecutiveRunDays"] = data.consecutiveRunDays
|
||||
|
||||
return await UserOnboarding.prisma().upsert(
|
||||
where={"userId": user_id},
|
||||
@@ -157,12 +161,14 @@ async def _reward_user(user_id: str, onboarding: UserOnboarding, step: Onboardin
|
||||
if step in onboarding.rewardedFor:
|
||||
return
|
||||
|
||||
onboarding.rewardedFor.append(step)
|
||||
user_credit_model = await get_user_credit_model(user_id)
|
||||
await user_credit_model.onboarding_reward(user_id, reward, step)
|
||||
await UserOnboarding.prisma().update(
|
||||
where={"userId": user_id},
|
||||
data={
|
||||
"rewardedFor": list(set(onboarding.rewardedFor + [step])),
|
||||
"completedSteps": list(set(onboarding.completedSteps + [step])),
|
||||
"rewardedFor": onboarding.rewardedFor,
|
||||
},
|
||||
)
|
||||
|
||||
@@ -171,52 +177,31 @@ async def complete_onboarding_step(user_id: str, step: OnboardingStep):
|
||||
"""
|
||||
Completes the specified onboarding step for the user if not already completed.
|
||||
"""
|
||||
|
||||
onboarding = await get_user_onboarding(user_id)
|
||||
if step not in onboarding.completedSteps:
|
||||
await UserOnboarding.prisma().update(
|
||||
where={"userId": user_id},
|
||||
data={
|
||||
"completedSteps": list(set(onboarding.completedSteps + [step])),
|
||||
},
|
||||
await update_user_onboarding(
|
||||
user_id,
|
||||
UserOnboardingUpdate(completedSteps=onboarding.completedSteps + [step]),
|
||||
)
|
||||
await _reward_user(user_id, onboarding, step)
|
||||
await _send_onboarding_notification(user_id, step)
|
||||
|
||||
|
||||
async def _send_onboarding_notification(
|
||||
user_id: str, step: OnboardingStep | None, event: str = "step_completed"
|
||||
):
|
||||
async def _send_onboarding_notification(user_id: str, step: OnboardingStep):
|
||||
"""
|
||||
Sends an onboarding notification to the user.
|
||||
Sends an onboarding notification to the user for the specified step.
|
||||
"""
|
||||
payload = OnboardingNotificationPayload(
|
||||
type="onboarding",
|
||||
event=event,
|
||||
step=step,
|
||||
event="step_completed",
|
||||
step=step.value,
|
||||
)
|
||||
await AsyncRedisNotificationEventBus().publish(
|
||||
NotificationEvent(user_id=user_id, payload=payload)
|
||||
)
|
||||
|
||||
|
||||
async def complete_re_run_agent(user_id: str, graph_id: str) -> None:
|
||||
"""
|
||||
Complete RE_RUN_AGENT step when a user runs a graph they've run before.
|
||||
Keeps overhead low by only counting executions if the step is still pending.
|
||||
"""
|
||||
onboarding = await get_user_onboarding(user_id)
|
||||
if OnboardingStep.RE_RUN_AGENT in onboarding.completedSteps:
|
||||
return
|
||||
|
||||
# Includes current execution, so count > 1 means there was at least one prior run.
|
||||
previous_exec_count = await execution_db.get_graph_executions_count(
|
||||
user_id=user_id, graph_id=graph_id
|
||||
)
|
||||
if previous_exec_count > 1:
|
||||
await complete_onboarding_step(user_id, OnboardingStep.RE_RUN_AGENT)
|
||||
|
||||
|
||||
def _clean_and_split(text: str) -> list[str]:
|
||||
def clean_and_split(text: str) -> list[str]:
|
||||
"""
|
||||
Removes all special characters from a string, truncates it to 100 characters,
|
||||
and splits it by whitespace and commas.
|
||||
@@ -239,7 +224,7 @@ def _clean_and_split(text: str) -> list[str]:
|
||||
return words
|
||||
|
||||
|
||||
def _calculate_points(
|
||||
def calculate_points(
|
||||
agent, categories: list[str], custom: list[str], integrations: list[str]
|
||||
) -> int:
|
||||
"""
|
||||
@@ -283,85 +268,18 @@ def _calculate_points(
|
||||
return int(points)
|
||||
|
||||
|
||||
def _normalize_datetime(value: datetime | None) -> datetime | None:
|
||||
if value is None:
|
||||
return None
|
||||
if value.tzinfo is None:
|
||||
return value.replace(tzinfo=timezone.utc)
|
||||
return value.astimezone(timezone.utc)
|
||||
def get_credentials_blocks() -> dict[str, str]:
|
||||
# Returns a dictionary of block id to credentials field name
|
||||
creds: dict[str, str] = {}
|
||||
blocks = get_blocks()
|
||||
for id, block in blocks.items():
|
||||
for field_name, field_info in block().input_schema.model_fields.items():
|
||||
if field_info.annotation == CredentialsMetaInput:
|
||||
creds[id] = field_name
|
||||
return creds
|
||||
|
||||
|
||||
def _calculate_consecutive_run_days(
|
||||
last_run_at: datetime | None, current_consecutive_days: int, user_timezone: str
|
||||
) -> tuple[datetime, int]:
|
||||
tz = ZoneInfo(user_timezone)
|
||||
local_now = datetime.now(tz)
|
||||
normalized_last_run = _normalize_datetime(last_run_at)
|
||||
|
||||
if normalized_last_run is None:
|
||||
return local_now.astimezone(timezone.utc), 1
|
||||
|
||||
last_run_local = normalized_last_run.astimezone(tz)
|
||||
last_run_date = last_run_local.date()
|
||||
today = local_now.date()
|
||||
|
||||
if last_run_date == today:
|
||||
return local_now.astimezone(timezone.utc), current_consecutive_days
|
||||
|
||||
if last_run_date == today - timedelta(days=1):
|
||||
return local_now.astimezone(timezone.utc), current_consecutive_days + 1
|
||||
|
||||
return local_now.astimezone(timezone.utc), 1
|
||||
|
||||
|
||||
def _get_run_milestone_steps(
|
||||
new_run_count: int, consecutive_days: int
|
||||
) -> list[OnboardingStep]:
|
||||
milestones: list[OnboardingStep] = []
|
||||
if new_run_count >= 10:
|
||||
milestones.append(OnboardingStep.RUN_AGENTS)
|
||||
if new_run_count >= 100:
|
||||
milestones.append(OnboardingStep.RUN_AGENTS_100)
|
||||
if consecutive_days >= 3:
|
||||
milestones.append(OnboardingStep.RUN_3_DAYS)
|
||||
if consecutive_days >= 14:
|
||||
milestones.append(OnboardingStep.RUN_14_DAYS)
|
||||
return milestones
|
||||
|
||||
|
||||
async def _get_user_timezone(user_id: str) -> str:
|
||||
user = await get_user_by_id(user_id)
|
||||
return get_user_timezone_or_utc(user.timezone if user else None)
|
||||
|
||||
|
||||
async def increment_runs(user_id: str):
|
||||
"""
|
||||
Increment a user's run counters and trigger any onboarding milestones.
|
||||
"""
|
||||
user_timezone = await _get_user_timezone(user_id)
|
||||
onboarding = await get_user_onboarding(user_id)
|
||||
new_run_count = onboarding.agentRuns + 1
|
||||
last_run_at, consecutive_run_days = _calculate_consecutive_run_days(
|
||||
onboarding.lastRunAt, onboarding.consecutiveRunDays, user_timezone
|
||||
)
|
||||
|
||||
await UserOnboarding.prisma().update(
|
||||
where={"userId": user_id},
|
||||
data={
|
||||
"agentRuns": {"increment": 1},
|
||||
"lastRunAt": last_run_at,
|
||||
"consecutiveRunDays": consecutive_run_days,
|
||||
},
|
||||
)
|
||||
|
||||
milestones = _get_run_milestone_steps(new_run_count, consecutive_run_days)
|
||||
new_steps = [step for step in milestones if step not in onboarding.completedSteps]
|
||||
|
||||
for step in new_steps:
|
||||
await complete_onboarding_step(user_id, step)
|
||||
# Send progress notification if no steps were completed, so client refetches onboarding state
|
||||
if not new_steps:
|
||||
await _send_onboarding_notification(user_id, None, event="increment_runs")
|
||||
CREDENTIALS_FIELDS: dict[str, str] = get_credentials_blocks()


async def get_recommended_agents(user_id: str) -> list[StoreAgentDetails]:
@@ -370,7 +288,7 @@ async def get_recommended_agents(user_id: str) -> list[StoreAgentDetails]:

    where_clause: dict[str, Any] = {}

    custom = _clean_and_split((user_onboarding.usageReason or "").lower())
    custom = clean_and_split((user_onboarding.usageReason or "").lower())

    if categories:
        where_clause["OR"] = [
@@ -418,7 +336,7 @@ async def get_recommended_agents(user_id: str) -> list[StoreAgentDetails]:
    # Calculate points for the first X agents and choose the top 2
    agent_points = []
    for agent in storeAgents[:POINTS_AGENT_COUNT]:
        points = _calculate_points(
        points = calculate_points(
            agent, categories, custom, user_onboarding.integrations
        )
        agent_points.append((agent, points))
@@ -432,7 +350,6 @@ async def get_recommended_agents(user_id: str) -> list[StoreAgentDetails]:
            slug=agent.slug,
            agent_name=agent.agent_name,
            agent_video=agent.agent_video or "",
            agent_output_demo=agent.agent_output_demo or "",
            agent_image=agent.agent_image,
            creator=agent.creator_username,
            creator_avatar=agent.creator_avatar,
@@ -3,17 +3,12 @@ from contextlib import asynccontextmanager
from typing import TYPE_CHECKING, Callable, Concatenate, ParamSpec, TypeVar, cast

from backend.data import db
from backend.data.analytics import (
    get_accuracy_trends_and_alerts,
    get_marketplace_graphs_for_monitoring,
)
from backend.data.credit import UsageTransactionMetadata, get_user_credit_model
from backend.data.execution import (
    create_graph_execution,
    get_block_error_stats,
    get_child_graph_executions,
    get_execution_kv_data,
    get_frequently_executed_graphs,
    get_graph_execution_meta,
    get_graph_executions,
    get_graph_executions_count,
@@ -150,9 +145,6 @@ class DatabaseManager(AppService):
    get_execution_kv_data = _(get_execution_kv_data)
    set_execution_kv_data = _(set_execution_kv_data)
    get_block_error_stats = _(get_block_error_stats)
    get_accuracy_trends_and_alerts = _(get_accuracy_trends_and_alerts)
    get_frequently_executed_graphs = _(get_frequently_executed_graphs)
    get_marketplace_graphs_for_monitoring = _(get_marketplace_graphs_for_monitoring)

    # Graphs
    get_node = _(get_node)
@@ -234,10 +226,6 @@ class DatabaseManagerClient(AppServiceClient):

    # Block error monitoring
    get_block_error_stats = _(d.get_block_error_stats)
    # Execution accuracy monitoring
    get_accuracy_trends_and_alerts = _(d.get_accuracy_trends_and_alerts)
    get_frequently_executed_graphs = _(d.get_frequently_executed_graphs)
    get_marketplace_graphs_for_monitoring = _(d.get_marketplace_graphs_for_monitoring)

    # Human In The Loop
    has_pending_reviews_for_graph_exec = _(d.has_pending_reviews_for_graph_exec)
@@ -26,14 +26,12 @@ from sqlalchemy import MetaData, create_engine
from backend.data.block import BlockInput
from backend.data.execution import GraphExecutionWithNodes
from backend.data.model import CredentialsMetaInput
from backend.data.onboarding import increment_runs
from backend.executor import utils as execution_utils
from backend.monitoring import (
    NotificationJobArgs,
    process_existing_batches,
    process_weekly_summary,
    report_block_error_rates,
    report_execution_accuracy_alerts,
    report_late_executions,
)
from backend.util.clients import get_scheduler_client
@@ -155,7 +153,6 @@ async def _execute_graph(**kwargs):
        inputs=args.input_data,
        graph_credentials_inputs=args.input_credentials,
    )
    await increment_runs(args.user_id)
    elapsed = asyncio.get_event_loop().time() - start_time
    logger.info(
        f"Graph execution started with ID {graph_exec.id} for graph {args.graph_id} "
@@ -242,11 +239,6 @@ def cleanup_expired_files():
    run_async(cleanup_expired_files_async())


def execution_accuracy_alerts():
    """Check execution accuracy and send alerts if drops are detected."""
    return report_execution_accuracy_alerts()


# Monitoring functions are now imported from monitoring module


@@ -446,17 +438,6 @@ class Scheduler(AppService):
            jobstore=Jobstores.EXECUTION.value,
        )

        # Execution Accuracy Monitoring - configurable interval
        self.scheduler.add_job(
            execution_accuracy_alerts,
            id="report_execution_accuracy_alerts",
            trigger="interval",
            replace_existing=True,
            seconds=config.execution_accuracy_check_interval_hours
            * 3600,  # Convert hours to seconds
            jobstore=Jobstores.EXECUTION.value,
        )

        self.scheduler.add_listener(job_listener, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)
        self.scheduler.add_listener(job_missed_listener, EVENT_JOB_MISSED)
        self.scheduler.add_listener(job_max_instances_listener, EVENT_JOB_MAX_INSTANCES)
@@ -604,11 +585,6 @@ class Scheduler(AppService):
        """Manually trigger cleanup of expired cloud storage files."""
        return cleanup_expired_files()

    @expose
    def execute_report_execution_accuracy_alerts(self):
        """Manually trigger execution accuracy alert checking."""
        return execution_accuracy_alerts()


class SchedulerClient(AppServiceClient):
    @classmethod
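The removed scheduler block also exposed a manual trigger. A hedged sketch of invoking it through the service client, assuming (per the AppServiceClient pattern in the DatabaseManagerClient hunk above) that SchedulerClient mirrors the @expose'd method:

from backend.util.clients import get_scheduler_client

scheduler = get_scheduler_client()
# On-demand sweep instead of waiting for the interval job, which fires every
# execution_accuracy_check_interval_hours * 3600 seconds.
status = scheduler.execute_report_execution_accuracy_alerts()
print(status)  # e.g. "No accuracy alerts detected"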
autogpt_platform/backend/backend/integrations/embeddings.py (new file, 156 lines)
@@ -0,0 +1,156 @@
"""
Embedding service for generating text embeddings using OpenAI.

Used for vector-based semantic search in the store.
"""

import logging
from typing import Optional

import openai

from backend.util.settings import Settings

logger = logging.getLogger(__name__)

# Model configuration
# Using text-embedding-3-small (1536 dimensions) for compatibility with pgvector indexes
# pgvector IVFFlat/HNSW indexes have dimension limits (2000 for IVFFlat, varies for HNSW)
EMBEDDING_MODEL = "text-embedding-3-small"
EMBEDDING_DIMENSIONS = 1536

# Input validation limits
# OpenAI text-embedding-3-large supports up to 8191 tokens (~32k chars)
# We set a conservative limit to prevent abuse
MAX_TEXT_LENGTH = 10000  # characters
MAX_BATCH_SIZE = 100  # maximum texts per batch request


class EmbeddingService:
    """Service for generating text embeddings using OpenAI."""

    def __init__(self, api_key: Optional[str] = None):
        settings = Settings()
        self.api_key = (
            api_key
            or settings.secrets.openai_internal_api_key
            or settings.secrets.openai_api_key
        )
        if not self.api_key:
            raise ValueError(
                "OpenAI API key not configured. "
                "Set OPENAI_API_KEY or OPENAI_INTERNAL_API_KEY environment variable."
            )
        self.client = openai.AsyncOpenAI(api_key=self.api_key)

    async def generate_embedding(self, text: str) -> list[float]:
        """
        Generate embedding for a single text string.

        Args:
            text: The text to generate an embedding for.

        Returns:
            A list of floats representing the embedding vector.

        Raises:
            ValueError: If the text is empty or exceeds maximum length.
            openai.APIError: If the OpenAI API call fails.
        """
        # Input validation
        if not text or not text.strip():
            raise ValueError("Text cannot be empty")
        if len(text) > MAX_TEXT_LENGTH:
            raise ValueError(
                f"Text exceeds maximum length of {MAX_TEXT_LENGTH} characters"
            )

        try:
            response = await self.client.embeddings.create(
                model=EMBEDDING_MODEL,
                input=text,
                dimensions=EMBEDDING_DIMENSIONS,
            )
            return response.data[0].embedding
        except openai.APIError as e:
            logger.error(f"OpenAI API error generating embedding: {e}")
            raise

    async def generate_embeddings(self, texts: list[str]) -> list[list[float]]:
        """
        Generate embeddings for multiple texts (batch).

        Args:
            texts: List of texts to generate embeddings for.

        Returns:
            List of embedding vectors, one per input text.

        Raises:
            ValueError: If any text is invalid or batch size exceeds limit.
            openai.APIError: If the OpenAI API call fails.
        """
        # Input validation
        if not texts:
            raise ValueError("Texts list cannot be empty")
        if len(texts) > MAX_BATCH_SIZE:
            raise ValueError(f"Batch size exceeds maximum of {MAX_BATCH_SIZE} texts")
        for i, text in enumerate(texts):
            if not text or not text.strip():
                raise ValueError(f"Text at index {i} cannot be empty")
            if len(text) > MAX_TEXT_LENGTH:
                raise ValueError(
                    f"Text at index {i} exceeds maximum length of {MAX_TEXT_LENGTH} characters"
                )

        try:
            response = await self.client.embeddings.create(
                model=EMBEDDING_MODEL,
                input=texts,
                dimensions=EMBEDDING_DIMENSIONS,
            )
            # Sort by index to ensure correct ordering
            sorted_data = sorted(response.data, key=lambda x: x.index)
            return [item.embedding for item in sorted_data]
        except openai.APIError as e:
            logger.error(f"OpenAI API error generating embeddings: {e}")
            raise


def create_search_text(name: str, sub_heading: str, description: str) -> str:
    """
    Combine fields into searchable text for embedding.

    This creates a single text string from the agent's name, sub-heading,
    and description, which is then converted to an embedding vector.

    Args:
        name: The agent name.
        sub_heading: The agent sub-heading/tagline.
        description: The agent description.

    Returns:
        A single string combining all non-empty fields.
    """
    parts = [name or "", sub_heading or "", description or ""]
    return " ".join(filter(None, parts)).strip()


# Singleton instance
_embedding_service: Optional[EmbeddingService] = None


async def get_embedding_service() -> EmbeddingService:
    """
    Get or create the embedding service singleton.

    Returns:
        The shared EmbeddingService instance.

    Raises:
        ValueError: If OpenAI API key is not configured.
    """
    global _embedding_service
    if _embedding_service is None:
        _embedding_service = EmbeddingService()
    return _embedding_service
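Taken together, a typical call path for this service might look like the following sketch (the agent fields are made up for illustration; everything else uses the API defined above, and a configured OpenAI key is required at runtime):

import asyncio

from backend.integrations.embeddings import create_search_text, get_embedding_service


async def embed_store_agent() -> list[float]:
    # Hypothetical agent fields, combined exactly as the store search would do
    text = create_search_text(
        name="Newsletter Writer",
        sub_heading="Drafts a weekly digest",
        description="Summarizes your saved links into a newsletter.",
    )
    service = await get_embedding_service()  # raises ValueError without an API key
    return await service.generate_embedding(text)


vector = asyncio.run(embed_store_agent())
assert len(vector) == 1536  # EMBEDDING_DIMENSIONS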
autogpt_platform/backend/backend/integrations/embeddings_test.py (new file, 231 lines)
@@ -0,0 +1,231 @@
"""Tests for the embedding service.

This module tests:
- create_search_text utility function
- EmbeddingService input validation
- EmbeddingService API interaction (mocked)
"""

from unittest.mock import AsyncMock, MagicMock, patch

import pytest

from backend.integrations.embeddings import (
    EMBEDDING_DIMENSIONS,
    MAX_BATCH_SIZE,
    MAX_TEXT_LENGTH,
    EmbeddingService,
    create_search_text,
)


class TestCreateSearchText:
    """Tests for the create_search_text utility function."""

    def test_combines_all_fields(self):
        result = create_search_text("Agent Name", "A cool agent", "Does amazing things")
        assert result == "Agent Name A cool agent Does amazing things"

    def test_handles_empty_name(self):
        result = create_search_text("", "Sub heading", "Description")
        assert result == "Sub heading Description"

    def test_handles_empty_sub_heading(self):
        result = create_search_text("Name", "", "Description")
        assert result == "Name Description"

    def test_handles_empty_description(self):
        result = create_search_text("Name", "Sub heading", "")
        assert result == "Name Sub heading"

    def test_handles_all_empty(self):
        result = create_search_text("", "", "")
        assert result == ""

    def test_handles_none_values(self):
        # The function expects strings but should handle None gracefully
        result = create_search_text(None, None, None)  # type: ignore
        assert result == ""

    def test_preserves_content_strips_outer_whitespace(self):
        # The function joins parts and strips the outer result
        # Internal whitespace in each part is preserved
        result = create_search_text(" Name ", " Sub ", " Desc ")
        # Each part is joined with space, then outer strip applied
        assert result == "Name Sub Desc"

    def test_handles_only_whitespace(self):
        # Parts that are only whitespace become empty after filter
        result = create_search_text(" ", " ", " ")
        assert result == ""


class TestEmbeddingServiceValidation:
    """Tests for EmbeddingService input validation."""

    @pytest.fixture
    def mock_settings(self):
        """Mock settings with a test API key."""
        with patch("backend.integrations.embeddings.Settings") as mock:
            mock_instance = MagicMock()
            mock_instance.secrets.openai_internal_api_key = "test-api-key"
            mock_instance.secrets.openai_api_key = ""
            mock.return_value = mock_instance
            yield mock

    @pytest.fixture
    def service(self, mock_settings):
        """Create an EmbeddingService instance with mocked settings."""
        with patch("backend.integrations.embeddings.openai.AsyncOpenAI"):
            return EmbeddingService()

    def test_init_requires_api_key(self):
        """Test that initialization fails without an API key."""
        with patch("backend.integrations.embeddings.Settings") as mock:
            mock_instance = MagicMock()
            mock_instance.secrets.openai_internal_api_key = ""
            mock_instance.secrets.openai_api_key = ""
            mock.return_value = mock_instance

            with pytest.raises(ValueError, match="OpenAI API key not configured"):
                EmbeddingService()

    def test_init_accepts_explicit_api_key(self):
        """Test that explicit API key overrides settings."""
        with patch("backend.integrations.embeddings.Settings") as mock:
            mock_instance = MagicMock()
            mock_instance.secrets.openai_internal_api_key = ""
            mock_instance.secrets.openai_api_key = ""
            mock.return_value = mock_instance

            with patch("backend.integrations.embeddings.openai.AsyncOpenAI"):
                service = EmbeddingService(api_key="explicit-key")
                assert service.api_key == "explicit-key"

    @pytest.mark.asyncio
    async def test_generate_embedding_empty_text(self, service):
        """Test that empty text raises ValueError."""
        with pytest.raises(ValueError, match="Text cannot be empty"):
            await service.generate_embedding("")

    @pytest.mark.asyncio
    async def test_generate_embedding_whitespace_only(self, service):
        """Test that whitespace-only text raises ValueError."""
        with pytest.raises(ValueError, match="Text cannot be empty"):
            await service.generate_embedding(" ")

    @pytest.mark.asyncio
    async def test_generate_embedding_exceeds_max_length(self, service):
        """Test that text exceeding max length raises ValueError."""
        long_text = "a" * (MAX_TEXT_LENGTH + 1)
        with pytest.raises(ValueError, match="exceeds maximum length"):
            await service.generate_embedding(long_text)

    @pytest.mark.asyncio
    async def test_generate_embeddings_empty_list(self, service):
        """Test that empty list raises ValueError."""
        with pytest.raises(ValueError, match="Texts list cannot be empty"):
            await service.generate_embeddings([])

    @pytest.mark.asyncio
    async def test_generate_embeddings_exceeds_batch_size(self, service):
        """Test that batch exceeding max size raises ValueError."""
        texts = ["text"] * (MAX_BATCH_SIZE + 1)
        with pytest.raises(ValueError, match="Batch size exceeds maximum"):
            await service.generate_embeddings(texts)

    @pytest.mark.asyncio
    async def test_generate_embeddings_empty_text_in_batch(self, service):
        """Test that empty text in batch raises ValueError with index."""
        with pytest.raises(ValueError, match="Text at index 1 cannot be empty"):
            await service.generate_embeddings(["valid", "", "also valid"])

    @pytest.mark.asyncio
    async def test_generate_embeddings_long_text_in_batch(self, service):
        """Test that long text in batch raises ValueError with index."""
        long_text = "a" * (MAX_TEXT_LENGTH + 1)
        with pytest.raises(ValueError, match="Text at index 2 exceeds maximum length"):
            await service.generate_embeddings(["short", "also short", long_text])


class TestEmbeddingServiceAPI:
    """Tests for EmbeddingService API interaction."""

    @pytest.fixture
    def mock_openai_client(self):
        """Create a mock OpenAI client."""
        mock_client = MagicMock()
        mock_client.embeddings = MagicMock()
        return mock_client

    @pytest.fixture
    def service_with_mock_client(self, mock_openai_client):
        """Create an EmbeddingService with a mocked OpenAI client."""
        with patch("backend.integrations.embeddings.Settings") as mock_settings:
            mock_instance = MagicMock()
            mock_instance.secrets.openai_internal_api_key = "test-key"
            mock_instance.secrets.openai_api_key = ""
            mock_settings.return_value = mock_instance

            with patch(
                "backend.integrations.embeddings.openai.AsyncOpenAI"
            ) as mock_openai:
                mock_openai.return_value = mock_openai_client
                service = EmbeddingService()
                return service, mock_openai_client

    @pytest.mark.asyncio
    async def test_generate_embedding_success(self, service_with_mock_client):
        """Test successful embedding generation."""
        service, mock_client = service_with_mock_client

        # Create mock response
        mock_embedding = [0.1] * EMBEDDING_DIMENSIONS
        mock_response = MagicMock()
        mock_response.data = [MagicMock(embedding=mock_embedding)]
        mock_client.embeddings.create = AsyncMock(return_value=mock_response)

        result = await service.generate_embedding("test text")

        assert result == mock_embedding
        mock_client.embeddings.create.assert_called_once()

    @pytest.mark.asyncio
    async def test_generate_embeddings_success(self, service_with_mock_client):
        """Test successful batch embedding generation."""
        service, mock_client = service_with_mock_client

        # Create mock response with multiple embeddings
        mock_embeddings = [[0.1] * EMBEDDING_DIMENSIONS, [0.2] * EMBEDDING_DIMENSIONS]
        mock_response = MagicMock()
        mock_response.data = [
            MagicMock(embedding=mock_embeddings[0], index=0),
            MagicMock(embedding=mock_embeddings[1], index=1),
        ]
        mock_client.embeddings.create = AsyncMock(return_value=mock_response)

        result = await service.generate_embeddings(["text1", "text2"])

        assert result == mock_embeddings
        mock_client.embeddings.create.assert_called_once()

    @pytest.mark.asyncio
    async def test_generate_embeddings_preserves_order(self, service_with_mock_client):
        """Test that batch embeddings are returned in correct order even if API returns out of order."""
        service, mock_client = service_with_mock_client

        # Create mock response with embeddings out of order
        mock_embeddings = [[0.1] * EMBEDDING_DIMENSIONS, [0.2] * EMBEDDING_DIMENSIONS]
        mock_response = MagicMock()
        # Return in reverse order
        mock_response.data = [
            MagicMock(embedding=mock_embeddings[1], index=1),
            MagicMock(embedding=mock_embeddings[0], index=0),
        ]
        mock_client.embeddings.create = AsyncMock(return_value=mock_response)

        result = await service.generate_embeddings(["text1", "text2"])

        # Should be sorted by index
        assert result[0] == mock_embeddings[0]
        assert result[1] == mock_embeddings[1]
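The suite above mocks the OpenAI client, so no network access or real key is needed. Based on the Makefile conventions earlier in this compare, a plausible invocation (path and flags are assumptions, not shown in this diff) is:

cd backend && poetry run pytest backend/integrations/embeddings_test.py -v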
@@ -18,9 +18,7 @@ class ManualWebhookManagerBase(BaseWebhooksManager[WT]):
        ingress_url: str,
        secret: str,
    ) -> tuple[str, dict]:
        # TODO: pass ingress_url to user in frontend
        # See: https://github.com/Significant-Gravitas/AutoGPT/issues/8537
        logger.debug(f"Manual webhook registered with ingress URL: {ingress_url}")
        print(ingress_url)  # FIXME: pass URL to user in front end

        return "", {}
@@ -1,6 +1,5 @@
"""Monitoring module for platform health and alerting."""

from .accuracy_monitor import AccuracyMonitor, report_execution_accuracy_alerts
from .block_error_monitor import BlockErrorMonitor, report_block_error_rates
from .late_execution_monitor import (
    LateExecutionException,
@@ -14,12 +13,10 @@ from .notification_monitor import (
)

__all__ = [
    "AccuracyMonitor",
    "BlockErrorMonitor",
    "LateExecutionMonitor",
    "LateExecutionException",
    "NotificationJobArgs",
    "report_execution_accuracy_alerts",
    "report_block_error_rates",
    "report_late_executions",
    "process_existing_batches",
@@ -1,107 +0,0 @@
"""Execution accuracy monitoring module."""

import logging

from backend.util.clients import (
    get_database_manager_client,
    get_notification_manager_client,
)
from backend.util.metrics import DiscordChannel, sentry_capture_error
from backend.util.settings import Config

logger = logging.getLogger(__name__)
config = Config()


class AccuracyMonitor:
    """Monitor execution accuracy trends and send alerts for drops."""

    def __init__(self, drop_threshold: float = 10.0):
        self.config = config
        self.notification_client = get_notification_manager_client()
        self.database_client = get_database_manager_client()
        self.drop_threshold = drop_threshold

    def check_execution_accuracy_alerts(self) -> str:
        """Check marketplace agents for accuracy drops and send alerts."""
        try:
            logger.info("Checking execution accuracy for marketplace agents")

            # Get marketplace graphs using database client
            graphs = self.database_client.get_marketplace_graphs_for_monitoring(
                days_back=30, min_executions=10
            )

            alerts_found = 0

            for graph_data in graphs:
                result = self.database_client.get_accuracy_trends_and_alerts(
                    graph_id=graph_data.graph_id,
                    user_id=graph_data.user_id,
                    days_back=21,  # 3 weeks
                    drop_threshold=self.drop_threshold,
                )

                if result.alert:
                    alert = result.alert

                    # Get graph details for better alert info
                    try:
                        graph_info = self.database_client.get_graph_metadata(
                            graph_id=alert.graph_id
                        )
                        graph_name = graph_info.name if graph_info else "Unknown Agent"
                    except Exception:
                        graph_name = "Unknown Agent"

                    # Create detailed alert message
                    alert_msg = (
                        f"🚨 **AGENT ACCURACY DROP DETECTED**\n\n"
                        f"**Agent:** {graph_name}\n"
                        f"**Graph ID:** `{alert.graph_id}`\n"
                        f"**Accuracy Drop:** {alert.drop_percent:.1f}%\n"
                        f"**Recent Performance:**\n"
                        f"  • 3-day average: {alert.three_day_avg:.1f}%\n"
                        f"  • 7-day average: {alert.seven_day_avg:.1f}%\n"
                    )

                    if alert.user_id:
                        alert_msg += f"**Owner:** {alert.user_id}\n"

                    # Send individual alert for each agent (not batched)
                    self.notification_client.discord_system_alert(
                        alert_msg, DiscordChannel.PRODUCT
                    )
                    alerts_found += 1
                    logger.warning(
                        f"Sent accuracy alert for agent: {graph_name} ({alert.graph_id})"
                    )

            if alerts_found > 0:
                return f"Alert sent for {alerts_found} agents with accuracy drops"

            logger.info("No execution accuracy alerts detected")
            return "No accuracy alerts detected"

        except Exception as e:
            logger.exception(f"Error checking execution accuracy alerts: {e}")

            error = Exception(f"Error checking execution accuracy alerts: {e}")
            msg = str(error)
            sentry_capture_error(error)
            self.notification_client.discord_system_alert(msg, DiscordChannel.PRODUCT)
            return msg


def report_execution_accuracy_alerts(drop_threshold: float = 10.0) -> str:
    """
    Check execution accuracy and send alerts if drops are detected.

    Args:
        drop_threshold: Percentage drop threshold to trigger alerts (default 10.0%)

    Returns:
        Status message indicating results of the check
    """
    monitor = AccuracyMonitor(drop_threshold=drop_threshold)
    return monitor.check_execution_accuracy_alerts()
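Before this deletion, the module-level entry point could be called directly with a custom threshold. A minimal sketch (the 3-day-versus-7-day reading is inferred from the alert text above):

from backend.monitoring import report_execution_accuracy_alerts  # import valid pre-removal

# Alert when the 3-day accuracy average falls 15% or more below the 7-day trend
# (the default threshold is 10.0).
status = report_execution_accuracy_alerts(drop_threshold=15.0)
print(status)  # "No accuracy alerts detected" or a per-agent alert summary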
@@ -33,11 +33,7 @@ from backend.data.model import (
    OAuth2Credentials,
    UserIntegrations,
)
from backend.data.onboarding import (
    OnboardingStep,
    complete_onboarding_step,
    increment_runs,
)
from backend.data.onboarding import OnboardingStep, complete_onboarding_step
from backend.data.user import get_user_integrations
from backend.executor.utils import add_graph_execution
from backend.integrations.ayrshare import AyrshareClient, SocialPlatform
@@ -381,7 +377,6 @@ async def webhook_ingress_generic(
        return

    await complete_onboarding_step(user_id, OnboardingStep.TRIGGER_WEBHOOK)
    await increment_runs(user_id)

    # Execute all triggers concurrently for better performance
    tasks = []
@@ -1,8 +1,7 @@
import enum
from typing import Any, Literal, Optional
from typing import Any, Optional

import pydantic
from prisma.enums import OnboardingStep

from backend.data.api_key import APIKeyInfo, APIKeyPermission
from backend.data.graph import Graph
@@ -36,13 +35,8 @@ class WSSubscribeGraphExecutionsRequest(pydantic.BaseModel):
    graph_id: str


GraphCreationSource = Literal["builder", "upload"]
GraphExecutionSource = Literal["builder", "library", "onboarding"]


class CreateGraph(pydantic.BaseModel):
    graph: Graph
    source: GraphCreationSource | None = None


class CreateAPIKeyRequest(pydantic.BaseModel):
@@ -89,8 +83,6 @@ class NotificationPayload(pydantic.BaseModel):
    type: str
    event: str

    model_config = pydantic.ConfigDict(extra="allow")


class OnboardingNotificationPayload(NotificationPayload):
    step: OnboardingStep | None
    step: str
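As a quick illustration of the `source` fields defined above (removed on the other side of this diff); `some_graph` is a placeholder for a real `Graph` instance:

# Hypothetical payloads; only "builder" and "upload" validate for GraphCreationSource.
saved_from_builder = CreateGraph(graph=some_graph, source="builder")
uploaded = CreateGraph(graph=some_graph, source="upload")
# In the v1 routes later in this compare, source == "builder" is what awards
# the BUILDER_SAVE_AGENT onboarding step on save.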
@@ -5,7 +5,7 @@ import time
import uuid
from collections import defaultdict
from datetime import datetime, timezone
from typing import Annotated, Any, Sequence, get_args
from typing import Annotated, Any, Sequence

import pydantic
import stripe
@@ -45,17 +45,12 @@ from backend.data.credit import (
    set_auto_top_up,
)
from backend.data.graph import GraphSettings
from backend.data.model import CredentialsMetaInput, UserOnboarding
from backend.data.model import CredentialsMetaInput
from backend.data.notifications import NotificationPreference, NotificationPreferenceDTO
from backend.data.onboarding import (
    FrontendOnboardingStep,
    OnboardingStep,
    UserOnboardingUpdate,
    complete_onboarding_step,
    complete_re_run_agent,
    get_recommended_agents,
    get_user_onboarding,
    increment_runs,
    onboarding_enabled,
    reset_user_onboarding,
    update_user_onboarding,
@@ -83,7 +78,6 @@ from backend.server.model import (
    CreateAPIKeyRequest,
    CreateAPIKeyResponse,
    CreateGraph,
    GraphExecutionSource,
    RequestTopUp,
    SetGraphActiveVersion,
    TimezoneResponse,
@@ -91,7 +85,6 @@ from backend.server.model import (
    UpdateTimezoneRequest,
    UploadFileResponse,
)
from backend.server.v2.store.model import StoreAgentDetails
from backend.util.cache import cached
from backend.util.clients import get_scheduler_client
from backend.util.cloud_storage import get_cloud_storage_handler
@@ -281,10 +274,9 @@ async def update_preferences(

@v1_router.get(
    "/onboarding",
    summary="Onboarding state",
    summary="Get onboarding status",
    tags=["onboarding"],
    dependencies=[Security(requires_user)],
    response_model=UserOnboarding,
)
async def get_onboarding(user_id: Annotated[str, Security(get_user_id)]):
    return await get_user_onboarding(user_id)
@@ -292,10 +284,9 @@ async def get_onboarding(user_id: Annotated[str, Security(get_user_id)]):

@v1_router.patch(
    "/onboarding",
    summary="Update onboarding state",
    summary="Update onboarding progress",
    tags=["onboarding"],
    dependencies=[Security(requires_user)],
    response_model=UserOnboarding,
)
async def update_onboarding(
    user_id: Annotated[str, Security(get_user_id)], data: UserOnboardingUpdate
@@ -303,39 +294,25 @@ async def update_onboarding(
    return await update_user_onboarding(user_id, data)


@v1_router.post(
    "/onboarding/step",
    summary="Complete onboarding step",
    tags=["onboarding"],
    dependencies=[Security(requires_user)],
)
async def onboarding_complete_step(
    user_id: Annotated[str, Security(get_user_id)], step: FrontendOnboardingStep
):
    if step not in get_args(FrontendOnboardingStep):
        raise HTTPException(status_code=400, detail="Invalid onboarding step")
    return await complete_onboarding_step(user_id, step)


@v1_router.get(
    "/onboarding/agents",
    summary="Recommended onboarding agents",
    summary="Get recommended agents",
    tags=["onboarding"],
    dependencies=[Security(requires_user)],
)
async def get_onboarding_agents(
    user_id: Annotated[str, Security(get_user_id)],
) -> list[StoreAgentDetails]:
):
    return await get_recommended_agents(user_id)


@v1_router.get(
    "/onboarding/enabled",
    summary="Is onboarding enabled",
    summary="Check onboarding enabled",
    tags=["onboarding", "public"],
    dependencies=[Security(requires_user)],
)
async def is_onboarding_enabled() -> bool:
async def is_onboarding_enabled():
    return await onboarding_enabled()


@@ -344,7 +321,6 @@ async def is_onboarding_enabled() -> bool:
    summary="Reset onboarding progress",
    tags=["onboarding"],
    dependencies=[Security(requires_user)],
    response_model=UserOnboarding,
)
async def reset_onboarding(user_id: Annotated[str, Security(get_user_id)]):
    return await reset_user_onboarding(user_id)
@@ -833,12 +809,7 @@ async def create_new_graph(
    # as the graph already valid and no sub-graphs are returned back.
    await graph_db.create_graph(graph, user_id=user_id)
    await library_db.create_library_agent(graph, user_id=user_id)
    activated_graph = await on_graph_activate(graph, user_id=user_id)

    if create_graph.source == "builder":
        await complete_onboarding_step(user_id, OnboardingStep.BUILDER_SAVE_AGENT)

    return activated_graph
    return await on_graph_activate(graph, user_id=user_id)


@v1_router.delete(
@@ -996,7 +967,6 @@ async def execute_graph(
    credentials_inputs: Annotated[
        dict[str, CredentialsMetaInput], Body(..., embed=True, default_factory=dict)
    ],
    source: Annotated[GraphExecutionSource | None, Body(embed=True)] = None,
    graph_version: Optional[int] = None,
    preset_id: Optional[str] = None,
) -> execution_db.GraphExecutionMeta:
@@ -1020,14 +990,6 @@ async def execute_graph(
        # Record successful graph execution
        record_graph_execution(graph_id=graph_id, status="success", user_id=user_id)
        record_graph_operation(operation="execute", status="success")
        await increment_runs(user_id)
        await complete_re_run_agent(user_id, graph_id)
        if source == "library":
            await complete_onboarding_step(
                user_id, OnboardingStep.MARKETPLACE_RUN_AGENT
            )
        elif source == "builder":
            await complete_onboarding_step(user_id, OnboardingStep.BUILDER_RUN_AGENT)
        return result
    except GraphValidationError as e:
        # Record failed graph execution
@@ -1141,15 +1103,6 @@ async def list_graph_executions(
    filtered_executions = await hide_activity_summaries_if_disabled(
        paginated_result.executions, user_id
    )
    onboarding = await get_user_onboarding(user_id)
    if (
        onboarding.onboardingAgentExecutionId
        and onboarding.onboardingAgentExecutionId
        in [exec.id for exec in filtered_executions]
        and OnboardingStep.GET_RESULTS not in onboarding.completedSteps
    ):
        await complete_onboarding_step(user_id, OnboardingStep.GET_RESULTS)

    return execution_db.GraphExecutionsPaginated(
        executions=filtered_executions, pagination=paginated_result.pagination
    )
@@ -1187,12 +1140,6 @@ async def get_graph_execution(

    # Apply feature flags to filter out disabled features
    result = await hide_activity_summary_if_disabled(result, user_id)
    onboarding = await get_user_onboarding(user_id)
    if (
        onboarding.onboardingAgentExecutionId == graph_exec_id
        and OnboardingStep.GET_RESULTS not in onboarding.completedSteps
    ):
        await complete_onboarding_step(user_id, OnboardingStep.GET_RESULTS)

    return result

@@ -1369,8 +1316,6 @@ async def create_graph_execution_schedule(
        result.next_run_time, user_timezone
    )

    await complete_onboarding_step(user_id, OnboardingStep.SCHEDULE_AGENT)

    return result
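Viewed from a client, the onboarding routes touched above map to simple authenticated calls. A hedged sketch (the base URL, mount prefix, and auth header are placeholders; only the paths and verbs come from the decorators above):

import httpx

BASE = "http://localhost:8006/api"  # assumed mount point, not shown in this diff
HEADERS = {"Authorization": "Bearer <token>"}  # placeholder credential

with httpx.Client(base_url=BASE, headers=HEADERS) as client:
    state = client.get("/onboarding").json()            # UserOnboarding state
    enabled = client.get("/onboarding/enabled").json()   # bool
    agents = client.get("/onboarding/agents").json()     # recommended agents
    # PATCH /onboarding takes a UserOnboardingUpdate body; field names are
    # not shown in this diff, so this payload is illustrative only.
    client.patch("/onboarding", json={"usageReason": "marketing"})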
@@ -8,10 +8,6 @@ from fastapi import APIRouter, HTTPException, Security
from pydantic import BaseModel, Field

from backend.blocks.llm import LlmModel
from backend.data.analytics import (
    AccuracyTrendsResponse,
    get_accuracy_trends_and_alerts,
)
from backend.data.execution import (
    ExecutionStatus,
    GraphExecutionMeta,
@@ -87,18 +83,6 @@ class ExecutionAnalyticsConfig(BaseModel):
    recommended_model: str


class AccuracyTrendsRequest(BaseModel):
    graph_id: str = Field(..., description="Graph ID to analyze", min_length=1)
    user_id: Optional[str] = Field(None, description="Optional user ID filter")
    days_back: int = Field(30, description="Number of days to look back", ge=7, le=90)
    drop_threshold: float = Field(
        10.0, description="Alert threshold percentage", ge=1.0, le=50.0
    )
    include_historical: bool = Field(
        False, description="Include historical data for charts"
    )


router = APIRouter(
    prefix="/admin",
    tags=["admin", "execution_analytics"],
@@ -435,40 +419,3 @@ async def _process_batch(
    return await asyncio.gather(
        *[process_single_execution(execution) for execution in executions]
    )


@router.get(
    "/execution_accuracy_trends",
    response_model=AccuracyTrendsResponse,
    summary="Get Execution Accuracy Trends and Alerts",
)
async def get_execution_accuracy_trends(
    graph_id: str,
    user_id: Optional[str] = None,
    days_back: int = 30,
    drop_threshold: float = 10.0,
    include_historical: bool = False,
    admin_user_id: str = Security(get_user_id),
) -> AccuracyTrendsResponse:
    """
    Get execution accuracy trends with moving averages and alert detection.
    Simple single-query approach.
    """
    logger.info(
        f"Admin user {admin_user_id} requesting accuracy trends for graph {graph_id}"
    )

    try:
        result = await get_accuracy_trends_and_alerts(
            graph_id=graph_id,
            days_back=days_back,
            user_id=user_id,
            drop_threshold=drop_threshold,
            include_historical=include_historical,
        )

        return result

    except Exception as e:
        logger.exception(f"Error getting accuracy trends for graph {graph_id}: {e}")
        raise HTTPException(status_code=500, detail=str(e))
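The removed admin route above was a plain GET; a hedged request sketch (host, port, and token are placeholders, and any mount prefix beyond /admin is not shown in this diff):

import httpx

resp = httpx.get(
    "http://localhost:8006/admin/execution_accuracy_trends",
    params={"graph_id": "<graph-id>", "days_back": 30, "drop_threshold": 10.0},
    headers={"Authorization": "Bearer <admin-token>"},  # admin auth, placeholder
)
trends = resp.json()  # serialized AccuracyTrendsResponse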
@@ -1,16 +1,9 @@
import logging
from dataclasses import dataclass
from datetime import datetime, timedelta, timezone
from difflib import SequenceMatcher
from typing import Sequence

import prisma

import backend.data.block
import backend.server.v2.library.db as library_db
import backend.server.v2.library.model as library_model
import backend.server.v2.store.db as store_db
import backend.server.v2.store.model as store_model
from backend.blocks import load_all_blocks
from backend.blocks.llm import LlmModel
from backend.data.block import AnyBlockSchema, BlockCategory, BlockInfo, BlockSchema
@@ -21,36 +14,17 @@ from backend.server.v2.builder.model import (
    BlockResponse,
    BlockType,
    CountResponse,
    FilterType,
    Provider,
    ProviderResponse,
    SearchEntry,
    SearchBlocksResponse,
)
from backend.util.cache import cached
from backend.util.models import Pagination

logger = logging.getLogger(__name__)
llm_models = [name.name.lower().replace("_", " ") for name in LlmModel]

MAX_LIBRARY_AGENT_RESULTS = 100
MAX_MARKETPLACE_AGENT_RESULTS = 100
MIN_SCORE_FOR_FILTERED_RESULTS = 10.0

SearchResultItem = BlockInfo | library_model.LibraryAgent | store_model.StoreAgent


@dataclass
class _ScoredItem:
    item: SearchResultItem
    filter_type: FilterType
    score: float
    sort_key: str


@dataclass
class _SearchCacheEntry:
    items: list[SearchResultItem]
    total_items: dict[FilterType, int]
_static_counts_cache: dict | None = None
_suggested_blocks: list[BlockInfo] | None = None


def get_block_categories(category_blocks: int = 3) -> list[BlockCategoryResponse]:
@@ -156,244 +130,71 @@ def get_block_by_id(block_id: str) -> BlockInfo | None:
    return None


async def update_search(user_id: str, search: SearchEntry) -> str:
def search_blocks(
    include_blocks: bool = True,
    include_integrations: bool = True,
    query: str = "",
    page: int = 1,
    page_size: int = 50,
) -> SearchBlocksResponse:
    """
    Upsert a search request for the user and return the search ID.
    Get blocks based on the filter and query.
    `providers` only applies for `integrations` filter.
    """
    if search.search_id:
        # Update existing search
        await prisma.models.BuilderSearchHistory.prisma().update(
            where={
                "id": search.search_id,
            },
            data={
                "searchQuery": search.search_query or "",
                "filter": search.filter or [],  # type: ignore
                "byCreator": search.by_creator or [],
            },
        )
        return search.search_id
    else:
        # Create new search
        new_search = await prisma.models.BuilderSearchHistory.prisma().create(
            data={
                "userId": user_id,
                "searchQuery": search.search_query or "",
                "filter": search.filter or [],  # type: ignore
                "byCreator": search.by_creator or [],
            }
        )
        return new_search.id
    blocks: list[AnyBlockSchema] = []
    query = query.lower()


async def get_recent_searches(user_id: str, limit: int = 5) -> list[SearchEntry]:
    """
    Get the user's most recent search requests.
    """
    searches = await prisma.models.BuilderSearchHistory.prisma().find_many(
        where={
            "userId": user_id,
        },
        order={
            "updatedAt": "desc",
        },
        take=limit,
    )
    return [
        SearchEntry(
            search_query=s.searchQuery,
            filter=s.filter,  # type: ignore
            by_creator=s.byCreator,
            search_id=s.id,
        )
        for s in searches
    ]


async def get_sorted_search_results(
    *,
    user_id: str,
    search_query: str | None,
    filters: Sequence[FilterType],
    by_creator: Sequence[str] | None = None,
) -> _SearchCacheEntry:
    normalized_filters: tuple[FilterType, ...] = tuple(sorted(set(filters or [])))
    normalized_creators: tuple[str, ...] = tuple(sorted(set(by_creator or [])))
    return await _build_cached_search_results(
        user_id=user_id,
        search_query=search_query or "",
        filters=normalized_filters,
        by_creator=normalized_creators,
    )


@cached(ttl_seconds=300, shared_cache=True)
async def _build_cached_search_results(
    user_id: str,
    search_query: str,
    filters: tuple[FilterType, ...],
    by_creator: tuple[str, ...],
) -> _SearchCacheEntry:
    normalized_query = (search_query or "").strip().lower()

    include_blocks = "blocks" in filters
    include_integrations = "integrations" in filters
    include_library_agents = "my_agents" in filters
    include_marketplace_agents = "marketplace_agents" in filters

    scored_items: list[_ScoredItem] = []
    total_items: dict[FilterType, int] = {
        "blocks": 0,
        "integrations": 0,
        "marketplace_agents": 0,
        "my_agents": 0,
    }

    block_results, block_total, integration_total = _collect_block_results(
        normalized_query=normalized_query,
        include_blocks=include_blocks,
        include_integrations=include_integrations,
    )
    scored_items.extend(block_results)
    total_items["blocks"] = block_total
    total_items["integrations"] = integration_total

    if include_library_agents:
        library_response = await library_db.list_library_agents(
            user_id=user_id,
            search_term=search_query or None,
            page=1,
            page_size=MAX_LIBRARY_AGENT_RESULTS,
        )
        total_items["my_agents"] = library_response.pagination.total_items
        scored_items.extend(
            _build_library_items(
                agents=library_response.agents,
                normalized_query=normalized_query,
            )
        )

    if include_marketplace_agents:
        marketplace_response = await store_db.get_store_agents(
            creators=list(by_creator) or None,
            search_query=search_query or None,
            page=1,
            page_size=MAX_MARKETPLACE_AGENT_RESULTS,
        )
        total_items["marketplace_agents"] = marketplace_response.pagination.total_items
        scored_items.extend(
            _build_marketplace_items(
                agents=marketplace_response.agents,
                normalized_query=normalized_query,
            )
        )

    sorted_items = sorted(
        scored_items,
        key=lambda entry: (-entry.score, entry.sort_key, entry.filter_type),
    )

    return _SearchCacheEntry(
        items=[entry.item for entry in sorted_items],
        total_items=total_items,
    )
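One detail worth noting in the new search path above: filters and creators are deduplicated and sorted before reaching the @cached builder, so differently ordered (or duplicated) filter lists share one 300-second cache entry. A small sketch, inside an async context:

# Both calls normalize to filters=("blocks", "integrations") and hit the same entry.
a = await get_sorted_search_results(
    user_id="user-1", search_query="email", filters=["blocks", "integrations"]
)
b = await get_sorted_search_results(
    user_id="user-1", search_query="email", filters=["integrations", "blocks", "blocks"]
)
assert a.items == b.items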
def _collect_block_results(
    *,
    normalized_query: str,
    include_blocks: bool,
    include_integrations: bool,
) -> tuple[list[_ScoredItem], int, int]:
    results: list[_ScoredItem] = []
    total = 0
    skip = (page - 1) * page_size
    take = page_size
    block_count = 0
    integration_count = 0

    if not include_blocks and not include_integrations:
        return results, block_count, integration_count

    for block_type in load_all_blocks().values():
        block: AnyBlockSchema = block_type()
        # Skip disabled blocks
        if block.disabled:
            continue

        block_info = block.get_info()
        # Skip blocks that don't match the query
        if (
            query not in block.name.lower()
            and query not in block.description.lower()
            and not _matches_llm_model(block.input_schema, query)
        ):
            continue
        keep = False
        credentials = list(block.input_schema.get_credentials_fields().values())
        is_integration = len(credentials) > 0

        if is_integration and not include_integrations:
            continue
        if not is_integration and not include_blocks:
            continue

        score = _score_block(block, block_info, normalized_query)
        if not _should_include_item(score, normalized_query):
            continue

        filter_type: FilterType = "integrations" if is_integration else "blocks"
        if is_integration:
            if include_integrations and len(credentials) > 0:
                keep = True
                integration_count += 1
        else:
            if include_blocks and len(credentials) == 0:
                keep = True
                block_count += 1

        results.append(
            _ScoredItem(
                item=block_info,
                filter_type=filter_type,
                score=score,
                sort_key=_get_item_name(block_info),
            )
        )

    return results, block_count, integration_count


def _build_library_items(
    *,
    agents: list[library_model.LibraryAgent],
    normalized_query: str,
) -> list[_ScoredItem]:
    results: list[_ScoredItem] = []

    for agent in agents:
        score = _score_library_agent(agent, normalized_query)
        if not _should_include_item(score, normalized_query):
        if not keep:
            continue

        results.append(
            _ScoredItem(
                item=agent,
                filter_type="my_agents",
                score=score,
                sort_key=_get_item_name(agent),
            )
        )

    return results


def _build_marketplace_items(
    *,
    agents: list[store_model.StoreAgent],
    normalized_query: str,
) -> list[_ScoredItem]:
    results: list[_ScoredItem] = []

    for agent in agents:
        score = _score_store_agent(agent, normalized_query)
        if not _should_include_item(score, normalized_query):
            total += 1
            if skip > 0:
                skip -= 1
                continue
            if take > 0:
                take -= 1
                blocks.append(block)
            continue

        results.append(
            _ScoredItem(
                item=agent,
                filter_type="marketplace_agents",
                score=score,
                sort_key=_get_item_name(agent),
            )
        )

    return results
    return SearchBlocksResponse(
        blocks=BlockResponse(
            blocks=[b.get_info() for b in blocks],
            pagination=Pagination(
                total_items=total,
                total_pages=(total + page_size - 1) // page_size,
                current_page=page,
                page_size=page_size,
            ),
        ),
        total_block_count=block_count,
        total_integration_count=integration_count,
    )
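The old search_blocks pagination interleaved above skips and takes while scanning. As a concrete check of its arithmetic (the values are illustrative):

# page=2, page_size=50: the scan discards the first 50 matches, keeps the next 50.
page, page_size = 2, 50
skip = (page - 1) * page_size  # 50
take = page_size               # 50
total = 173                    # suppose 173 blocks match the query
total_pages = (total + page_size - 1) // page_size
assert (skip, take, total_pages) == (50, 50, 4)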
def get_providers(
@@ -450,12 +251,16 @@ async def get_counts(user_id: str) -> CountResponse:
    )


@cached(ttl_seconds=3600)
async def _get_static_counts():
    """
    Get counts of blocks, integrations, and marketplace agents.
    This is cached to avoid unnecessary database queries and calculations.
    Can't use functools.cache here because the function is async.
    """
    global _static_counts_cache
    if _static_counts_cache is not None:
        return _static_counts_cache

    all_blocks = 0
    input_blocks = 0
    action_blocks = 0
@@ -482,7 +287,7 @@ async def _get_static_counts():

    marketplace_agents = await prisma.models.StoreAgent.prisma().count()

    return {
    _static_counts_cache = {
        "all_blocks": all_blocks,
        "input_blocks": input_blocks,
        "action_blocks": action_blocks,
@@ -491,6 +296,8 @@ async def _get_static_counts():
        "marketplace_agents": marketplace_agents,
    }

    return _static_counts_cache


def _matches_llm_model(schema_cls: type[BlockSchema], query: str) -> bool:
    for field in schema_cls.model_fields.values():
@@ -501,123 +308,6 @@ def _matches_llm_model(schema_cls: type[BlockSchema], query: str) -> bool:
    return False


def _score_block(
    block: AnyBlockSchema,
    block_info: BlockInfo,
    normalized_query: str,
) -> float:
    if not normalized_query:
        return 0.0

    name = block_info.name.lower()
    description = block_info.description.lower()
    score = _score_primary_fields(name, description, normalized_query)

    category_text = " ".join(
        category.get("category", "").lower() for category in block_info.categories
    )
    score += _score_additional_field(category_text, normalized_query, 12, 6)

    credentials_info = block.input_schema.get_credentials_fields_info().values()
    provider_names = [
        provider.value.lower()
        for info in credentials_info
        for provider in info.provider
    ]
    provider_text = " ".join(provider_names)
    score += _score_additional_field(provider_text, normalized_query, 15, 6)

    if _matches_llm_model(block.input_schema, normalized_query):
        score += 20

    return score


def _score_library_agent(
    agent: library_model.LibraryAgent,
    normalized_query: str,
) -> float:
    if not normalized_query:
        return 0.0

    name = agent.name.lower()
    description = (agent.description or "").lower()
    instructions = (agent.instructions or "").lower()

    score = _score_primary_fields(name, description, normalized_query)
    score += _score_additional_field(instructions, normalized_query, 15, 6)
    score += _score_additional_field(
        agent.creator_name.lower(), normalized_query, 10, 5
    )

    return score


def _score_store_agent(
    agent: store_model.StoreAgent,
    normalized_query: str,
) -> float:
    if not normalized_query:
        return 0.0

    name = agent.agent_name.lower()
    description = agent.description.lower()
    sub_heading = agent.sub_heading.lower()

    score = _score_primary_fields(name, description, normalized_query)
    score += _score_additional_field(sub_heading, normalized_query, 12, 6)
    score += _score_additional_field(agent.creator.lower(), normalized_query, 10, 5)

    return score


def _score_primary_fields(name: str, description: str, query: str) -> float:
    score = 0.0
    if name == query:
        score += 120
    elif name.startswith(query):
        score += 90
    elif query in name:
        score += 60

    score += SequenceMatcher(None, name, query).ratio() * 50
    if description:
        if query in description:
            score += 30
        score += SequenceMatcher(None, description, query).ratio() * 25
    return score


def _score_additional_field(
    value: str,
    query: str,
    contains_weight: float,
    similarity_weight: float,
) -> float:
    if not value or not query:
        return 0.0

    score = 0.0
    if query in value:
        score += contains_weight
    score += SequenceMatcher(None, value, query).ratio() * similarity_weight
    return score
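To make the weights above concrete, a small worked example of _score_primary_fields (the first value follows exactly from the code; the second is approximate because SequenceMatcher's ratio depends on string lengths):

from difflib import SequenceMatcher

# Exact name match, empty description: 120 (exact) + 1.0 * 50 (similarity) = 170.0
assert _score_primary_fields("email", "", "email") == 170.0

# Prefix match: 90 (startswith) + SequenceMatcher ratio * 50.
# ratio("email sender", "email") = 2*5 / (12+5) ≈ 0.588, so score ≈ 119.4,
# comfortably above MIN_SCORE_FOR_FILTERED_RESULTS (10.0).
score = _score_primary_fields("email sender", "", "email")
assert 119 < score < 120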
def _should_include_item(score: float, normalized_query: str) -> bool:
    if not normalized_query:
        return True
    return score >= MIN_SCORE_FOR_FILTERED_RESULTS


def _get_item_name(item: SearchResultItem) -> str:
    if isinstance(item, BlockInfo):
        return item.name.lower()
    if isinstance(item, library_model.LibraryAgent):
        return item.name.lower()
    return item.agent_name.lower()


@cached(ttl_seconds=3600)
def _get_all_providers() -> dict[ProviderName, Provider]:
    providers: dict[ProviderName, Provider] = {}
@@ -639,9 +329,13 @@ def _get_all_providers() -> dict[ProviderName, Provider]:
    return providers


@cached(ttl_seconds=3600)
async def get_suggested_blocks(count: int = 5) -> list[BlockInfo]:
    suggested_blocks = []
    global _suggested_blocks

    if _suggested_blocks is not None and len(_suggested_blocks) >= count:
        return _suggested_blocks[:count]

    _suggested_blocks = []
    # Sum the number of executions for each block type
    # Prisma cannot group by nested relations, so we do a raw query
    # Calculate the cutoff timestamp
@@ -682,7 +376,7 @@ async def get_suggested_blocks(count: int = 5) -> list[BlockInfo]:
    # Sort blocks by execution count
    blocks.sort(key=lambda x: x[1], reverse=True)

    suggested_blocks = [block[0] for block in blocks]
    _suggested_blocks = [block[0] for block in blocks]

    # Return the top blocks
    return suggested_blocks[:count]
    return _suggested_blocks[:count]
@@ -18,17 +18,10 @@ FilterType = Literal[
BlockType = Literal["all", "input", "action", "output"]


class SearchEntry(BaseModel):
    search_query: str | None = None
    filter: list[FilterType] | None = None
    by_creator: list[str] | None = None
    search_id: str | None = None


# Suggestions
class SuggestionsResponse(BaseModel):
    otto_suggestions: list[str]
    recent_searches: list[SearchEntry]
    recent_searches: list[str]
    providers: list[ProviderName]
    top_blocks: list[BlockInfo]

@@ -39,7 +32,7 @@ class BlockCategoryResponse(BaseModel):
    total_blocks: int
    blocks: list[BlockInfo]

    model_config = {"use_enum_values": False}  # Use enum names like "AI"
    model_config = {"use_enum_values": False}  # <== use enum names like "AI"


# Input/Action/Output and see all for block categories
@@ -60,11 +53,17 @@ class ProviderResponse(BaseModel):
    pagination: Pagination


class SearchBlocksResponse(BaseModel):
    blocks: BlockResponse
    total_block_count: int
    total_integration_count: int


class SearchResponse(BaseModel):
    items: list[BlockInfo | library_model.LibraryAgent | store_model.StoreAgent]
    search_id: str
    total_items: dict[FilterType, int]
    pagination: Pagination
    page: int
    more_pages: bool


class CountResponse(BaseModel):
|
||||
|
||||
import backend.server.v2.builder.db as builder_db
|
||||
import backend.server.v2.builder.model as builder_model
|
||||
import backend.server.v2.library.db as library_db
|
||||
import backend.server.v2.library.model as library_model
|
||||
import backend.server.v2.store.db as store_db
|
||||
import backend.server.v2.store.model as store_model
|
||||
from backend.integrations.providers import ProviderName
|
||||
from backend.util.models import Pagination
|
||||
|
||||
@@ -41,9 +45,7 @@ def sanitize_query(query: str | None) -> str | None:
|
||||
summary="Get Builder suggestions",
|
||||
response_model=builder_model.SuggestionsResponse,
|
||||
)
|
||||
async def get_suggestions(
|
||||
user_id: Annotated[str, fastapi.Security(get_user_id)],
|
||||
) -> builder_model.SuggestionsResponse:
|
||||
async def get_suggestions() -> builder_model.SuggestionsResponse:
|
||||
"""
|
||||
Get all suggestions for the Blocks Menu.
|
||||
"""
|
||||
@@ -53,7 +55,11 @@ async def get_suggestions(
|
||||
"Help me create a list",
|
||||
"Help me feed my data to Google Maps",
|
||||
],
|
||||
recent_searches=await builder_db.get_recent_searches(user_id),
|
||||
recent_searches=[
|
||||
"image generation",
|
||||
"deepfake",
|
||||
"competitor analysis",
|
||||
],
|
||||
providers=[
|
||||
ProviderName.TWITTER,
|
||||
ProviderName.GITHUB,
|
||||
@@ -141,6 +147,7 @@ async def get_providers(
|
||||
)
|
||||
|
||||
|
||||
# Not using post method because on frontend, orval doesn't support Infinite Query with POST method.
|
||||
@router.get(
|
||||
"/search",
|
||||
summary="Builder search",
|
||||
@@ -150,7 +157,7 @@ async def get_providers(
|
||||
async def search(
|
||||
user_id: Annotated[str, fastapi.Security(get_user_id)],
|
||||
search_query: Annotated[str | None, fastapi.Query()] = None,
|
||||
filter: Annotated[list[builder_model.FilterType] | None, fastapi.Query()] = None,
|
||||
filter: Annotated[list[str] | None, fastapi.Query()] = None,
|
||||
search_id: Annotated[str | None, fastapi.Query()] = None,
|
||||
by_creator: Annotated[list[str] | None, fastapi.Query()] = None,
|
||||
page: Annotated[int, fastapi.Query()] = 1,
|
||||
@@ -169,43 +176,69 @@ async def search(
|
||||
]
|
||||
search_query = sanitize_query(search_query)
|
||||
|
||||
# Get all possible results
|
||||
cached_results = await builder_db.get_sorted_search_results(
|
||||
user_id=user_id,
|
||||
search_query=search_query,
|
||||
filters=filter,
|
||||
by_creator=by_creator,
|
||||
)
|
||||
|
||||
# Paginate results
|
||||
total_combined_items = len(cached_results.items)
|
||||
pagination = Pagination(
|
||||
total_items=total_combined_items,
|
||||
total_pages=(total_combined_items + page_size - 1) // page_size,
|
||||
current_page=page,
|
||||
page_size=page_size,
|
||||
)
|
||||
|
||||
start_idx = (page - 1) * page_size
|
||||
end_idx = start_idx + page_size
|
||||
paginated_items = cached_results.items[start_idx:end_idx]
|
||||
|
||||
# Update the search entry by id
|
||||
search_id = await builder_db.update_search(
|
||||
user_id,
|
||||
builder_model.SearchEntry(
|
||||
search_query=search_query,
|
||||
filter=filter,
|
||||
by_creator=by_creator,
|
||||
search_id=search_id,
|
||||
# Blocks&Integrations
|
||||
blocks = builder_model.SearchBlocksResponse(
|
||||
blocks=builder_model.BlockResponse(
|
||||
blocks=[],
|
||||
pagination=Pagination.empty(),
|
||||
),
|
||||
total_block_count=0,
|
||||
total_integration_count=0,
|
||||
)
|
||||
if "blocks" in filter or "integrations" in filter:
|
||||
blocks = builder_db.search_blocks(
|
||||
include_blocks="blocks" in filter,
|
||||
include_integrations="integrations" in filter,
|
||||
query=search_query or "",
|
||||
page=page,
|
||||
page_size=page_size,
|
||||
)
|
||||
|
||||
# Library Agents
|
||||
my_agents = library_model.LibraryAgentResponse(
|
||||
agents=[],
|
||||
pagination=Pagination.empty(),
|
||||
)
|
||||
if "my_agents" in filter:
|
||||
my_agents = await library_db.list_library_agents(
|
||||
user_id=user_id,
|
||||
search_term=search_query,
|
||||
page=page,
|
||||
page_size=page_size,
|
||||
)
|
||||
|
||||
# Marketplace Agents
|
||||
marketplace_agents = store_model.StoreAgentsResponse(
|
||||
agents=[],
|
||||
pagination=Pagination.empty(),
|
||||
)
|
||||
if "marketplace_agents" in filter:
|
||||
marketplace_agents = await store_db.get_store_agents(
|
||||
creators=by_creator,
|
||||
search_query=search_query,
|
||||
page=page,
|
||||
page_size=page_size,
|
||||
)
|
||||
|
||||
more_pages = False
|
||||
if (
|
||||
blocks.blocks.pagination.current_page < blocks.blocks.pagination.total_pages
|
||||
or my_agents.pagination.current_page < my_agents.pagination.total_pages
|
||||
or marketplace_agents.pagination.current_page
|
||||
< marketplace_agents.pagination.total_pages
|
||||
):
|
||||
more_pages = True
|
||||
|
||||
return builder_model.SearchResponse(
|
||||
items=paginated_items,
|
||||
search_id=search_id,
|
||||
total_items=cached_results.total_items,
|
||||
pagination=pagination,
|
||||
items=blocks.blocks.blocks + my_agents.agents + marketplace_agents.agents,
|
||||
total_items={
|
||||
"blocks": blocks.total_block_count,
|
||||
"integrations": blocks.total_integration_count,
|
||||
"marketplace_agents": marketplace_agents.pagination.total_items,
|
||||
"my_agents": my_agents.pagination.total_items,
|
||||
},
|
||||
page=page,
|
||||
more_pages=more_pages,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -23,7 +23,7 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
router = APIRouter(
|
||||
tags=["v2", "executions", "review"],
|
||||
tags=["executions", "review", "private"],
|
||||
dependencies=[Security(autogpt_auth_lib.requires_user)],
|
||||
)
|
||||
|
||||
|
||||
@@ -1,15 +1,13 @@
|
||||
import logging
|
||||
from typing import Literal, Optional
|
||||
from typing import Optional
|
||||
|
||||
import autogpt_libs.auth as autogpt_auth_lib
|
||||
from fastapi import APIRouter, Body, HTTPException, Query, Security, status
|
||||
from fastapi.responses import Response
|
||||
from prisma.enums import OnboardingStep
|
||||
|
||||
import backend.server.v2.library.db as library_db
|
||||
import backend.server.v2.library.model as library_model
|
||||
import backend.server.v2.store.exceptions as store_exceptions
|
||||
from backend.data.onboarding import complete_onboarding_step
|
||||
from backend.util.exceptions import DatabaseError, NotFoundError
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -202,9 +200,6 @@ async def get_library_agent_by_store_listing_version_id(
|
||||
)
|
||||
async def add_marketplace_agent_to_library(
|
||||
store_listing_version_id: str = Body(embed=True),
|
||||
source: Literal["onboarding", "marketplace"] = Body(
|
||||
default="marketplace", embed=True
|
||||
),
|
||||
user_id: str = Security(autogpt_auth_lib.get_user_id),
|
||||
) -> library_model.LibraryAgent:
|
||||
"""
|
||||
@@ -222,15 +217,10 @@ async def add_marketplace_agent_to_library(
|
||||
HTTPException(500): If a server/database error occurs.
|
||||
"""
|
||||
try:
|
||||
agent = await library_db.add_store_agent_to_library(
|
||||
return await library_db.add_store_agent_to_library(
|
||||
store_listing_version_id=store_listing_version_id,
|
||||
user_id=user_id,
|
||||
)
|
||||
if source != "onboarding":
|
||||
await complete_onboarding_step(
|
||||
user_id, OnboardingStep.MARKETPLACE_ADD_AGENT
|
||||
)
|
||||
return agent
|
||||
|
||||
except store_exceptions.AgentNotFoundError as e:
|
||||
logger.warning(
|
||||
|
||||
@@ -10,7 +10,6 @@ from backend.data.execution import GraphExecutionMeta
|
||||
from backend.data.graph import get_graph
|
||||
from backend.data.integrations import get_webhook
|
||||
from backend.data.model import CredentialsMetaInput
|
||||
from backend.data.onboarding import increment_runs
|
||||
from backend.executor.utils import add_graph_execution, make_node_credentials_input_map
|
||||
from backend.integrations.creds_manager import IntegrationCredentialsManager
|
||||
from backend.integrations.webhooks import get_webhook_manager
|
||||
@@ -402,8 +401,6 @@ async def execute_preset(
|
||||
merged_node_input = preset.inputs | inputs
|
||||
merged_credential_inputs = preset.credentials | credential_inputs
|
||||
|
||||
await increment_runs(user_id)
|
||||
|
||||
return await add_graph_execution(
|
||||
user_id=user_id,
|
||||
graph_id=preset.graph_id,
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import datetime
|
||||
import json
|
||||
from unittest.mock import AsyncMock
|
||||
|
||||
import fastapi.testclient
|
||||
import pytest
|
||||
@@ -226,10 +225,6 @@ def test_add_agent_to_library_success(
|
||||
"backend.server.v2.library.db.add_store_agent_to_library"
|
||||
)
|
||||
mock_db_call.return_value = mock_library_agent
|
||||
mock_complete_onboarding = mocker.patch(
|
||||
"backend.server.v2.library.routes.agents.complete_onboarding_step",
|
||||
new_callable=AsyncMock,
|
||||
)
|
||||
|
||||
response = client.post(
|
||||
"/agents", json={"store_listing_version_id": "test-version-id"}
|
||||
@@ -244,7 +239,6 @@ def test_add_agent_to_library_success(
|
||||
mock_db_call.assert_called_once_with(
|
||||
store_listing_version_id="test-version-id", user_id=test_user_id
|
||||
)
|
||||
mock_complete_onboarding.assert_awaited_once()
|
||||
|
||||
|
||||
def test_add_agent_to_library_error(mocker: pytest_mock.MockFixture, test_user_id: str):
|
||||
|
||||
@@ -0,0 +1,168 @@
|
||||
"""
|
||||
Script to backfill embeddings for existing store listing versions.
|
||||
|
||||
This script should be run after the migration to add the embedding column
|
||||
to populate embeddings for all existing store listing versions.
|
||||
|
||||
Usage:
|
||||
poetry run python -m backend.server.v2.store.backfill_embeddings
|
||||
poetry run python -m backend.server.v2.store.backfill_embeddings --dry-run
|
||||
poetry run python -m backend.server.v2.store.backfill_embeddings --batch-size 25
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import asyncio
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from backend.data.db import connect, disconnect, query_raw_with_schema
|
||||
from backend.integrations.embeddings import EmbeddingService, create_search_text
|
||||
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format="%(asctime)s - %(levelname)s - %(message)s",
|
||||
)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Default batch size for processing
|
||||
DEFAULT_BATCH_SIZE = 50
|
||||
|
||||
# Delay between batches to avoid rate limits (seconds)
|
||||
BATCH_DELAY_SECONDS = 1.0
|
||||
|
||||
|
||||
async def backfill_embeddings(
|
||||
dry_run: bool = False,
|
||||
batch_size: int = DEFAULT_BATCH_SIZE,
|
||||
) -> tuple[int, int]:
|
||||
"""
|
||||
Backfill embeddings for all store listing versions without embeddings.
|
||||
|
||||
Args:
|
||||
dry_run: If True, don't make any changes, just report what would be done.
|
||||
batch_size: Number of versions to process in each batch.
|
||||
|
||||
Returns:
|
||||
Tuple of (processed_count, error_count)
|
||||
"""
|
||||
await connect()
|
||||
|
||||
try:
|
||||
embedding_service = EmbeddingService()
|
||||
|
||||
# Get all versions without embeddings
|
||||
versions = await query_raw_with_schema(
|
||||
"""
|
||||
SELECT id, name, "subHeading", description
|
||||
FROM {schema_prefix}"StoreListingVersion"
|
||||
WHERE embedding IS NULL
|
||||
ORDER BY "createdAt" DESC
|
||||
"""
|
||||
)
|
||||
|
||||
total = len(versions)
|
||||
logger.info(f"Found {total} versions without embeddings")
|
||||
|
||||
if dry_run:
|
||||
logger.info("Dry run mode - no changes will be made")
|
||||
return (0, 0)
|
||||
|
||||
if total == 0:
|
||||
logger.info("No versions need embeddings")
|
||||
return (0, 0)
|
||||
|
||||
processed = 0
|
||||
errors = 0
|
||||
|
||||
for i in range(0, total, batch_size):
|
||||
batch = versions[i : i + batch_size]
|
||||
batch_num = (i // batch_size) + 1
|
||||
total_batches = (total + batch_size - 1) // batch_size
|
||||
|
||||
logger.info(f"Processing batch {batch_num}/{total_batches}")
|
||||
|
||||
for version in batch:
|
||||
version_id = version["id"]
|
||||
try:
|
||||
search_text = create_search_text(
|
||||
version["name"] or "",
|
||||
version["subHeading"] or "",
|
||||
version["description"] or "",
|
||||
)
|
||||
|
||||
if not search_text:
|
||||
logger.warning(f"Skipping {version_id} - no searchable text")
|
||||
continue
|
||||
|
||||
embedding = await embedding_service.generate_embedding(search_text)
|
||||
embedding_str = "[" + ",".join(map(str, embedding)) + "]"
|
||||
|
||||
await query_raw_with_schema(
|
||||
"""
|
||||
UPDATE {schema_prefix}"StoreListingVersion"
|
||||
SET embedding = $1::vector
|
||||
WHERE id = $2
|
||||
""",
|
||||
embedding_str,
|
||||
version_id,
|
||||
)
|
||||
|
||||
processed += 1
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error processing {version_id}: {e}")
|
||||
errors += 1
|
||||
|
||||
logger.info(f"Progress: {processed}/{total} processed, {errors} errors")
|
||||
|
||||
# Rate limit: wait between batches to avoid hitting API limits
|
||||
if i + batch_size < total:
|
||||
await asyncio.sleep(BATCH_DELAY_SECONDS)
|
||||
|
||||
logger.info(f"Backfill complete: {processed} processed, {errors} errors")
|
||||
return (processed, errors)
|
||||
|
||||
finally:
|
||||
await disconnect()
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Backfill embeddings for store listing versions"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--dry-run",
|
||||
action="store_true",
|
||||
help="Don't make any changes, just report what would be done",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--batch-size",
|
||||
type=int,
|
||||
default=DEFAULT_BATCH_SIZE,
|
||||
help=f"Number of versions to process in each batch (default: {DEFAULT_BATCH_SIZE})",
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
try:
|
||||
processed, errors = asyncio.run(
|
||||
backfill_embeddings(dry_run=args.dry_run, batch_size=args.batch_size)
|
||||
)
|
||||
|
||||
if errors > 0:
|
||||
logger.warning(f"Completed with {errors} errors")
|
||||
sys.exit(1)
|
||||
else:
|
||||
logger.info("Completed successfully")
|
||||
sys.exit(0)
|
||||
|
||||
except KeyboardInterrupt:
|
||||
logger.info("Interrupted by user")
|
||||
sys.exit(130)
|
||||
except Exception as e:
|
||||
logger.error(f"Fatal error: {e}")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -26,6 +26,7 @@ from backend.data.notifications import (
|
||||
AgentRejectionData,
|
||||
NotificationEventModel,
|
||||
)
|
||||
from backend.integrations.embeddings import create_search_text, get_embedding_service
|
||||
from backend.notifications.notifications import queue_notification_async
|
||||
from backend.util.exceptions import DatabaseError
|
||||
from backend.util.settings import Settings
|
||||
@@ -56,31 +57,40 @@ async def get_store_agents(
|
||||
)
|
||||
|
||||
try:
|
||||
# If search_query is provided, use full-text search
|
||||
# If search_query is provided, use vector similarity search
|
||||
if search_query:
|
||||
offset = (page - 1) * page_size
|
||||
|
||||
# Generate embedding for search query
|
||||
embedding_service = await get_embedding_service()
|
||||
query_embedding = await embedding_service.generate_embedding(search_query)
|
||||
# Convert embedding to PostgreSQL array format
|
||||
embedding_str = "[" + ",".join(map(str, query_embedding)) + "]"
|
||||
|
||||
# Whitelist allowed order_by columns
|
||||
# For vector search, we use similarity instead of rank
|
||||
ALLOWED_ORDER_BY = {
|
||||
"rating": "rating DESC, rank DESC",
|
||||
"runs": "runs DESC, rank DESC",
|
||||
"name": "agent_name ASC, rank ASC",
|
||||
"updated_at": "updated_at DESC, rank DESC",
|
||||
"rating": "rating DESC, similarity DESC",
|
||||
"runs": "runs DESC, similarity DESC",
|
||||
"name": "agent_name ASC, similarity DESC",
|
||||
"updated_at": "updated_at DESC, similarity DESC",
|
||||
}
|
||||
|
||||
# Validate and get order clause
|
||||
if sorted_by and sorted_by in ALLOWED_ORDER_BY:
|
||||
order_by_clause = ALLOWED_ORDER_BY[sorted_by]
|
||||
else:
|
||||
order_by_clause = "updated_at DESC, rank DESC"
|
||||
# Default: order by vector similarity (most similar first)
|
||||
order_by_clause = "similarity DESC, updated_at DESC"
|
||||
|
||||
# Build WHERE conditions and parameters list
|
||||
where_parts: list[str] = []
|
||||
params: list[typing.Any] = [search_query] # $1 - search term
|
||||
params: list[typing.Any] = [embedding_str] # $1 - query embedding
|
||||
param_index = 2 # Start at $2 for next parameter
|
||||
|
||||
# Always filter for available agents
|
||||
# Always filter for available agents and agents with embeddings
|
||||
where_parts.append("is_available = true")
|
||||
where_parts.append("embedding IS NOT NULL")
|
||||
|
||||
if featured:
|
||||
where_parts.append("featured = true")
|
||||
@@ -103,7 +113,9 @@ async def get_store_agents(
|
||||
limit_param = f"${param_index}"
|
||||
offset_param = f"${param_index + 1}"
|
||||
|
||||
# Execute full-text search query with parameterized values
|
||||
# Vector similarity search query using cosine distance
|
||||
# The <=> operator returns cosine distance (0 = identical, 2 = opposite)
|
||||
# We convert to similarity: 1 - distance/2 gives range [0, 1]
|
||||
sql_query = f"""
|
||||
SELECT
|
||||
slug,
|
||||
@@ -119,22 +131,18 @@ async def get_store_agents(
|
||||
featured,
|
||||
is_available,
|
||||
updated_at,
|
||||
ts_rank_cd(search, query) AS rank
|
||||
FROM {{schema_prefix}}"StoreAgent",
|
||||
plainto_tsquery('english', $1) AS query
|
||||
1 - (embedding <=> $1::vector) AS similarity
|
||||
FROM {{schema_prefix}}"StoreAgent"
|
||||
WHERE {sql_where_clause}
|
||||
AND search @@ query
|
||||
ORDER BY {order_by_clause}
|
||||
LIMIT {limit_param} OFFSET {offset_param}
|
||||
"""
|
||||
|
||||
# Count query for pagination - only uses search term parameter
|
||||
# Count query for pagination
|
||||
count_query = f"""
|
||||
SELECT COUNT(*) as count
|
||||
FROM {{schema_prefix}}"StoreAgent",
|
||||
plainto_tsquery('english', $1) AS query
|
||||
FROM {{schema_prefix}}"StoreAgent"
|
||||
WHERE {sql_where_clause}
|
||||
AND search @@ query
|
||||
"""
|
||||
|
||||
# Execute both queries with parameters
|
||||
@@ -255,6 +263,56 @@ async def log_search_term(search_query: str):
|
||||
logger.error(f"Error logging search term: {e}")
|
||||
|
||||
|
||||
async def _generate_and_store_embedding(
|
||||
store_listing_version_id: str,
|
||||
name: str,
|
||||
sub_heading: str,
|
||||
description: str,
|
||||
) -> None:
|
||||
"""
|
||||
Generate and store embedding for a store listing version.
|
||||
|
||||
This creates a vector embedding from the agent's name, sub_heading, and
|
||||
description, which is used for semantic search.
|
||||
|
||||
Args:
|
||||
store_listing_version_id: The ID of the store listing version.
|
||||
name: The agent name.
|
||||
sub_heading: The agent sub-heading/tagline.
|
||||
description: The agent description.
|
||||
"""
|
||||
try:
|
||||
embedding_service = await get_embedding_service()
|
||||
search_text = create_search_text(name, sub_heading, description)
|
||||
|
||||
if not search_text:
|
||||
logger.warning(
|
||||
f"No searchable text for version {store_listing_version_id}, "
|
||||
"skipping embedding generation"
|
||||
)
|
||||
return
|
||||
|
||||
embedding = await embedding_service.generate_embedding(search_text)
|
||||
embedding_str = "[" + ",".join(map(str, embedding)) + "]"
|
||||
|
||||
await query_raw_with_schema(
|
||||
"""
|
||||
UPDATE {schema_prefix}"StoreListingVersion"
|
||||
SET embedding = $1::vector
|
||||
WHERE id = $2
|
||||
""",
|
||||
embedding_str,
|
||||
store_listing_version_id,
|
||||
)
|
||||
logger.debug(f"Generated embedding for version {store_listing_version_id}")
|
||||
except Exception as e:
|
||||
# Log error but don't fail the whole operation
|
||||
# Embeddings can be generated later via backfill
|
||||
logger.error(
|
||||
f"Failed to generate embedding for {store_listing_version_id}: {e}"
|
||||
)
|
||||
|
||||
|
||||
async def get_store_agent_details(
|
||||
username: str, agent_name: str
|
||||
) -> backend.server.v2.store.model.StoreAgentDetails:
|
||||
@@ -327,7 +385,6 @@ async def get_store_agent_details(
|
||||
slug=agent.slug,
|
||||
agent_name=agent.agent_name,
|
||||
agent_video=agent.agent_video or "",
|
||||
agent_output_demo=agent.agent_output_demo or "",
|
||||
agent_image=agent.agent_image,
|
||||
creator=agent.creator_username or "",
|
||||
creator_avatar=agent.creator_avatar or "",
|
||||
@@ -398,7 +455,6 @@ async def get_store_agent_by_version_id(
|
||||
slug=agent.slug,
|
||||
agent_name=agent.agent_name,
|
||||
agent_video=agent.agent_video or "",
|
||||
agent_output_demo=agent.agent_output_demo or "",
|
||||
agent_image=agent.agent_image,
|
||||
creator=agent.creator_username or "",
|
||||
creator_avatar=agent.creator_avatar or "",
|
||||
@@ -685,7 +741,6 @@ async def create_store_submission(
|
||||
slug: str,
|
||||
name: str,
|
||||
video_url: str | None = None,
|
||||
agent_output_demo_url: str | None = None,
|
||||
image_urls: list[str] = [],
|
||||
description: str = "",
|
||||
instructions: str | None = None,
|
||||
@@ -780,7 +835,6 @@ async def create_store_submission(
|
||||
agentGraphVersion=agent_version,
|
||||
name=name,
|
||||
videoUrl=video_url,
|
||||
agentOutputDemoUrl=agent_output_demo_url,
|
||||
imageUrls=image_urls,
|
||||
description=description,
|
||||
instructions=instructions,
|
||||
@@ -805,6 +859,12 @@ async def create_store_submission(
|
||||
else None
|
||||
)
|
||||
|
||||
# Generate embedding for semantic search
|
||||
if store_listing_version_id:
|
||||
await _generate_and_store_embedding(
|
||||
store_listing_version_id, name, sub_heading, description
|
||||
)
|
||||
|
||||
logger.debug(f"Created store listing for agent {agent_id}")
|
||||
# Return submission details
|
||||
return backend.server.v2.store.model.StoreSubmission(
|
||||
@@ -853,7 +913,6 @@ async def edit_store_submission(
|
||||
store_listing_version_id: str,
|
||||
name: str,
|
||||
video_url: str | None = None,
|
||||
agent_output_demo_url: str | None = None,
|
||||
image_urls: list[str] = [],
|
||||
description: str = "",
|
||||
sub_heading: str = "",
|
||||
@@ -935,7 +994,6 @@ async def edit_store_submission(
|
||||
store_listing_id=current_version.storeListingId,
|
||||
name=name,
|
||||
video_url=video_url,
|
||||
agent_output_demo_url=agent_output_demo_url,
|
||||
image_urls=image_urls,
|
||||
description=description,
|
||||
sub_heading=sub_heading,
|
||||
@@ -953,7 +1011,6 @@ async def edit_store_submission(
|
||||
data=prisma.types.StoreListingVersionUpdateInput(
|
||||
name=name,
|
||||
videoUrl=video_url,
|
||||
agentOutputDemoUrl=agent_output_demo_url,
|
||||
imageUrls=image_urls,
|
||||
description=description,
|
||||
categories=categories,
|
||||
@@ -970,6 +1027,12 @@ async def edit_store_submission(
|
||||
|
||||
if not updated_version:
|
||||
raise DatabaseError("Failed to update store listing version")
|
||||
|
||||
# Regenerate embedding with updated content
|
||||
await _generate_and_store_embedding(
|
||||
store_listing_version_id, name, sub_heading, description
|
||||
)
|
||||
|
||||
return backend.server.v2.store.model.StoreSubmission(
|
||||
agent_id=current_version.agentGraphId,
|
||||
agent_version=current_version.agentGraphVersion,
|
||||
@@ -1015,7 +1078,6 @@ async def create_store_version(
|
||||
store_listing_id: str,
|
||||
name: str,
|
||||
video_url: str | None = None,
|
||||
agent_output_demo_url: str | None = None,
|
||||
image_urls: list[str] = [],
|
||||
description: str = "",
|
||||
instructions: str | None = None,
|
||||
@@ -1085,7 +1147,6 @@ async def create_store_version(
|
||||
agentGraphVersion=agent_version,
|
||||
name=name,
|
||||
videoUrl=video_url,
|
||||
agentOutputDemoUrl=agent_output_demo_url,
|
||||
imageUrls=image_urls,
|
||||
description=description,
|
||||
instructions=instructions,
|
||||
@@ -1102,6 +1163,12 @@ async def create_store_version(
|
||||
logger.debug(
|
||||
f"Created new version for listing {store_listing_id} of agent {agent_id}"
|
||||
)
|
||||
|
||||
# Generate embedding for semantic search
|
||||
await _generate_and_store_embedding(
|
||||
new_version.id, name, sub_heading, description
|
||||
)
|
||||
|
||||
# Return submission details
|
||||
return backend.server.v2.store.model.StoreSubmission(
|
||||
agent_id=agent_id,
|
||||
|
||||
@@ -405,3 +405,237 @@ async def test_get_store_agents_search_category_array_injection():
|
||||
# Verify the query executed without error
|
||||
# Category should be parameterized, preventing SQL injection
|
||||
assert isinstance(result.agents, list)
|
||||
|
||||
|
||||
# Vector search tests
|
||||
|
||||
|
||||
@pytest.mark.asyncio(loop_scope="session")
|
||||
async def test_get_store_agents_vector_search_mocked(mocker):
|
||||
"""Test vector search uses embedding service and executes query safely."""
|
||||
from backend.integrations.embeddings import EMBEDDING_DIMENSIONS
|
||||
|
||||
# Mock embedding service
|
||||
mock_embedding = [0.1] * EMBEDDING_DIMENSIONS
|
||||
mock_embedding_service = mocker.MagicMock()
|
||||
mock_embedding_service.generate_embedding = mocker.AsyncMock(
|
||||
return_value=mock_embedding
|
||||
)
|
||||
mocker.patch(
|
||||
"backend.server.v2.store.db.get_embedding_service",
|
||||
mocker.AsyncMock(return_value=mock_embedding_service),
|
||||
)
|
||||
|
||||
# Mock query_raw_with_schema to return empty results
|
||||
mocker.patch(
|
||||
"backend.server.v2.store.db.query_raw_with_schema",
|
||||
mocker.AsyncMock(side_effect=[[], [{"count": 0}]]),
|
||||
)
|
||||
|
||||
# Call function with search query
|
||||
result = await db.get_store_agents(search_query="test query")
|
||||
|
||||
# Verify embedding service was called
|
||||
mock_embedding_service.generate_embedding.assert_called_once_with("test query")
|
||||
|
||||
# Verify results
|
||||
assert isinstance(result.agents, list)
|
||||
assert len(result.agents) == 0
|
||||
|
||||
|
||||
@pytest.mark.asyncio(loop_scope="session")
|
||||
async def test_get_store_agents_vector_search_with_results(mocker):
|
||||
"""Test vector search returns properly formatted results."""
|
||||
from backend.integrations.embeddings import EMBEDDING_DIMENSIONS
|
||||
|
||||
# Mock embedding service
|
||||
mock_embedding = [0.1] * EMBEDDING_DIMENSIONS
|
||||
mock_embedding_service = mocker.MagicMock()
|
||||
mock_embedding_service.generate_embedding = mocker.AsyncMock(
|
||||
return_value=mock_embedding
|
||||
)
|
||||
mocker.patch(
|
||||
"backend.server.v2.store.db.get_embedding_service",
|
||||
mocker.AsyncMock(return_value=mock_embedding_service),
|
||||
)
|
||||
|
||||
# Mock query results
|
||||
mock_agents = [
|
||||
{
|
||||
"slug": "test-agent",
|
||||
"agent_name": "Test Agent",
|
||||
"agent_image": ["image.jpg"],
|
||||
"creator_username": "creator",
|
||||
"creator_avatar": "avatar.jpg",
|
||||
"sub_heading": "Test heading",
|
||||
"description": "Test description",
|
||||
"runs": 10,
|
||||
"rating": 4.5,
|
||||
"categories": ["test"],
|
||||
"featured": False,
|
||||
"is_available": True,
|
||||
"updated_at": datetime.now(),
|
||||
"similarity": 0.95,
|
||||
}
|
||||
]
|
||||
mock_count = [{"count": 1}]
|
||||
|
||||
mocker.patch(
|
||||
"backend.server.v2.store.db.query_raw_with_schema",
|
||||
mocker.AsyncMock(side_effect=[mock_agents, mock_count]),
|
||||
)
|
||||
|
||||
# Call function with search query
|
||||
result = await db.get_store_agents(search_query="test query")
|
||||
|
||||
# Verify results
|
||||
assert len(result.agents) == 1
|
||||
assert result.agents[0].slug == "test-agent"
|
||||
assert result.agents[0].agent_name == "Test Agent"
|
||||
assert result.pagination.total_items == 1
|
||||
|
||||
|
||||
@pytest.mark.asyncio(loop_scope="session")
|
||||
async def test_get_store_agents_vector_search_with_filters(mocker):
|
||||
"""Test vector search works correctly with additional filters."""
|
||||
from backend.integrations.embeddings import EMBEDDING_DIMENSIONS
|
||||
|
||||
# Mock embedding service
|
||||
mock_embedding = [0.1] * EMBEDDING_DIMENSIONS
|
||||
mock_embedding_service = mocker.MagicMock()
|
||||
mock_embedding_service.generate_embedding = mocker.AsyncMock(
|
||||
return_value=mock_embedding
|
||||
)
|
||||
mocker.patch(
|
||||
"backend.server.v2.store.db.get_embedding_service",
|
||||
mocker.AsyncMock(return_value=mock_embedding_service),
|
||||
)
|
||||
|
||||
# Mock query_raw_with_schema
|
||||
mock_query = mocker.patch(
|
||||
"backend.server.v2.store.db.query_raw_with_schema",
|
||||
mocker.AsyncMock(side_effect=[[], [{"count": 0}]]),
|
||||
)
|
||||
|
||||
# Call function with search query and filters
|
||||
await db.get_store_agents(
|
||||
search_query="test query",
|
||||
featured=True,
|
||||
creators=["creator1", "creator2"],
|
||||
category="AI",
|
||||
sorted_by="rating",
|
||||
)
|
||||
|
||||
# Verify query was called with parameterized values
|
||||
# First call is the main query, second is count
|
||||
assert mock_query.call_count == 2
|
||||
|
||||
# Check that the SQL query includes proper parameterization
|
||||
first_call_args = mock_query.call_args_list[0]
|
||||
sql_query = first_call_args[0][0]
|
||||
|
||||
# Verify key elements of the query
|
||||
assert "embedding <=> $1::vector" in sql_query
|
||||
assert "featured = true" in sql_query
|
||||
assert "creator_username = ANY($" in sql_query
|
||||
assert "= ANY(categories)" in sql_query
|
||||
|
||||
|
||||
@pytest.mark.asyncio(loop_scope="session")
|
||||
async def test_generate_and_store_embedding_success(mocker):
|
||||
"""Test that embedding generation and storage works correctly."""
|
||||
from backend.integrations.embeddings import EMBEDDING_DIMENSIONS
|
||||
|
||||
# Mock embedding service
|
||||
mock_embedding = [0.1] * EMBEDDING_DIMENSIONS
|
||||
mock_embedding_service = mocker.MagicMock()
|
||||
mock_embedding_service.generate_embedding = mocker.AsyncMock(
|
||||
return_value=mock_embedding
|
||||
)
|
||||
mocker.patch(
|
||||
"backend.server.v2.store.db.get_embedding_service",
|
||||
mocker.AsyncMock(return_value=mock_embedding_service),
|
||||
)
|
||||
|
||||
# Mock query_raw_with_schema
|
||||
mock_query = mocker.patch(
|
||||
"backend.server.v2.store.db.query_raw_with_schema",
|
||||
mocker.AsyncMock(return_value=[]),
|
||||
)
|
||||
|
||||
# Call the internal function
|
||||
await db._generate_and_store_embedding(
|
||||
store_listing_version_id="version-123",
|
||||
name="Test Agent",
|
||||
sub_heading="A test agent",
|
||||
description="Does testing",
|
||||
)
|
||||
|
||||
# Verify embedding service was called with combined text
|
||||
mock_embedding_service.generate_embedding.assert_called_once_with(
|
||||
"Test Agent A test agent Does testing"
|
||||
)
|
||||
|
||||
# Verify database update was called
|
||||
mock_query.assert_called_once()
|
||||
call_args = mock_query.call_args
|
||||
assert "UPDATE" in call_args[0][0]
|
||||
assert "embedding = $1::vector" in call_args[0][0]
|
||||
assert call_args[0][2] == "version-123"
|
||||
|
||||
|
||||
@pytest.mark.asyncio(loop_scope="session")
|
||||
async def test_generate_and_store_embedding_empty_text(mocker):
|
||||
"""Test that embedding is not generated for empty text."""
|
||||
# Mock embedding service
|
||||
mock_embedding_service = mocker.MagicMock()
|
||||
mock_embedding_service.generate_embedding = mocker.AsyncMock()
|
||||
mocker.patch(
|
||||
"backend.server.v2.store.db.get_embedding_service",
|
||||
mocker.AsyncMock(return_value=mock_embedding_service),
|
||||
)
|
||||
|
||||
# Mock query_raw_with_schema
|
||||
mock_query = mocker.patch(
|
||||
"backend.server.v2.store.db.query_raw_with_schema",
|
||||
mocker.AsyncMock(return_value=[]),
|
||||
)
|
||||
|
||||
# Call with empty fields
|
||||
await db._generate_and_store_embedding(
|
||||
store_listing_version_id="version-123",
|
||||
name="",
|
||||
sub_heading="",
|
||||
description="",
|
||||
)
|
||||
|
||||
# Verify embedding service was NOT called
|
||||
mock_embedding_service.generate_embedding.assert_not_called()
|
||||
|
||||
# Verify database was NOT updated
|
||||
mock_query.assert_not_called()
|
||||
|
||||
|
||||
@pytest.mark.asyncio(loop_scope="session")
|
||||
async def test_generate_and_store_embedding_handles_error(mocker):
|
||||
"""Test that embedding generation errors don't crash the operation."""
|
||||
# Mock embedding service to raise an error
|
||||
mock_embedding_service = mocker.MagicMock()
|
||||
mock_embedding_service.generate_embedding = mocker.AsyncMock(
|
||||
side_effect=Exception("API error")
|
||||
)
|
||||
mocker.patch(
|
||||
"backend.server.v2.store.db.get_embedding_service",
|
||||
mocker.AsyncMock(return_value=mock_embedding_service),
|
||||
)
|
||||
|
||||
# Call should not raise - errors are logged but not propagated
|
||||
await db._generate_and_store_embedding(
|
||||
store_listing_version_id="version-123",
|
||||
name="Test Agent",
|
||||
sub_heading="A test agent",
|
||||
description="Does testing",
|
||||
)
|
||||
|
||||
# Verify embedding service was called (and failed)
|
||||
mock_embedding_service.generate_embedding.assert_called_once()
|
||||
|
||||
@@ -44,7 +44,6 @@ class StoreAgentDetails(pydantic.BaseModel):
|
||||
slug: str
|
||||
agent_name: str
|
||||
agent_video: str
|
||||
agent_output_demo: str
|
||||
agent_image: list[str]
|
||||
creator: str
|
||||
creator_avatar: str
|
||||
@@ -122,7 +121,6 @@ class StoreSubmission(pydantic.BaseModel):
|
||||
|
||||
# Additional fields for editing
|
||||
video_url: str | None = None
|
||||
agent_output_demo_url: str | None = None
|
||||
categories: list[str] = []
|
||||
|
||||
|
||||
@@ -159,7 +157,6 @@ class StoreSubmissionRequest(pydantic.BaseModel):
|
||||
name: str
|
||||
sub_heading: str
|
||||
video_url: str | None = None
|
||||
agent_output_demo_url: str | None = None
|
||||
image_urls: list[str] = []
|
||||
description: str = ""
|
||||
instructions: str | None = None
|
||||
@@ -172,7 +169,6 @@ class StoreSubmissionEditRequest(pydantic.BaseModel):
|
||||
name: str
|
||||
sub_heading: str
|
||||
video_url: str | None = None
|
||||
agent_output_demo_url: str | None = None
|
||||
image_urls: list[str] = []
|
||||
description: str = ""
|
||||
instructions: str | None = None
|
||||
|
||||
@@ -62,7 +62,6 @@ def test_store_agent_details():
|
||||
slug="test-agent",
|
||||
agent_name="Test Agent",
|
||||
agent_video="video.mp4",
|
||||
agent_output_demo="demo.mp4",
|
||||
agent_image=["image1.jpg", "image2.jpg"],
|
||||
creator="creator1",
|
||||
creator_avatar="avatar.jpg",
|
||||
|
||||
@@ -438,7 +438,6 @@ async def create_submission(
|
||||
slug=submission_request.slug,
|
||||
name=submission_request.name,
|
||||
video_url=submission_request.video_url,
|
||||
agent_output_demo_url=submission_request.agent_output_demo_url,
|
||||
image_urls=submission_request.image_urls,
|
||||
description=submission_request.description,
|
||||
instructions=submission_request.instructions,
|
||||
@@ -482,7 +481,6 @@ async def edit_submission(
|
||||
store_listing_version_id=store_listing_version_id,
|
||||
name=submission_request.name,
|
||||
video_url=submission_request.video_url,
|
||||
agent_output_demo_url=submission_request.agent_output_demo_url,
|
||||
image_urls=submission_request.image_urls,
|
||||
description=submission_request.description,
|
||||
instructions=submission_request.instructions,
|
||||
|
||||
@@ -378,7 +378,6 @@ def test_get_agent_details(
|
||||
slug="test-agent",
|
||||
agent_name="Test Agent",
|
||||
agent_video="video.mp4",
|
||||
agent_output_demo="demo.mp4",
|
||||
agent_image=["image1.jpg", "image2.jpg"],
|
||||
creator="creator1",
|
||||
creator_avatar="avatar1.jpg",
|
||||
|
||||
@@ -185,12 +185,6 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
|
||||
description="Number of top blocks with most errors to show when no blocks exceed threshold (0 to disable).",
|
||||
)
|
||||
|
||||
# Execution Accuracy Monitoring
|
||||
execution_accuracy_check_interval_hours: int = Field(
|
||||
default=24,
|
||||
description="Interval in hours between execution accuracy alert checks.",
|
||||
)
|
||||
|
||||
model_config = SettingsConfigDict(
|
||||
env_file=".env",
|
||||
extra="allow",
|
||||
|
||||
@@ -1,64 +0,0 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "StoreListingVersion" ADD COLUMN "agentOutputDemoUrl" TEXT;
|
||||
|
||||
-- Drop and recreate the StoreAgent view with agentOutputDemoUrl field
|
||||
DROP VIEW IF EXISTS "StoreAgent";
|
||||
|
||||
CREATE OR REPLACE VIEW "StoreAgent" AS
|
||||
WITH latest_versions AS (
|
||||
SELECT
|
||||
"storeListingId",
|
||||
MAX(version) AS max_version
|
||||
FROM "StoreListingVersion"
|
||||
WHERE "submissionStatus" = 'APPROVED'
|
||||
GROUP BY "storeListingId"
|
||||
),
|
||||
agent_versions AS (
|
||||
SELECT
|
||||
"storeListingId",
|
||||
array_agg(DISTINCT version::text ORDER BY version::text) AS versions
|
||||
FROM "StoreListingVersion"
|
||||
WHERE "submissionStatus" = 'APPROVED'
|
||||
GROUP BY "storeListingId"
|
||||
)
|
||||
SELECT
|
||||
sl.id AS listing_id,
|
||||
slv.id AS "storeListingVersionId",
|
||||
slv."createdAt" AS updated_at,
|
||||
sl.slug,
|
||||
COALESCE(slv.name, '') AS agent_name,
|
||||
slv."videoUrl" AS agent_video,
|
||||
slv."agentOutputDemoUrl" AS agent_output_demo,
|
||||
COALESCE(slv."imageUrls", ARRAY[]::text[]) AS agent_image,
|
||||
slv."isFeatured" AS featured,
|
||||
p.username AS creator_username, -- Allow NULL for malformed sub-agents
|
||||
p."avatarUrl" AS creator_avatar, -- Allow NULL for malformed sub-agents
|
||||
slv."subHeading" AS sub_heading,
|
||||
slv.description,
|
||||
slv.categories,
|
||||
slv.search,
|
||||
COALESCE(ar.run_count, 0::bigint) AS runs,
|
||||
COALESCE(rs.avg_rating, 0.0)::double precision AS rating,
|
||||
COALESCE(av.versions, ARRAY[slv.version::text]) AS versions,
|
||||
slv."isAvailable" AS is_available,
|
||||
COALESCE(sl."useForOnboarding", false) AS "useForOnboarding"
|
||||
FROM "StoreListing" sl
|
||||
JOIN latest_versions lv
|
||||
ON sl.id = lv."storeListingId"
|
||||
JOIN "StoreListingVersion" slv
|
||||
ON slv."storeListingId" = lv."storeListingId"
|
||||
AND slv.version = lv.max_version
|
||||
AND slv."submissionStatus" = 'APPROVED'
|
||||
JOIN "AgentGraph" a
|
||||
ON slv."agentGraphId" = a.id
|
||||
AND slv."agentGraphVersion" = a.version
|
||||
LEFT JOIN "Profile" p
|
||||
ON sl."owningUserId" = p."userId"
|
||||
LEFT JOIN "mv_review_stats" rs
|
||||
ON sl.id = rs."storeListingId"
|
||||
LEFT JOIN "mv_agent_run_counts" ar
|
||||
ON a.id = ar."agentGraphId"
|
||||
LEFT JOIN agent_versions av
|
||||
ON sl.id = av."storeListingId"
|
||||
WHERE sl."isDeleted" = false
|
||||
AND sl."hasApprovedVersion" = true;
|
||||
@@ -0,0 +1,92 @@
|
||||
-- Migration: Replace full-text search with pgvector-based vector search
|
||||
-- This migration:
|
||||
-- 1. Enables the pgvector extension
|
||||
-- 2. Drops the StoreAgent view (depends on search column)
|
||||
-- 3. Removes the full-text search infrastructure (trigger, function, tsvector column)
|
||||
-- 4. Adds a vector embedding column for semantic search
|
||||
-- 5. Creates an index for fast vector similarity search
|
||||
-- 6. Recreates the StoreAgent view with the embedding column
|
||||
|
||||
-- Enable pgvector extension
|
||||
CREATE EXTENSION IF NOT EXISTS vector;
|
||||
|
||||
-- First drop the view that depends on the search column
|
||||
DROP VIEW IF EXISTS "StoreAgent";
|
||||
|
||||
-- Remove full-text search infrastructure
|
||||
DROP TRIGGER IF EXISTS "update_tsvector" ON "StoreListingVersion";
|
||||
DROP FUNCTION IF EXISTS update_tsvector_column();
|
||||
|
||||
-- Drop the tsvector search column
|
||||
ALTER TABLE "StoreListingVersion" DROP COLUMN IF EXISTS "search";
|
||||
|
||||
-- Add embedding column for vector search (1536 dimensions for text-embedding-3-small)
|
||||
ALTER TABLE "StoreListingVersion"
|
||||
ADD COLUMN IF NOT EXISTS "embedding" vector(1536);
|
||||
|
||||
-- Create IVFFlat index for fast similarity search
|
||||
-- Using cosine distance (vector_cosine_ops) which is standard for text embeddings
|
||||
-- lists = 100 is appropriate for datasets under 1M rows
|
||||
CREATE INDEX IF NOT EXISTS idx_store_listing_version_embedding
|
||||
ON "StoreListingVersion"
|
||||
USING ivfflat (embedding vector_cosine_ops)
|
||||
WITH (lists = 100);
|
||||
|
||||
-- Recreate StoreAgent view WITHOUT search column, WITH embedding column
|
||||
CREATE OR REPLACE VIEW "StoreAgent" AS
|
||||
WITH latest_versions AS (
|
||||
SELECT
|
||||
"storeListingId",
|
||||
MAX(version) AS max_version
|
||||
FROM "StoreListingVersion"
|
||||
WHERE "submissionStatus" = 'APPROVED'
|
||||
GROUP BY "storeListingId"
|
||||
),
|
||||
agent_versions AS (
|
||||
SELECT
|
||||
"storeListingId",
|
||||
array_agg(DISTINCT version::text ORDER BY version::text) AS versions
|
||||
FROM "StoreListingVersion"
|
||||
WHERE "submissionStatus" = 'APPROVED'
|
||||
GROUP BY "storeListingId"
|
||||
)
|
||||
SELECT
|
||||
sl.id AS listing_id,
|
||||
slv.id AS "storeListingVersionId",
|
||||
slv."createdAt" AS updated_at,
|
||||
sl.slug,
|
||||
COALESCE(slv.name, '') AS agent_name,
|
||||
slv."videoUrl" AS agent_video,
|
||||
COALESCE(slv."imageUrls", ARRAY[]::text[]) AS agent_image,
|
||||
slv."isFeatured" AS featured,
|
||||
p.username AS creator_username,
|
||||
p."avatarUrl" AS creator_avatar,
|
||||
slv."subHeading" AS sub_heading,
|
||||
slv.description,
|
||||
slv.categories,
|
||||
slv.embedding,
|
||||
COALESCE(ar.run_count, 0::bigint) AS runs,
|
||||
COALESCE(rs.avg_rating, 0.0)::double precision AS rating,
|
||||
COALESCE(av.versions, ARRAY[slv.version::text]) AS versions,
|
||||
COALESCE(sl."useForOnboarding", false) AS "useForOnboarding",
|
||||
slv."isAvailable" AS is_available
|
||||
FROM "StoreListing" sl
|
||||
JOIN latest_versions lv
|
||||
ON sl.id = lv."storeListingId"
|
||||
JOIN "StoreListingVersion" slv
|
||||
ON slv."storeListingId" = lv."storeListingId"
|
||||
AND slv.version = lv.max_version
|
||||
AND slv."submissionStatus" = 'APPROVED'
|
||||
JOIN "AgentGraph" a
|
||||
ON slv."agentGraphId" = a.id
|
||||
AND slv."agentGraphVersion" = a.version
|
||||
LEFT JOIN "Profile" p
|
||||
ON sl."owningUserId" = p."userId"
|
||||
LEFT JOIN "mv_review_stats" rs
|
||||
ON sl.id = rs."storeListingId"
|
||||
LEFT JOIN "mv_agent_run_counts" ar
|
||||
ON a.id = ar."agentGraphId"
|
||||
LEFT JOIN agent_versions av
|
||||
ON sl.id = av."storeListingId"
|
||||
WHERE sl."isDeleted" = false
|
||||
AND sl."hasApprovedVersion" = true;
|
||||
@@ -1,15 +0,0 @@
|
||||
-- Create BuilderSearchHistory table
|
||||
CREATE TABLE "BuilderSearchHistory" (
|
||||
"id" TEXT NOT NULL,
|
||||
"userId" TEXT NOT NULL,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"searchQuery" TEXT NOT NULL,
|
||||
"filter" TEXT[] DEFAULT ARRAY[]::TEXT[],
|
||||
"byCreator" TEXT[] DEFAULT ARRAY[]::TEXT[],
|
||||
|
||||
CONSTRAINT "BuilderSearchHistory_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- Define User foreign relation
|
||||
ALTER TABLE "BuilderSearchHistory" ADD CONSTRAINT "BuilderSearchHistory_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
@@ -114,7 +114,6 @@ cli = "backend.cli:main"
|
||||
format = "linter:format"
|
||||
lint = "linter:lint"
|
||||
test = "run_tests:test"
|
||||
load-store-agents = "test.load_store_agents:run"
|
||||
|
||||
[tool.isort]
|
||||
profile = "black"
|
||||
|
||||
@@ -53,7 +53,6 @@ model User {
|
||||
|
||||
Profile Profile[]
|
||||
UserOnboarding UserOnboarding?
|
||||
BuilderSearchHistory BuilderSearchHistory[]
|
||||
StoreListings StoreListing[]
|
||||
StoreListingReviews StoreListingReview[]
|
||||
StoreVersionsReviewed StoreListingVersion[]
|
||||
@@ -115,19 +114,6 @@ model UserOnboarding {
|
||||
User User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
}
|
||||
|
||||
model BuilderSearchHistory {
|
||||
id String @id @default(uuid())
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @default(now()) @updatedAt
|
||||
|
||||
searchQuery String
|
||||
filter String[] @default([])
|
||||
byCreator String[] @default([])
|
||||
|
||||
userId String
|
||||
User User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
}
|
||||
|
||||
// This model describes the Agent Graph/Flow (Multi Agent System).
|
||||
model AgentGraph {
|
||||
id String @default(uuid())
|
||||
@@ -715,11 +701,10 @@ view StoreAgent {
|
||||
storeListingVersionId String
|
||||
updated_at DateTime
|
||||
|
||||
slug String
|
||||
agent_name String
|
||||
agent_video String?
|
||||
agent_output_demo String?
|
||||
agent_image String[]
|
||||
slug String
|
||||
agent_name String
|
||||
agent_video String?
|
||||
agent_image String[]
|
||||
|
||||
featured Boolean @default(false)
|
||||
creator_username String?
|
||||
@@ -727,7 +712,7 @@ view StoreAgent {
|
||||
sub_heading String
|
||||
description String
|
||||
categories String[]
|
||||
search Unsupported("tsvector")? @default(dbgenerated("''::tsvector"))
|
||||
embedding Unsupported("vector(1536)")?
|
||||
runs Int
|
||||
rating Float
|
||||
versions String[]
|
||||
@@ -848,14 +833,13 @@ model StoreListingVersion {
|
||||
AgentGraph AgentGraph @relation(fields: [agentGraphId, agentGraphVersion], references: [id, version])
|
||||
|
||||
// Content fields
|
||||
name String
|
||||
subHeading String
|
||||
videoUrl String?
|
||||
agentOutputDemoUrl String?
|
||||
imageUrls String[]
|
||||
description String
|
||||
instructions String?
|
||||
categories String[]
|
||||
name String
|
||||
subHeading String
|
||||
videoUrl String?
|
||||
imageUrls String[]
|
||||
description String
|
||||
instructions String?
|
||||
categories String[]
|
||||
|
||||
isFeatured Boolean @default(false)
|
||||
|
||||
@@ -863,7 +847,8 @@ model StoreListingVersion {
|
||||
// Old versions can be made unavailable by the author if desired
|
||||
isAvailable Boolean @default(true)
|
||||
|
||||
search Unsupported("tsvector")? @default(dbgenerated("''::tsvector"))
|
||||
// Vector embedding for semantic search (replaces tsvector full-text search)
|
||||
embedding Unsupported("vector(1536)")?
|
||||
|
||||
// Version workflow state
|
||||
submissionStatus SubmissionStatus @default(DRAFT)
|
||||
|
||||
@@ -3,7 +3,6 @@
|
||||
"slug": "test-agent",
|
||||
"agent_name": "Test Agent",
|
||||
"agent_video": "video.mp4",
|
||||
"agent_output_demo": "demo.mp4",
|
||||
"agent_image": [
|
||||
"image1.jpg",
|
||||
"image2.jpg"
|
||||
|
||||
@@ -23,7 +23,6 @@
|
||||
"reviewed_at": null,
|
||||
"changes_summary": null,
|
||||
"video_url": "test.mp4",
|
||||
"agent_output_demo_url": null,
|
||||
"categories": [
|
||||
"test-category"
|
||||
]
|
||||
|
||||
@@ -10,7 +10,7 @@ It creates:
|
||||
|
||||
Usage:
|
||||
cd backend
|
||||
poetry run load-store-agents
|
||||
poetry run python test/load_store_agents.py
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
@@ -37,8 +37,7 @@ from prisma.types import (
|
||||
AGENTS_DIR = Path(__file__).parent.parent / "agents"
|
||||
CSV_FILE = AGENTS_DIR / "StoreAgent_rows.csv"
|
||||
|
||||
# User constants for the autogpt creator (test data, not production)
|
||||
# Fixed uuid4 for idempotency - same user is reused across script runs
|
||||
# Fixed user ID for the autogpt creator (test data, not production)
|
||||
AUTOGPT_USER_ID = "79d96c73-e6f5-4656-a83a-185b41ee0d06"
|
||||
AUTOGPT_EMAIL = "autogpt-test@agpt.co"
|
||||
AUTOGPT_USERNAME = "autogpt"
|
||||
@@ -254,12 +253,7 @@ async def create_agent_graph(
|
||||
for node in nodes:
|
||||
block_id = node["block_id"]
|
||||
# Ensure the block exists (create placeholder if needed)
|
||||
block_exists = await ensure_block_exists(db, block_id, known_blocks)
|
||||
if not block_exists:
|
||||
print(
|
||||
f" Skipping node {node['id']} - block {block_id} could not be created"
|
||||
)
|
||||
continue
|
||||
await ensure_block_exists(db, block_id, known_blocks)
|
||||
|
||||
await db.agentnode.create(
|
||||
data=AgentNodeCreateInput(
|
||||
@@ -359,7 +353,7 @@ async def create_store_listing(
|
||||
if is_approved:
|
||||
await db.storelisting.update(
|
||||
where={"id": listing_id},
|
||||
data={"ActiveVersion": {"connect": {"id": version_id}}},
|
||||
data={"activeVersionId": version_id},
|
||||
)
|
||||
|
||||
|
||||
@@ -393,7 +387,6 @@ async def main():
|
||||
|
||||
# Build mapping from version_id to json file
|
||||
loaded_graphs = {} # graph_id -> (graph_id, version)
|
||||
failed_agents = []
|
||||
|
||||
for json_file in json_files:
|
||||
# Extract the version ID from filename (agent_<version_id>.json)
|
||||
@@ -406,25 +399,17 @@ async def main():
|
||||
continue
|
||||
|
||||
metadata = csv_metadata[version_id]
|
||||
agent_name = metadata["agent_name"]
|
||||
print(f"\nProcessing: {agent_name}")
|
||||
print(f"\nProcessing: {metadata['agent_name']}")
|
||||
|
||||
# Use a transaction per agent to prevent dangling resources
|
||||
try:
|
||||
async with db.tx() as tx:
|
||||
# Load and create the agent graph
|
||||
agent_data = await load_agent_json(json_file)
|
||||
graph_id, graph_version = await create_agent_graph(
|
||||
tx, agent_data, known_blocks
|
||||
)
|
||||
loaded_graphs[graph_id] = (graph_id, graph_version)
|
||||
# Load and create the agent graph
|
||||
agent_data = await load_agent_json(json_file)
|
||||
graph_id, graph_version = await create_agent_graph(
|
||||
db, agent_data, known_blocks
|
||||
)
|
||||
loaded_graphs[graph_id] = (graph_id, graph_version)
|
||||
|
||||
# Create store listing
|
||||
await create_store_listing(tx, graph_id, graph_version, metadata)
|
||||
except Exception as e:
|
||||
print(f" Error loading agent '{agent_name}': {e}")
|
||||
failed_agents.append(agent_name)
|
||||
continue
|
||||
# Create store listing
|
||||
await create_store_listing(db, graph_id, graph_version, metadata)
|
||||
|
||||
# Step 4: Refresh materialized views
|
||||
print("\n[Step 4] Refreshing materialized views...")
|
||||
@@ -436,20 +421,11 @@ async def main():
|
||||
|
||||
print("\n" + "=" * 60)
|
||||
print(f"Successfully loaded {len(loaded_graphs)} agents")
|
||||
if failed_agents:
|
||||
print(
|
||||
f"Failed to load {len(failed_agents)} agents: {', '.join(failed_agents)}"
|
||||
)
|
||||
print("=" * 60)
|
||||
|
||||
finally:
|
||||
await db.disconnect()
|
||||
|
||||
|
||||
def run():
|
||||
"""Entry point for poetry script."""
|
||||
asyncio.run(main())
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
run()
|
||||
asyncio.run(main())
|
||||
|
||||
@@ -82,7 +82,7 @@
|
||||
"lodash": "4.17.21",
|
||||
"lucide-react": "0.552.0",
|
||||
"moment": "2.30.1",
|
||||
"next": "15.4.10",
|
||||
"next": "15.4.7",
|
||||
"next-themes": "0.4.6",
|
||||
"nuqs": "2.7.2",
|
||||
"party-js": "2.2.0",
|
||||
|
||||
126
autogpt_platform/frontend/pnpm-lock.yaml
generated
126
autogpt_platform/frontend/pnpm-lock.yaml
generated
@@ -16,7 +16,7 @@ importers:
|
||||
version: 5.2.2(react-hook-form@7.66.0(react@18.3.1))
|
||||
'@next/third-parties':
|
||||
specifier: 15.4.6
|
||||
version: 15.4.6(next@15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)
|
||||
version: 15.4.6(next@15.4.7(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)
|
||||
'@phosphor-icons/react':
|
||||
specifier: 2.1.10
|
||||
version: 2.1.10(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
|
||||
@@ -88,7 +88,7 @@ importers:
|
||||
version: 5.24.13(@rjsf/utils@5.24.13(react@18.3.1))
|
||||
'@sentry/nextjs':
|
||||
specifier: 10.27.0
|
||||
version: 10.27.0(@opentelemetry/context-async-hooks@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/core@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.2.0(@opentelemetry/api@1.9.0))(next@15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)(webpack@5.101.3(esbuild@0.25.9))
|
||||
version: 10.27.0(@opentelemetry/context-async-hooks@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/core@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.2.0(@opentelemetry/api@1.9.0))(next@15.4.7(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)(webpack@5.101.3(esbuild@0.25.9))
|
||||
'@supabase/ssr':
|
||||
specifier: 0.7.0
|
||||
version: 0.7.0(@supabase/supabase-js@2.78.0)
|
||||
@@ -106,10 +106,10 @@ importers:
|
||||
version: 0.2.4
|
||||
'@vercel/analytics':
|
||||
specifier: 1.5.0
|
||||
version: 1.5.0(next@15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)
|
||||
version: 1.5.0(next@15.4.7(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)
|
||||
'@vercel/speed-insights':
|
||||
specifier: 1.2.0
|
||||
version: 1.2.0(next@15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)
|
||||
version: 1.2.0(next@15.4.7(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)
|
||||
'@xyflow/react':
|
||||
specifier: 12.9.2
|
||||
version: 12.9.2(@types/react@18.3.17)(immer@10.1.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
|
||||
@@ -148,7 +148,7 @@ importers:
|
||||
version: 12.23.24(@emotion/is-prop-valid@1.2.2)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
|
||||
geist:
|
||||
specifier: 1.5.1
|
||||
version: 1.5.1(next@15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))
|
||||
version: 1.5.1(next@15.4.7(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))
|
||||
highlight.js:
|
||||
specifier: 11.11.1
|
||||
version: 11.11.1
|
||||
@@ -171,14 +171,14 @@ importers:
|
||||
specifier: 2.30.1
|
||||
version: 2.30.1
|
||||
next:
|
||||
specifier: 15.4.10
|
||||
version: 15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
|
||||
specifier: 15.4.7
|
||||
version: 15.4.7(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
next-themes:
specifier: 0.4.6
version: 0.4.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
nuqs:
specifier: 2.7.2
version: 2.7.2(next@15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)
version: 2.7.2(next@15.4.7(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)
party-js:
specifier: 2.2.0
version: 2.2.0
@@ -284,7 +284,7 @@ importers:
version: 9.1.5(storybook@9.1.5(@testing-library/dom@10.4.1)(msw@2.11.6(@types/node@24.10.0)(typescript@5.9.3))(prettier@3.6.2))
'@storybook/nextjs':
specifier: 9.1.5
version: 9.1.5(esbuild@0.25.9)(next@15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@9.1.5(@testing-library/dom@10.4.1)(msw@2.11.6(@types/node@24.10.0)(typescript@5.9.3))(prettier@3.6.2))(type-fest@4.41.0)(typescript@5.9.3)(webpack-hot-middleware@2.26.1)(webpack@5.101.3(esbuild@0.25.9))
version: 9.1.5(esbuild@0.25.9)(next@15.4.7(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@9.1.5(@testing-library/dom@10.4.1)(msw@2.11.6(@types/node@24.10.0)(typescript@5.9.3))(prettier@3.6.2))(type-fest@4.41.0)(typescript@5.9.3)(webpack-hot-middleware@2.26.1)(webpack@5.101.3(esbuild@0.25.9))
'@tanstack/eslint-plugin-query':
specifier: 5.91.2
version: 5.91.2(eslint@8.57.1)(typescript@5.9.3)
@@ -1602,56 +1602,56 @@ packages:
'@neoconfetti/react@1.0.0':
resolution: {integrity: sha512-klcSooChXXOzIm+SE5IISIAn3bYzYfPjbX7D7HoqZL84oAfgREeSg5vSIaSFH+DaGzzvImTyWe1OyrJ67vik4A==}

'@next/env@15.4.10':
resolution: {integrity: sha512-knhmoJ0Vv7VRf6pZEPSnciUG1S4bIhWx+qTYBW/AjxEtlzsiNORPk8sFDCEvqLfmKuey56UB9FL1UdHEV3uBrg==}
'@next/env@15.4.7':
resolution: {integrity: sha512-PrBIpO8oljZGTOe9HH0miix1w5MUiGJ/q83Jge03mHEE0E3pyqzAy2+l5G6aJDbXoobmxPJTVhbCuwlLtjSHwg==}

'@next/eslint-plugin-next@15.5.2':
resolution: {integrity: sha512-lkLrRVxcftuOsJNhWatf1P2hNVfh98k/omQHrCEPPriUypR6RcS13IvLdIrEvkm9AH2Nu2YpR5vLqBuy6twH3Q==}

'@next/swc-darwin-arm64@15.4.8':
resolution: {integrity: sha512-Pf6zXp7yyQEn7sqMxur6+kYcywx5up1J849psyET7/8pG2gQTVMjU3NzgIt8SeEP5to3If/SaWmaA6H6ysBr1A==}
'@next/swc-darwin-arm64@15.4.7':
resolution: {integrity: sha512-2Dkb+VUTp9kHHkSqtws4fDl2Oxms29HcZBwFIda1X7Ztudzy7M6XF9HDS2dq85TmdN47VpuhjE+i6wgnIboVzQ==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [darwin]

'@next/swc-darwin-x64@15.4.8':
resolution: {integrity: sha512-xla6AOfz68a6kq3gRQccWEvFC/VRGJmA/QuSLENSO7CZX5WIEkSz7r1FdXUjtGCQ1c2M+ndUAH7opdfLK1PQbw==}
'@next/swc-darwin-x64@15.4.7':
resolution: {integrity: sha512-qaMnEozKdWezlmh1OGDVFueFv2z9lWTcLvt7e39QA3YOvZHNpN2rLs/IQLwZaUiw2jSvxW07LxMCWtOqsWFNQg==}
engines: {node: '>= 10'}
cpu: [x64]
os: [darwin]

'@next/swc-linux-arm64-gnu@15.4.8':
resolution: {integrity: sha512-y3fmp+1Px/SJD+5ntve5QLZnGLycsxsVPkTzAc3zUiXYSOlTPqT8ynfmt6tt4fSo1tAhDPmryXpYKEAcoAPDJw==}
'@next/swc-linux-arm64-gnu@15.4.7':
resolution: {integrity: sha512-ny7lODPE7a15Qms8LZiN9wjNWIeI+iAZOFDOnv2pcHStncUr7cr9lD5XF81mdhrBXLUP9yT9RzlmSWKIazWoDw==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [linux]

'@next/swc-linux-arm64-musl@15.4.8':
resolution: {integrity: sha512-DX/L8VHzrr1CfwaVjBQr3GWCqNNFgyWJbeQ10Lx/phzbQo3JNAxUok1DZ8JHRGcL6PgMRgj6HylnLNndxn4Z6A==}
'@next/swc-linux-arm64-musl@15.4.7':
resolution: {integrity: sha512-4SaCjlFR/2hGJqZLLWycccy1t+wBrE/vyJWnYaZJhUVHccpGLG5q0C+Xkw4iRzUIkE+/dr90MJRUym3s1+vO8A==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [linux]

'@next/swc-linux-x64-gnu@15.4.8':
resolution: {integrity: sha512-9fLAAXKAL3xEIFdKdzG5rUSvSiZTLLTCc6JKq1z04DR4zY7DbAPcRvNm3K1inVhTiQCs19ZRAgUerHiVKMZZIA==}
'@next/swc-linux-x64-gnu@15.4.7':
resolution: {integrity: sha512-2uNXjxvONyRidg00VwvlTYDwC9EgCGNzPAPYbttIATZRxmOZ3hllk/YYESzHZb65eyZfBR5g9xgCZjRAl9YYGg==}
engines: {node: '>= 10'}
cpu: [x64]
os: [linux]

'@next/swc-linux-x64-musl@15.4.8':
resolution: {integrity: sha512-s45V7nfb5g7dbS7JK6XZDcapicVrMMvX2uYgOHP16QuKH/JA285oy6HcxlKqwUNaFY/UC6EvQ8QZUOo19cBKSA==}
'@next/swc-linux-x64-musl@15.4.7':
resolution: {integrity: sha512-ceNbPjsFgLscYNGKSu4I6LYaadq2B8tcK116nVuInpHHdAWLWSwVK6CHNvCi0wVS9+TTArIFKJGsEyVD1H+4Kg==}
engines: {node: '>= 10'}
cpu: [x64]
os: [linux]

'@next/swc-win32-arm64-msvc@15.4.8':
resolution: {integrity: sha512-KjgeQyOAq7t/HzAJcWPGA8X+4WY03uSCZ2Ekk98S9OgCFsb6lfBE3dbUzUuEQAN2THbwYgFfxX2yFTCMm8Kehw==}
'@next/swc-win32-arm64-msvc@15.4.7':
resolution: {integrity: sha512-pZyxmY1iHlZJ04LUL7Css8bNvsYAMYOY9JRwFA3HZgpaNKsJSowD09Vg2R9734GxAcLJc2KDQHSCR91uD6/AAw==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [win32]

'@next/swc-win32-x64-msvc@15.4.8':
resolution: {integrity: sha512-Exsmf/+42fWVnLMaZHzshukTBxZrSwuuLKFvqhGHJ+mC1AokqieLY/XzAl3jc/CqhXLqLY3RRjkKJ9YnLPcRWg==}
'@next/swc-win32-x64-msvc@15.4.7':
resolution: {integrity: sha512-HjuwPJ7BeRzgl3KrjKqD2iDng0eQIpIReyhpF5r4yeAHFwWRuAhfW92rWv/r3qeQHEwHsLRzFDvMqRjyM5DI6A==}
engines: {node: '>= 10'}
cpu: [x64]
os: [win32]
@@ -5920,8 +5920,8 @@ packages:
react: ^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc
react-dom: ^16.8 || ^17 || ^18 || ^19 || ^19.0.0-rc

next@15.4.10:
resolution: {integrity: sha512-itVlc79QjpKMFMRhP+kbGKaSG/gZM6RCvwhEbwmCNF06CdDiNaoHcbeg0PqkEa2GOcn8KJ0nnc7+yL7EjoYLHQ==}
next@15.4.7:
resolution: {integrity: sha512-OcqRugwF7n7mC8OSYjvsZhhG1AYSvulor1EIUsIkbbEbf1qoE5EbH36Swj8WhF4cHqmDgkiam3z1c1W0J1Wifg==}
engines: {node: ^18.18.0 || ^19.8.0 || >= 20.0.0}
hasBin: true
peerDependencies:
@@ -9003,39 +9003,39 @@ snapshots:

'@neoconfetti/react@1.0.0': {}

'@next/env@15.4.10': {}
'@next/env@15.4.7': {}

'@next/eslint-plugin-next@15.5.2':
dependencies:
fast-glob: 3.3.1

'@next/swc-darwin-arm64@15.4.8':
'@next/swc-darwin-arm64@15.4.7':
optional: true

'@next/swc-darwin-x64@15.4.8':
'@next/swc-darwin-x64@15.4.7':
optional: true

'@next/swc-linux-arm64-gnu@15.4.8':
'@next/swc-linux-arm64-gnu@15.4.7':
optional: true

'@next/swc-linux-arm64-musl@15.4.8':
'@next/swc-linux-arm64-musl@15.4.7':
optional: true

'@next/swc-linux-x64-gnu@15.4.8':
'@next/swc-linux-x64-gnu@15.4.7':
optional: true

'@next/swc-linux-x64-musl@15.4.8':
'@next/swc-linux-x64-musl@15.4.7':
optional: true

'@next/swc-win32-arm64-msvc@15.4.8':
'@next/swc-win32-arm64-msvc@15.4.7':
optional: true

'@next/swc-win32-x64-msvc@15.4.8':
'@next/swc-win32-x64-msvc@15.4.7':
optional: true

'@next/third-parties@15.4.6(next@15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)':
'@next/third-parties@15.4.6(next@15.4.7(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)':
dependencies:
next: 15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
next: 15.4.7(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
react: 18.3.1
third-party-capital: 1.0.20

@@ -10267,7 +10267,7 @@ snapshots:

'@sentry/core@10.27.0': {}

'@sentry/nextjs@10.27.0(@opentelemetry/context-async-hooks@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/core@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.2.0(@opentelemetry/api@1.9.0))(next@15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)(webpack@5.101.3(esbuild@0.25.9))':
'@sentry/nextjs@10.27.0(@opentelemetry/context-async-hooks@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/core@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.2.0(@opentelemetry/api@1.9.0))(next@15.4.7(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)(webpack@5.101.3(esbuild@0.25.9))':
dependencies:
'@opentelemetry/api': 1.9.0
'@opentelemetry/semantic-conventions': 1.37.0
@@ -10280,7 +10280,7 @@ snapshots:
'@sentry/react': 10.27.0(react@18.3.1)
'@sentry/vercel-edge': 10.27.0
'@sentry/webpack-plugin': 4.3.0(webpack@5.101.3(esbuild@0.25.9))
next: 15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
next: 15.4.7(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
resolve: 1.22.8
rollup: 4.52.2
stacktrace-parser: 0.1.11
@@ -10642,7 +10642,7 @@ snapshots:
react: 18.3.1
react-dom: 18.3.1(react@18.3.1)

'@storybook/nextjs@9.1.5(esbuild@0.25.9)(next@15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@9.1.5(@testing-library/dom@10.4.1)(msw@2.11.6(@types/node@24.10.0)(typescript@5.9.3))(prettier@3.6.2))(type-fest@4.41.0)(typescript@5.9.3)(webpack-hot-middleware@2.26.1)(webpack@5.101.3(esbuild@0.25.9))':
'@storybook/nextjs@9.1.5(esbuild@0.25.9)(next@15.4.7(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@9.1.5(@testing-library/dom@10.4.1)(msw@2.11.6(@types/node@24.10.0)(typescript@5.9.3))(prettier@3.6.2))(type-fest@4.41.0)(typescript@5.9.3)(webpack-hot-middleware@2.26.1)(webpack@5.101.3(esbuild@0.25.9))':
dependencies:
'@babel/core': 7.28.4
'@babel/plugin-syntax-bigint': 7.8.3(@babel/core@7.28.4)
@@ -10666,7 +10666,7 @@ snapshots:
css-loader: 6.11.0(webpack@5.101.3(esbuild@0.25.9))
image-size: 2.0.2
loader-utils: 3.3.1
next: 15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
next: 15.4.7(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
node-polyfill-webpack-plugin: 2.0.1(webpack@5.101.3(esbuild@0.25.9))
postcss: 8.5.6
postcss-loader: 8.2.0(postcss@8.5.6)(typescript@5.9.3)(webpack@5.101.3(esbuild@0.25.9))
@@ -11271,14 +11271,14 @@ snapshots:
'@unrs/resolver-binding-win32-x64-msvc@1.11.1':
optional: true

'@vercel/analytics@1.5.0(next@15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)':
'@vercel/analytics@1.5.0(next@15.4.7(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)':
optionalDependencies:
next: 15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
next: 15.4.7(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
react: 18.3.1

'@vercel/speed-insights@1.2.0(next@15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)':
'@vercel/speed-insights@1.2.0(next@15.4.7(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)':
optionalDependencies:
next: 15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
next: 15.4.7(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
react: 18.3.1

'@vitest/expect@3.2.4':
@@ -12954,9 +12954,9 @@ snapshots:

functions-have-names@1.2.3: {}

geist@1.5.1(next@15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)):
geist@1.5.1(next@15.4.7(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)):
dependencies:
next: 15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
next: 15.4.7(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)

gensync@1.0.0-beta.2: {}

@@ -14226,9 +14226,9 @@ snapshots:
react: 18.3.1
react-dom: 18.3.1(react@18.3.1)

next@15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1):
next@15.4.7(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1):
dependencies:
'@next/env': 15.4.10
'@next/env': 15.4.7
'@swc/helpers': 0.5.15
caniuse-lite: 1.0.30001741
postcss: 8.4.31
@@ -14236,14 +14236,14 @@ snapshots:
react-dom: 18.3.1(react@18.3.1)
styled-jsx: 5.1.6(@babel/core@7.28.4)(react@18.3.1)
optionalDependencies:
'@next/swc-darwin-arm64': 15.4.8
'@next/swc-darwin-x64': 15.4.8
'@next/swc-linux-arm64-gnu': 15.4.8
'@next/swc-linux-arm64-musl': 15.4.8
'@next/swc-linux-x64-gnu': 15.4.8
'@next/swc-linux-x64-musl': 15.4.8
'@next/swc-win32-arm64-msvc': 15.4.8
'@next/swc-win32-x64-msvc': 15.4.8
'@next/swc-darwin-arm64': 15.4.7
'@next/swc-darwin-x64': 15.4.7
'@next/swc-linux-arm64-gnu': 15.4.7
'@next/swc-linux-arm64-musl': 15.4.7
'@next/swc-linux-x64-gnu': 15.4.7
'@next/swc-linux-x64-musl': 15.4.7
'@next/swc-win32-arm64-msvc': 15.4.7
'@next/swc-win32-x64-msvc': 15.4.7
'@opentelemetry/api': 1.9.0
'@playwright/test': 1.56.1
sharp: 0.34.3
@@ -14321,12 +14321,12 @@ snapshots:
dependencies:
boolbase: 1.0.0

nuqs@2.7.2(next@15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1):
nuqs@2.7.2(next@15.4.7(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1):
dependencies:
'@standard-schema/spec': 1.0.0
react: 18.3.1
optionalDependencies:
next: 15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
next: 15.4.7(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)

oas-kit-common@1.0.8:
dependencies:
@@ -15340,7 +15340,7 @@ snapshots:
dependencies:
color: 4.2.3
detect-libc: 2.0.4
semver: 7.7.3
semver: 7.7.2
optionalDependencies:
'@img/sharp-darwin-arm64': 0.34.3
'@img/sharp-darwin-x64': 0.34.3
@@ -1,4 +1,6 @@
"use client";
import { StoreAgentDetails } from "@/lib/autogpt-server-api";
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
import { isEmptyOrWhitespace } from "@/lib/utils";
import { useRouter } from "next/navigation";
import { useEffect, useState } from "react";
@@ -11,17 +13,15 @@ import {
OnboardingStep,
} from "../components/OnboardingStep";
import { OnboardingText } from "../components/OnboardingText";
import { getV1RecommendedOnboardingAgents } from "@/app/api/__generated__/endpoints/onboarding/onboarding";
import { resolveResponse } from "@/app/api/helpers";
import { StoreAgentDetails } from "@/app/api/__generated__/models/storeAgentDetails";

export default function Page() {
const { state, updateState, completeStep } = useOnboarding(4, "INTEGRATIONS");
const [agents, setAgents] = useState<StoreAgentDetails[]>([]);
const api = useBackendAPI();
const router = useRouter();

useEffect(() => {
resolveResponse(getV1RecommendedOnboardingAgents()).then((agents) => {
api.getOnboardingAgents().then((agents) => {
if (agents.length < 2) {
completeStep("CONGRATS");
router.replace("/");

@@ -12,9 +12,6 @@ import {
useGetV2GetAgentByVersion,
useGetV2GetAgentGraph,
} from "@/app/api/__generated__/endpoints/store/store";
import { resolveResponse } from "@/app/api/helpers";
import { postV2AddMarketplaceAgent } from "@/app/api/__generated__/endpoints/library/library";
import { GraphID } from "@/lib/autogpt-server-api";

export function useOnboardingRunStep() {
const onboarding = useOnboarding(undefined, "AGENT_CHOICE");
@@ -80,7 +77,12 @@ export function useOnboardingRunStep() {

setShowInput(true);
onboarding.setStep(6);
onboarding.completeStep("AGENT_NEW_RUN");
onboarding.updateState({
completedSteps: [
...(onboarding.state.completedSteps || []),
"AGENT_NEW_RUN",
],
});
}

function handleSetAgentInput(key: string, value: string) {
@@ -109,22 +111,21 @@ export function useOnboardingRunStep() {
setRunningAgent(true);

try {
const libraryAgent = await resolveResponse(
postV2AddMarketplaceAgent({
store_listing_version_id: storeAgent?.store_listing_version_id || "",
source: "onboarding",
}),
const libraryAgent = await api.addMarketplaceAgentToLibrary(
storeAgent?.store_listing_version_id || "",
);

const { id: runID } = await api.executeGraph(
libraryAgent.graph_id as GraphID,
libraryAgent.graph_id,
libraryAgent.graph_version,
onboarding.state.agentInput || {},
inputCredentials,
"onboarding",
);

onboarding.updateState({ onboardingAgentExecutionId: runID });
onboarding.updateState({
onboardingAgentExecutionId: runID,
agentRuns: (onboarding.state.agentRuns || 0) + 1,
});

router.push("/onboarding/6-congrats");
} catch (error) {

@@ -5,9 +5,6 @@ import { useRouter } from "next/navigation";
import * as party from "party-js";
import { useEffect, useRef, useState } from "react";
import { useOnboarding } from "../../../../providers/onboarding/onboarding-provider";
import { resolveResponse } from "@/app/api/helpers";
import { getV1OnboardingState } from "@/app/api/__generated__/endpoints/onboarding/onboarding";
import { postV2AddMarketplaceAgent } from "@/app/api/__generated__/endpoints/library/library";

export default function Page() {
const { completeStep } = useOnboarding(7, "AGENT_INPUT");
@@ -40,15 +37,11 @@ export default function Page() {
completeStep("CONGRATS");

try {
const onboarding = await resolveResponse(getV1OnboardingState());
const onboarding = await api.getUserOnboarding();
if (onboarding?.selectedStoreListingVersionId) {
try {
const libraryAgent = await resolveResponse(
postV2AddMarketplaceAgent({
store_listing_version_id:
onboarding.selectedStoreListingVersionId,
source: "onboarding",
}),
const libraryAgent = await api.addMarketplaceAgentToLibrary(
onboarding.selectedStoreListingVersionId,
);
router.replace(`/library/agents/${libraryAgent.id}`);
} catch (error) {

@@ -1,7 +1,7 @@
import { cn } from "@/lib/utils";
import StarRating from "./StarRating";
import { StoreAgentDetails } from "@/lib/autogpt-server-api";
import SmartImage from "@/components/__legacy__/SmartImage";
import { StoreAgentDetails } from "@/app/api/__generated__/models/storeAgentDetails";

type OnboardingAgentCardProps = {
agent?: StoreAgentDetails;

@@ -1,24 +1,24 @@
"use client";
import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
import { useRouter } from "next/navigation";
import { useEffect } from "react";
import { resolveResponse, shouldShowOnboarding } from "@/app/api/helpers";
import { getV1OnboardingState } from "@/app/api/__generated__/endpoints/onboarding/onboarding";

export default function OnboardingPage() {
const router = useRouter();
const api = useBackendAPI();

useEffect(() => {
async function redirectToStep() {
try {
// Check if onboarding is enabled
const isEnabled = await shouldShowOnboarding();
const isEnabled = await api.isOnboardingEnabled();
if (!isEnabled) {
router.replace("/");
return;
}

const onboarding = await resolveResponse(getV1OnboardingState());
const onboarding = await api.getUserOnboarding();

// Handle completed onboarding
if (onboarding.completedSteps.includes("GET_RESULTS")) {
@@ -66,7 +66,7 @@ export default function OnboardingPage() {
}

redirectToStep();
}, [router]);
}, [api, router]);

return <LoadingSpinner size="large" cover />;
}

@@ -1,16 +1,6 @@
"use client";

import { useState, useEffect } from "react";
import {
LineChart,
Line,
XAxis,
YAxis,
CartesianGrid,
Tooltip,
Legend,
ResponsiveContainer,
} from "recharts";
import { Button } from "@/components/atoms/Button/Button";
import { Input } from "@/components/__legacy__/ui/input";
import { Label } from "@/components/__legacy__/ui/label";
@@ -28,12 +18,9 @@ import { useToast } from "@/components/molecules/Toast/use-toast";
import {
usePostV2GenerateExecutionAnalytics,
useGetV2GetExecutionAnalyticsConfiguration,
useGetV2GetExecutionAccuracyTrendsAndAlerts,
} from "@/app/api/__generated__/endpoints/admin/admin";
import type { ExecutionAnalyticsRequest } from "@/app/api/__generated__/models/executionAnalyticsRequest";
import type { ExecutionAnalyticsResponse } from "@/app/api/__generated__/models/executionAnalyticsResponse";
import type { AccuracyTrendsResponse } from "@/app/api/__generated__/models/accuracyTrendsResponse";
import type { AccuracyLatestData } from "@/app/api/__generated__/models/accuracyLatestData";

// Use the generated type with minimal adjustment for form handling
interface FormData extends Omit<ExecutionAnalyticsRequest, "created_after"> {
@@ -46,133 +33,8 @@ export function ExecutionAnalyticsForm() {
const [results, setResults] = useState<ExecutionAnalyticsResponse | null>(
null,
);
const [trendsData, setTrendsData] = useState<AccuracyTrendsResponse | null>(
null,
);
const { toast } = useToast();

// State for accuracy trends query parameters
const [accuracyParams, setAccuracyParams] = useState<{
graph_id: string;
user_id?: string;
days_back: number;
drop_threshold: number;
include_historical?: boolean;
} | null>(null);

// Use the generated API client for accuracy trends (GET)
const { data: accuracyApiResponse, error: accuracyError } =
useGetV2GetExecutionAccuracyTrendsAndAlerts(
accuracyParams || {
graph_id: "",
days_back: 30,
drop_threshold: 10.0,
include_historical: false,
},
{
query: {
enabled: !!accuracyParams?.graph_id,
},
},
);

// Update local state when data changes and handle success/error
useEffect(() => {
if (accuracyError) {
console.error("Failed to fetch trends:", accuracyError);
toast({
title: "Trends Error",
description:
(accuracyError as any)?.message || "Failed to fetch accuracy trends",
variant: "destructive",
});
return;
}

const data = accuracyApiResponse?.data;
if (data && "latest_data" in data) {
setTrendsData(data);

// Check for alerts
if (data.alert) {
toast({
title: "🚨 Accuracy Alert Detected",
description: `${data.alert.drop_percent.toFixed(1)}% accuracy drop detected for this agent`,
variant: "destructive",
});
}
}
}, [accuracyApiResponse, accuracyError, toast]);

// Chart component for accuracy trends
function AccuracyChart({ data }: { data: AccuracyLatestData[] }) {
const chartData = data.map((item) => ({
date: new Date(item.date).toLocaleDateString(),
"Daily Score": item.daily_score,
"3-Day Avg": item.three_day_avg,
"7-Day Avg": item.seven_day_avg,
"14-Day Avg": item.fourteen_day_avg,
}));

return (
<ResponsiveContainer width="100%" height={400}>
<LineChart
data={chartData}
margin={{ top: 5, right: 30, left: 20, bottom: 5 }}
>
<CartesianGrid strokeDasharray="3 3" />
<XAxis dataKey="date" />
<YAxis domain={[0, 100]} />
<Tooltip
formatter={(value) => [`${Number(value).toFixed(2)}%`, ""]}
/>
<Legend />
<Line
type="monotone"
dataKey="Daily Score"
stroke="#3b82f6"
strokeWidth={2}
dot={{ r: 3 }}
/>
<Line
type="monotone"
dataKey="3-Day Avg"
stroke="#10b981"
strokeWidth={2}
dot={{ r: 3 }}
/>
<Line
type="monotone"
dataKey="7-Day Avg"
stroke="#f59e0b"
strokeWidth={2}
dot={{ r: 3 }}
/>
<Line
type="monotone"
dataKey="14-Day Avg"
stroke="#8b5cf6"
strokeWidth={2}
dot={{ r: 3 }}
/>
</LineChart>
</ResponsiveContainer>
);
}

// Function to fetch accuracy trends using generated API client
const fetchAccuracyTrends = (graphId: string, userId?: string) => {
if (!graphId.trim()) return;

setAccuracyParams({
graph_id: graphId.trim(),
user_id: userId?.trim() || undefined,
days_back: 30,
drop_threshold: 10.0,
include_historical: showAccuracyChart, // Include historical data when chart is enabled
});
};

// Fetch configuration from API
const {
data: config,
@@ -188,7 +50,6 @@ export function ExecutionAnalyticsForm() {
}
const result = res.data;
setResults(result);

toast({
title: "Analytics Generated",
description: `Processed ${result.processed_executions} executions. ${result.successful_analytics} successful, ${result.failed_analytics} failed, ${result.skipped_executions} skipped.`,
@@ -197,21 +58,11 @@
},
onError: (error: any) => {
console.error("Analytics generation error:", error);

const errorMessage =
error?.message || error?.detail || "An unexpected error occurred";
const isOpenAIError = errorMessage.includes(
"OpenAI API key not configured",
);

toast({
title: isOpenAIError
? "Analytics Generation Skipped"
: "Analytics Generation Failed",
description: isOpenAIError
? "Analytics generation requires OpenAI configuration, but accuracy trends are still available above."
: errorMessage,
variant: isOpenAIError ? "default" : "destructive",
title: "Analytics Generation Failed",
description:
error?.message || error?.detail || "An unexpected error occurred",
variant: "destructive",
});
},
},
@@ -226,9 +77,6 @@ export function ExecutionAnalyticsForm() {
user_prompt: "", // Will use config default when empty
});

// State for accuracy trends chart toggle
const [showAccuracyChart, setShowAccuracyChart] = useState(true);

// Update form defaults when config loads
useEffect(() => {
if (config?.data && config.status === 200 && !formData.model_name) {
@@ -253,11 +101,6 @@

setResults(null);

// Fetch accuracy trends if chart is enabled
if (showAccuracyChart) {
fetchAccuracyTrends(formData.graph_id, formData.user_id || undefined);
}

// Prepare the request payload
const payload: ExecutionAnalyticsRequest = {
graph_id: formData.graph_id.trim(),
@@ -419,18 +262,6 @@
</Label>
</div>

{/* Show Accuracy Chart Checkbox */}
<div className="flex items-center space-x-2">
<Checkbox
id="show_accuracy_chart"
checked={showAccuracyChart}
onCheckedChange={(checked) => setShowAccuracyChart(!!checked)}
/>
<Label htmlFor="show_accuracy_chart" className="text-sm">
Show accuracy trends chart and historical data visualization
</Label>
</div>

{/* Custom System Prompt */}
<div className="space-y-2">
<Label htmlFor="system_prompt">
@@ -539,98 +370,6 @@
</div>
</form>

{/* Accuracy Trends Display */}
{trendsData && (
<div className="space-y-4">
<h3 className="text-lg font-semibold">Execution Accuracy Trends</h3>

{/* Alert Section */}
{trendsData.alert && (
<div className="rounded-lg border-l-4 border-red-500 bg-red-50 p-4">
<div className="flex items-start">
<span className="text-2xl">🚨</span>
<div className="ml-3 space-y-2">
<h4 className="text-lg font-semibold text-red-800">
Accuracy Alert Detected
</h4>
<p className="text-red-700">
<strong>
{trendsData.alert.drop_percent.toFixed(1)}% accuracy drop
</strong>{" "}
detected for agent{" "}
<code className="rounded bg-red-100 px-1 text-sm">
{formData.graph_id}
</code>
</p>
<div className="space-y-1 text-sm text-red-600">
<p>
• 3-day average:{" "}
<strong>
{trendsData.alert.three_day_avg.toFixed(2)}%
</strong>
</p>
<p>
• 7-day average:{" "}
<strong>
{trendsData.alert.seven_day_avg.toFixed(2)}%
</strong>
</p>
<p>
• Detected at:{" "}
<strong>
{new Date(
trendsData.alert.detected_at,
).toLocaleString()}
</strong>
</p>
</div>
</div>
</div>
</div>
)}

{/* Latest Data Summary */}
<div className="grid grid-cols-2 gap-4 md:grid-cols-4">
<div className="rounded-lg border bg-white p-4 text-center">
<div className="text-2xl font-bold text-blue-600">
{trendsData.latest_data.daily_score?.toFixed(2) || "N/A"}
</div>
<div className="text-sm text-gray-600">Daily Score</div>
</div>
<div className="rounded-lg border bg-white p-4 text-center">
<div className="text-2xl font-bold text-green-600">
{trendsData.latest_data.three_day_avg?.toFixed(2) || "N/A"}
</div>
<div className="text-sm text-gray-600">3-Day Avg</div>
</div>
<div className="rounded-lg border bg-white p-4 text-center">
<div className="text-2xl font-bold text-orange-600">
{trendsData.latest_data.seven_day_avg?.toFixed(2) || "N/A"}
</div>
<div className="text-sm text-gray-600">7-Day Avg</div>
</div>
<div className="rounded-lg border bg-white p-4 text-center">
<div className="text-2xl font-bold text-purple-600">
{trendsData.latest_data.fourteen_day_avg?.toFixed(2) || "N/A"}
</div>
<div className="text-sm text-gray-600">14-Day Avg</div>
</div>
</div>

{/* Chart Section - only show when toggle is enabled and historical data exists */}
{showAccuracyChart && trendsData?.historical_data && (
<div className="mt-6">
<h4 className="mb-4 text-lg font-semibold">
Execution Accuracy Trends Chart
</h4>
<div className="rounded-lg border bg-white p-6">
<AccuracyChart data={trendsData.historical_data} />
</div>
</div>
)}
</div>
)}

{results && <AnalyticsResultsTable results={results} />}
</div>
);

@@ -17,13 +17,12 @@ function ExecutionAnalyticsDashboard() {
</div>

<div className="rounded-lg border bg-white p-6 shadow-sm">
<h2 className="mb-4 text-xl font-semibold">
Execution Analytics & Accuracy Monitoring
</h2>
<h2 className="mb-4 text-xl font-semibold">Analytics Generation</h2>
<p className="mb-6 text-gray-600">
Generate missing activity summaries and success scores for agent
executions. After generation, accuracy trends and alerts will
automatically be displayed to help monitor agent health over time.
This tool will identify completed executions missing activity
summaries or success scores and generate them using AI. Only
executions that meet the criteria and are missing these fields will
be processed.
</p>

<Suspense

@@ -1,4 +1,4 @@
import { OAuthPopupResultMessage } from "@/components/renderers/input-renderer/fields/CredentialField/models/OAuthCredentialModal/useOAuthCredentialModal";
import { OAuthPopupResultMessage } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/CredentialsInputs";
import { NextResponse } from "next/server";

// This route is intended to be used as the callback for integration OAuth flows,

@@ -9,8 +9,6 @@ import { useGraphStore } from "@/app/(platform)/build/stores/graphStore";
import { useShallow } from "zustand/react/shallow";
import { useState } from "react";
import { useSaveGraph } from "@/app/(platform)/build/hooks/useSaveGraph";
import { useNodeStore } from "@/app/(platform)/build/stores/nodeStore";
import { ApiError } from "@/lib/autogpt-server-api/helpers"; // Check if this exists

export const useRunGraph = () => {
const { saveGraph, isSaving } = useSaveGraph({
@@ -26,13 +24,6 @@ export const useRunGraph = () => {
);
const [openRunInputDialog, setOpenRunInputDialog] = useState(false);

const setNodeErrorsForBackendId = useNodeStore(
useShallow((state) => state.setNodeErrorsForBackendId),
);
const clearAllNodeErrors = useNodeStore(
useShallow((state) => state.clearAllNodeErrors),
);

const [{ flowID, flowVersion, flowExecutionID }, setQueryStates] =
useQueryStates({
flowID: parseAsString,
@@ -44,49 +35,19 @@ export const useRunGraph = () => {
usePostV1ExecuteGraphAgent({
mutation: {
onSuccess: (response: any) => {
clearAllNodeErrors();
const { id } = response.data as GraphExecutionMeta;
setQueryStates({
flowExecutionID: id,
});
},
onError: (error: any) => {
// Reset running state on error
setIsGraphRunning(false);
if (error instanceof ApiError && error.isGraphValidationError?.()) {
const errorData = error.response?.detail;

if (errorData?.node_errors) {
Object.entries(errorData.node_errors).forEach(
([backendId, nodeErrors]) => {
setNodeErrorsForBackendId(
backendId,
nodeErrors as { [key: string]: string },
);
},
);

useNodeStore.getState().nodes.forEach((node) => {
const backendId = node.data.metadata?.backend_id || node.id;
if (!errorData.node_errors[backendId as string]) {
useNodeStore.getState().updateNodeErrors(node.id, {});
}
});
}

toast({
title: errorData?.message || "Graph validation failed",
description:
"Please fix the validation errors on the highlighted nodes and try again.",
variant: "destructive",
});
} else {
toast({
title:
(error.detail as string) ?? "An unexpected error occurred.",
description: "An unexpected error occurred.",
variant: "destructive",
});
}
toast({
title: (error.detail as string) ?? "An unexpected error occurred.",
description: "An unexpected error occurred.",
variant: "destructive",
});
},
},
});
@@ -116,7 +77,7 @@ export const useRunGraph = () => {
await executeGraph({
graphId: flowID ?? "",
graphVersion: flowVersion || null,
data: { inputs: {}, credentials_inputs: {}, source: "builder" },
data: { inputs: {}, credentials_inputs: {} },
});
}
};

@@ -79,11 +79,7 @@ export const useRunInputDialog = ({
await executeGraph({
graphId: flowID ?? "",
graphVersion: flowVersion || null,
data: {
inputs: inputValues,
credentials_inputs: credentialValues,
source: "builder",
},
data: { inputs: inputValues, credentials_inputs: credentialValues },
});
// Optimistically set running state immediately for responsive UI
setIsGraphRunning(true);

@@ -1,25 +1,24 @@
import { useCallback } from "react";
import { useReactFlow } from "@xyflow/react";
import { Key, storage } from "@/services/storage/local-storage";
import { v4 as uuidv4 } from "uuid";
import { useNodeStore } from "../../../stores/nodeStore";
import { useEdgeStore } from "../../../stores/edgeStore";
import { CustomNode } from "../nodes/CustomNode/CustomNode";
import { CustomEdge } from "../edges/CustomEdge";
import { useToast } from "@/components/molecules/Toast/use-toast";

interface CopyableData {
nodes: CustomNode[];
edges: CustomEdge[];
}

const CLIPBOARD_PREFIX = "autogpt-flow-data:";

export function useCopyPaste() {
// Only use useReactFlow for viewport (not managed by stores)
const { getViewport } = useReactFlow();
const { toast } = useToast();

const handleCopyPaste = useCallback(
(event: KeyboardEvent) => {
// Prevent copy/paste if any modal is open or if the focus is on an input element
const activeElement = document.activeElement;
const isInputField =
activeElement?.tagName === "INPUT" ||
@@ -29,6 +28,7 @@ export function useCopyPaste() {
if (isInputField) return;

if (event.ctrlKey || event.metaKey) {
// COPY: Ctrl+C or Cmd+C
if (event.key === "c" || event.key === "C") {
const { nodes } = useNodeStore.getState();
const { edges } = useEdgeStore.getState();
@@ -53,102 +53,81 @@
edges: selectedEdges,
};

const clipboardText = `${CLIPBOARD_PREFIX}${JSON.stringify(copiedData)}`;
navigator.clipboard
.writeText(clipboardText)
.then(() => {
toast({
title: "Copied successfully",
description: `${selectedNodes.length} node(s) copied to clipboard`,
});
})
.catch((error) => {
console.error("Failed to copy to clipboard:", error);
});
storage.set(Key.COPIED_FLOW_DATA, JSON.stringify(copiedData));
}

// PASTE: Ctrl+V or Cmd+V
if (event.key === "v" || event.key === "V") {
navigator.clipboard
.readText()
.then((clipboardText) => {
if (!clipboardText.startsWith(CLIPBOARD_PREFIX)) {
return; // Not our data, ignore
}
const copiedDataString = storage.get(Key.COPIED_FLOW_DATA);
if (copiedDataString) {
const copiedData = JSON.parse(copiedDataString) as CopyableData;
const oldToNewIdMap: Record<string, string> = {};

const jsonString = clipboardText.slice(CLIPBOARD_PREFIX.length);
const copiedData = JSON.parse(jsonString) as CopyableData;
const oldToNewIdMap: Record<string, string> = {};
// Get fresh viewport values at paste time to ensure correct positioning
const { x, y, zoom } = getViewport();
const viewportCenter = {
x: (window.innerWidth / 2 - x) / zoom,
y: (window.innerHeight / 2 - y) / zoom,
};

const { x, y, zoom } = getViewport();
const viewportCenter = {
x: (window.innerWidth / 2 - x) / zoom,
y: (window.innerHeight / 2 - y) / zoom,
let minX = Infinity,
minY = Infinity,
maxX = -Infinity,
maxY = -Infinity;
copiedData.nodes.forEach((node) => {
minX = Math.min(minX, node.position.x);
minY = Math.min(minY, node.position.y);
maxX = Math.max(maxX, node.position.x);
maxY = Math.max(maxY, node.position.y);
});

const offsetX = viewportCenter.x - (minX + maxX) / 2;
const offsetY = viewportCenter.y - (minY + maxY) / 2;

// Deselect existing nodes first
useNodeStore.setState((state) => ({
nodes: state.nodes.map((node) => ({ ...node, selected: false })),
}));

// Create and add new nodes with UNIQUE IDs using UUID
copiedData.nodes.forEach((node) => {
const newNodeId = uuidv4();
oldToNewIdMap[node.id] = newNodeId;

const newNode: CustomNode = {
...node,
id: newNodeId,
selected: true,
position: {
x: node.position.x + offsetX,
y: node.position.y + offsetY,
},
};

let minX = Infinity,
minY = Infinity,
maxX = -Infinity,
maxY = -Infinity;
copiedData.nodes.forEach((node) => {
minX = Math.min(minX, node.position.x);
minY = Math.min(minY, node.position.y);
maxX = Math.max(maxX, node.position.x);
maxY = Math.max(maxY, node.position.y);
});

const offsetX = viewportCenter.x - (minX + maxX) / 2;
const offsetY = viewportCenter.y - (minY + maxY) / 2;

// Deselect existing nodes first
useNodeStore.setState((state) => ({
nodes: state.nodes.map((node) => ({
...node,
selected: false,
})),
}));

// Create and add new nodes with UNIQUE IDs using UUID
copiedData.nodes.forEach((node) => {
const newNodeId = uuidv4();
oldToNewIdMap[node.id] = newNodeId;

const newNode: CustomNode = {
...node,
id: newNodeId,
selected: true,
position: {
x: node.position.x + offsetX,
y: node.position.y + offsetY,
},
};

useNodeStore.getState().addNode(newNode);
});

// Add edges with updated source/target IDs
const { addEdge } = useEdgeStore.getState();
copiedData.edges.forEach((edge) => {
const newSourceId = oldToNewIdMap[edge.source] ?? edge.source;
const newTargetId = oldToNewIdMap[edge.target] ?? edge.target;

addEdge({
source: newSourceId,
target: newTargetId,
sourceHandle: edge.sourceHandle ?? "",
targetHandle: edge.targetHandle ?? "",
data: {
...edge.data,
},
});
});
})
.catch((error) => {
console.error("Failed to read from clipboard:", error);
useNodeStore.getState().addNode(newNode);
});

// Add edges with updated source/target IDs
const { addEdge } = useEdgeStore.getState();
copiedData.edges.forEach((edge) => {
const newSourceId = oldToNewIdMap[edge.source] ?? edge.source;
const newTargetId = oldToNewIdMap[edge.target] ?? edge.target;

addEdge({
source: newSourceId,
target: newTargetId,
sourceHandle: edge.sourceHandle ?? "",
targetHandle: edge.targetHandle ?? "",
data: {
...edge.data,
},
});
});
}
}
}
},
[getViewport, toast],
[getViewport],
);

return handleCopyPaste;

@@ -42,12 +42,11 @@ export const useFlow = () => {
const setBlockMenuOpen = useControlPanelStore(
useShallow((state) => state.setBlockMenuOpen),
);
const [{ flowID, flowVersion, flowExecutionID }, setQueryStates] =
useQueryStates({
flowID: parseAsString,
flowVersion: parseAsInteger,
flowExecutionID: parseAsString,
});
const [{ flowID, flowVersion, flowExecutionID }] = useQueryStates({
flowID: parseAsString,
flowVersion: parseAsInteger,
flowExecutionID: parseAsString,
});

const { data: executionDetails } = useGetV1GetExecutionDetails(
flowID || "",
@@ -82,7 +81,7 @@ export const useFlow = () => {
{
query: {
select: (res) => res.data as BlockInfo[],
enabled: !!flowID && !!blockIds && blockIds.length > 0,
enabled: !!flowID && !!blockIds,
},
},
);
@@ -103,9 +102,6 @@ export const useFlow = () => {
// load graph schemas
useEffect(() => {
if (graph) {
setQueryStates({
flowVersion: graph.version ?? 1,
});
setGraphSchemas(
graph.input_schema as Record<string, any> | null,
graph.credentials_input_schema as Record<string, any> | null,

@@ -37,7 +37,6 @@ export type CustomNodeData = {
costs: BlockCost[];
categories: BlockInfoCategoriesItem[];
metadata?: NodeModelMetadata;
errors?: { [key: string]: string };
};

export type CustomNode = XYNode<CustomNodeData, "custom">;
@@ -72,24 +71,10 @@ export const CustomNode: React.FC<NodeProps<CustomNode>> = React.memo(
? (data.hardcodedValues.output_schema ?? {})
: data.outputSchema;

const hasConfigErrors =
data.errors &&
Object.values(data.errors).some(
(value) => value !== null && value !== undefined && value !== "",
);

const outputData = data.nodeExecutionResult?.output_data;
const hasOutputError =
typeof outputData === "object" &&
outputData !== null &&
"error" in outputData;

const hasErrors = hasConfigErrors || hasOutputError;

// Currently all blockTypes design are similar - that's why i am using the same component for all of them
// If in future - if we need some drastic change in some blockTypes design - we can create separate components for them
return (
<NodeContainer selected={selected} nodeId={nodeId} hasErrors={hasErrors}>
<NodeContainer selected={selected} nodeId={nodeId}>
<div className="rounded-xlarge bg-white">
<NodeHeader data={data} nodeId={nodeId} />
{isWebhook && <WebhookDisclaimer nodeId={nodeId} />}

@@ -3,18 +3,15 @@ import { nodeStyleBasedOnStatus } from "../helpers";

import { useNodeStore } from "@/app/(platform)/build/stores/nodeStore";
import { useShallow } from "zustand/react/shallow";
import { AgentExecutionStatus } from "@/app/api/__generated__/models/agentExecutionStatus";

export const NodeContainer = ({
children,
nodeId,
selected,
hasErrors, // these are configuration errors that occur before executing the graph -- more like validation errors
}: {
children: React.ReactNode;
nodeId: string;
selected: boolean;
hasErrors?: boolean;
}) => {
const status = useNodeStore(
useShallow((state) => state.getNodeStatus(nodeId)),
@@ -25,7 +22,6 @@ export const NodeContainer = ({
"z-12 max-w-[370px] rounded-xlarge ring-1 ring-slate-200/60",
selected && "shadow-lg ring-2 ring-slate-200",
status && nodeStyleBasedOnStatus[status],
hasErrors ? nodeStyleBasedOnStatus[AgentExecutionStatus.FAILED] : "",
)}
>
{children}

@@ -28,7 +28,6 @@ export const NodeContextMenu = ({
})),
}));

useCopyPasteStore.getState().copySelectedNodes();
useCopyPasteStore.getState().pasteNodes();
};

@@ -1,7 +1,7 @@
import { useBlockMenuStore } from "../../../../stores/blockMenuStore";
import { useGetV2BuilderSearchInfinite } from "@/app/api/__generated__/endpoints/store/store";
import { SearchResponse } from "@/app/api/__generated__/models/searchResponse";
import { useCallback, useEffect, useState } from "react";
import { useState } from "react";
import { useAddAgentToBuilder } from "../hooks/useAddAgentToBuilder";
import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import { getV2GetSpecificAgent } from "@/app/api/__generated__/endpoints/store/store";
@@ -9,27 +9,16 @@ import {
getGetV2ListLibraryAgentsQueryKey,
usePostV2AddMarketplaceAgent,
} from "@/app/api/__generated__/endpoints/library/library";
import {
getGetV2GetBuilderItemCountsQueryKey,
getGetV2GetBuilderSuggestionsQueryKey,
} from "@/app/api/__generated__/endpoints/default/default";
import { getGetV2GetBuilderItemCountsQueryKey } from "@/app/api/__generated__/endpoints/default/default";
import { getQueryClient } from "@/lib/react-query/queryClient";
import { useToast } from "@/components/molecules/Toast/use-toast";
import * as Sentry from "@sentry/nextjs";

export const useBlockMenuSearch = () => {
const { searchQuery, searchId, setSearchId } = useBlockMenuStore();
const { searchQuery } = useBlockMenuStore();
const { toast } = useToast();
const { addAgentToBuilder, addLibraryAgentToBuilder } =
useAddAgentToBuilder();
const queryClient = getQueryClient();

const resetSearchSession = useCallback(() => {
setSearchId(undefined);
queryClient.invalidateQueries({
queryKey: getGetV2GetBuilderSuggestionsQueryKey(),
});
}, [queryClient, setSearchId]);

const [addingLibraryAgentId, setAddingLibraryAgentId] = useState<
string | null
@@ -49,19 +38,13 @@ export const useBlockMenuSearch = () => {
page: 1,
page_size: 8,
search_query: searchQuery,
search_id: searchId,
},
{
query: {
getNextPageParam: (lastPage) => {
const response = lastPage.data as SearchResponse;
const { pagination } = response;
if (!pagination) {
return undefined;
}

const { current_page, total_pages } = pagination;
return current_page < total_pages ? current_page + 1 : undefined;
getNextPageParam: (lastPage, allPages) => {
const pagination = lastPage.data as SearchResponse;
const isMore = pagination.more_pages;
return isMore ? allPages.length + 1 : undefined;
},
},
},
@@ -70,6 +53,7 @@ export const useBlockMenuSearch = () => {
const { mutateAsync: addMarketplaceAgent } = usePostV2AddMarketplaceAgent({
mutation: {
onSuccess: () => {
const queryClient = getQueryClient();
queryClient.invalidateQueries({
queryKey: getGetV2ListLibraryAgentsQueryKey(),
});
@@ -91,24 +75,6 @@ export const useBlockMenuSearch = () => {
},
});

useEffect(() => {
if (!searchData?.pages?.length) {
return;
}

const latestPage = searchData.pages[searchData.pages.length - 1];
const response = latestPage?.data as SearchResponse;
if (response?.search_id && response.search_id !== searchId) {
setSearchId(response.search_id);
}
}, [searchData, searchId, setSearchId]);

useEffect(() => {
if (searchId && !searchQuery) {
resetSearchSession();
}
}, [resetSearchSession, searchId, searchQuery]);

const allSearchData =
searchData?.pages?.flatMap((page) => {
const response = page.data as SearchResponse;

@@ -1,32 +1,30 @@
import { debounce } from "lodash";
import { useCallback, useEffect, useRef, useState } from "react";
import { useBlockMenuStore } from "../../../../stores/blockMenuStore";
import { getQueryClient } from "@/lib/react-query/queryClient";
import { getGetV2GetBuilderSuggestionsQueryKey } from "@/app/api/__generated__/endpoints/default/default";

const SEARCH_DEBOUNCE_MS = 300;

export const useBlockMenuSearchBar = () => {
const inputRef = useRef<HTMLInputElement>(null);
const [localQuery, setLocalQuery] = useState("");
const { setSearchQuery, setSearchId, searchQuery } = useBlockMenuStore();
const queryClient = getQueryClient();
const { setSearchQuery, setSearchId, searchId, searchQuery } =
useBlockMenuStore();

const clearSearchSession = useCallback(() => {
setSearchId(undefined);
queryClient.invalidateQueries({
queryKey: getGetV2GetBuilderSuggestionsQueryKey(),
});
}, [queryClient, setSearchId]);
const searchIdRef = useRef(searchId);
useEffect(() => {
searchIdRef.current = searchId;
}, [searchId]);

const debouncedSetSearchQuery = useCallback(
debounce((value: string) => {
setSearchQuery(value);
if (value.length === 0) {
clearSearchSession();
setSearchId(undefined);
} else if (!searchIdRef.current) {
setSearchId(crypto.randomUUID());
}
}, SEARCH_DEBOUNCE_MS),
[clearSearchSession, setSearchQuery],
[setSearchQuery, setSearchId],
);

useEffect(() => {
@@ -38,13 +36,13 @@ export const useBlockMenuSearchBar = () => {
const handleClear = () => {
setLocalQuery("");
setSearchQuery("");
clearSearchSession();
setSearchId(undefined);
debouncedSetSearchQuery.cancel();
};

useEffect(() => {
setLocalQuery(searchQuery);
}, [searchQuery]);
}, []);

return {
handleClear,

@@ -1,109 +0,0 @@
import React, { useEffect, useRef, useState } from "react";
import { ArrowLeftIcon, ArrowRightIcon } from "@phosphor-icons/react";
import { cn } from "@/lib/utils";

interface HorizontalScrollAreaProps {
children: React.ReactNode;
wrapperClassName?: string;
scrollContainerClassName?: string;
scrollAmount?: number;
dependencyList?: React.DependencyList;
}

const defaultDependencies: React.DependencyList = [];
const baseScrollClasses =
"flex gap-2 overflow-x-auto px-8 [scrollbar-width:none] [-ms-overflow-style:'none'] [&::-webkit-scrollbar]:hidden";

export const HorizontalScroll: React.FC<HorizontalScrollAreaProps> = ({
children,
wrapperClassName,
scrollContainerClassName,
scrollAmount = 300,
dependencyList = defaultDependencies,
}) => {
const scrollRef = useRef<HTMLDivElement | null>(null);
const [canScrollLeft, setCanScrollLeft] = useState(false);
const [canScrollRight, setCanScrollRight] = useState(false);

const scrollByDelta = (delta: number) => {
if (!scrollRef.current) {
return;
}
scrollRef.current.scrollBy({ left: delta, behavior: "smooth" });
};

const updateScrollState = () => {
const element = scrollRef.current;
if (!element) {
setCanScrollLeft(false);
setCanScrollRight(false);
return;
}
setCanScrollLeft(element.scrollLeft > 0);
setCanScrollRight(
Math.ceil(element.scrollLeft + element.clientWidth) < element.scrollWidth,
);
};

useEffect(() => {
updateScrollState();
const element = scrollRef.current;
if (!element) {
return;
}
const handleScroll = () => updateScrollState();
element.addEventListener("scroll", handleScroll);
window.addEventListener("resize", handleScroll);
return () => {
element.removeEventListener("scroll", handleScroll);
window.removeEventListener("resize", handleScroll);
};
}, dependencyList);

return (
<div className={wrapperClassName}>
<div className="group relative">
<div
ref={scrollRef}
className={cn(baseScrollClasses, scrollContainerClassName)}
>
{children}
</div>
{canScrollLeft && (
<div className="pointer-events-none absolute inset-y-0 left-0 w-8 bg-gradient-to-r from-white via-white/80 to-white/0" />
)}
{canScrollRight && (
<div className="pointer-events-none absolute inset-y-0 right-0 w-8 bg-gradient-to-l from-white via-white/80 to-white/0" />
)}
{canScrollLeft && (
<button
type="button"
aria-label="Scroll left"
className="pointer-events-none absolute left-2 top-5 -translate-y-1/2 opacity-0 transition-opacity duration-200 group-hover:pointer-events-auto group-hover:opacity-100"
onClick={() => scrollByDelta(-scrollAmount)}
>
<ArrowLeftIcon
size={28}
className="rounded-full bg-zinc-700 p-1 text-white drop-shadow"
weight="light"
/>
</button>
)}
{canScrollRight && (
<button
type="button"
aria-label="Scroll right"
className="pointer-events-none absolute right-2 top-5 -translate-y-1/2 opacity-0 transition-opacity duration-200 group-hover:pointer-events-auto group-hover:opacity-100"
onClick={() => scrollByDelta(scrollAmount)}
>
<ArrowRightIcon
size={28}
className="rounded-full bg-zinc-700 p-1 text-white drop-shadow"
weight="light"
/>
</button>
)}
</div>
</div>
);
};
@@ -6,15 +6,10 @@ import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard";
import { blockMenuContainerStyle } from "../style";
import { useBlockMenuStore } from "../../../../stores/blockMenuStore";
import { DefaultStateType } from "../types";
import { SearchHistoryChip } from "../SearchHistoryChip";
import { HorizontalScroll } from "../HorizontalScroll";

export const SuggestionContent = () => {
const { setIntegration, setDefaultState, setSearchQuery, setSearchId } =
useBlockMenuStore();
const { setIntegration, setDefaultState } = useBlockMenuStore();
const { data, isLoading, isError, error, refetch } = useSuggestionContent();
const suggestions = data?.suggestions;
const hasRecentSearches = (suggestions?.recent_searches?.length ?? 0) > 0;

if (isError) {
return (
@@ -34,45 +29,11 @@ export const SuggestionContent = () => {
);
}

const suggestions = data?.suggestions;

return (
<div className={blockMenuContainerStyle}>
<div className="w-full space-y-6 pb-4">
{/* Recent searches */}
{hasRecentSearches && (
<div className="space-y-2.5 px-4">
<p className="font-sans text-sm font-medium leading-[1.375rem] text-zinc-800">
Recent searches
</p>
<HorizontalScroll
wrapperClassName="-mx-8"
scrollContainerClassName="flex gap-2 overflow-x-auto px-8 [scrollbar-width:none] [-ms-overflow-style:'none'] [&::-webkit-scrollbar]:hidden"
dependencyList={[
suggestions?.recent_searches?.length ?? 0,
isLoading,
]}
>
{!isLoading && suggestions
? suggestions.recent_searches.map((entry, index) => (
<SearchHistoryChip
key={entry.search_id || `${entry.search_query}-${index}`}
content={entry.search_query || "Untitled search"}
onClick={() => {
setSearchQuery(entry.search_query || "");
setSearchId(entry.search_id || undefined);
}}
/>
))
: Array(3)
.fill(0)
.map((_, index) => (
<SearchHistoryChip.Skeleton
key={`recent-search-skeleton-${index}`}
/>
))}
</HorizontalScroll>
</div>
)}

{/* Integrations */}
<div className="space-y-2.5 px-4">
<p className="font-sans text-sm font-medium leading-[1.375rem] text-zinc-800">

@@ -981,17 +981,15 @@ const NodeArrayInput: FC<{
|
||||
);
|
||||
return (
|
||||
<div key={entryKey}>
|
||||
{schema.items && (
|
||||
<NodeHandle
|
||||
title={`#${index + 1}`}
|
||||
className="text-sm text-gray-500"
|
||||
keyName={entryKey}
|
||||
schema={schema.items}
|
||||
isConnected={isConnected}
|
||||
isRequired={false}
|
||||
side="left"
|
||||
/>
|
||||
)}
|
||||
<NodeHandle
|
||||
title={`#${index + 1}`}
|
||||
className="text-sm text-gray-500"
|
||||
keyName={entryKey}
|
||||
schema={schema.items!}
|
||||
isConnected={isConnected}
|
||||
isRequired={false}
|
||||
side="left"
|
||||
/>
|
||||
<div className="mb-2 flex space-x-2">
|
||||
{!isConnected &&
|
||||
(schema.items ? (
|
||||
|
||||
@@ -83,6 +83,7 @@ export function RunnerInputDialog({
|
||||
onRun={doRun ? undefined : doClose}
|
||||
doCreateSchedule={doCreateSchedule ? handleSchedule : undefined}
|
||||
onCreateSchedule={doCreateSchedule ? undefined : doClose}
|
||||
runCount={0}
|
||||
/>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
|
||||
@@ -152,9 +152,7 @@ export const useSaveGraph = ({
|
||||
links: graphLinks,
|
||||
};
|
||||
|
||||
const response = await createNewGraph({
|
||||
data: { graph: data, source: "builder" },
|
||||
});
|
||||
const response = await createNewGraph({ data: { graph: data } });
|
||||
const graphData = response.data as GraphModel;
|
||||
setGraphSchemas(
|
||||
graphData.input_schema,
|
||||
|
||||
@@ -53,15 +53,6 @@ type NodeStore = {
|
||||
getNodeExecutionResult: (nodeId: string) => NodeExecutionResult | undefined;
|
||||
getNodeBlockUIType: (nodeId: string) => BlockUIType;
|
||||
hasWebhookNodes: () => boolean;
|
||||
|
||||
updateNodeErrors: (nodeId: string, errors: { [key: string]: string }) => void;
|
||||
clearNodeErrors: (nodeId: string) => void;
|
||||
getNodeErrors: (nodeId: string) => { [key: string]: string } | undefined;
|
||||
setNodeErrorsForBackendId: (
|
||||
backendId: string,
|
||||
errors: { [key: string]: string },
|
||||
) => void;
|
||||
clearAllNodeErrors: () => void; // Add this
|
||||
};
|
||||
|
||||
export const useNodeStore = create<NodeStore>((set, get) => ({
|
||||
@@ -262,47 +253,4 @@ export const useNodeStore = create<NodeStore>((set, get) => ({
|
||||
[BlockUIType.WEBHOOK, BlockUIType.WEBHOOK_MANUAL].includes(n.data.uiType),
|
||||
);
|
||||
},
|
||||
|
||||
updateNodeErrors: (nodeId: string, errors: { [key: string]: string }) => {
|
||||
set((state) => ({
|
||||
nodes: state.nodes.map((n) =>
|
||||
n.id === nodeId ? { ...n, data: { ...n.data, errors } } : n,
|
||||
),
|
||||
}));
|
||||
},
|
||||
|
||||
clearNodeErrors: (nodeId: string) => {
|
||||
set((state) => ({
|
||||
nodes: state.nodes.map((n) =>
|
||||
n.id === nodeId ? { ...n, data: { ...n.data, errors: undefined } } : n,
|
||||
),
|
||||
}));
|
||||
},
|
||||
|
||||
getNodeErrors: (nodeId: string) => {
|
||||
return get().nodes.find((n) => n.id === nodeId)?.data?.errors;
|
||||
},
|
||||
|
||||
setNodeErrorsForBackendId: (
|
||||
backendId: string,
|
||||
errors: { [key: string]: string },
|
||||
) => {
|
||||
set((state) => ({
|
||||
nodes: state.nodes.map((n) => {
|
||||
// Match by backend_id if nodes have it, or by id
|
||||
const matches =
|
||||
n.data.metadata?.backend_id === backendId || n.id === backendId;
|
||||
return matches ? { ...n, data: { ...n.data, errors } } : n;
|
||||
}),
|
||||
}));
|
||||
},
|
||||
|
||||
clearAllNodeErrors: () => {
|
||||
set((state) => ({
|
||||
nodes: state.nodes.map((n) => ({
|
||||
...n,
|
||||
data: { ...n.data, errors: undefined },
|
||||
})),
|
||||
}));
|
||||
},
|
||||
}));
|
||||
|
||||
@@ -141,6 +141,7 @@ export function ChatCredentialsSetup({
|
||||
onSelectCredentials={(credMeta) =>
|
||||
handleCredentialSelect(cred.provider, credMeta)
|
||||
}
|
||||
hideIfSingleCredentialAvailable={false}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
"use client";
|
||||
|
||||
import { Skeleton } from "@/components/__legacy__/ui/skeleton";
|
||||
import { Button } from "@/components/atoms/Button/Button";
|
||||
import { Breadcrumbs } from "@/components/molecules/Breadcrumbs/Breadcrumbs";
|
||||
import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard";
|
||||
@@ -10,27 +11,20 @@ import { AgentRunsLoading } from "./components/other/AgentRunsLoading";
|
||||
import { EmptySchedules } from "./components/other/EmptySchedules";
|
||||
import { EmptyTasks } from "./components/other/EmptyTasks";
|
||||
import { EmptyTemplates } from "./components/other/EmptyTemplates";
|
||||
import { EmptyTriggers } from "./components/other/EmptyTriggers";
|
||||
import { SectionWrap } from "./components/other/SectionWrap";
|
||||
import { LoadingSelectedContent } from "./components/selected-views/LoadingSelectedContent";
|
||||
import { SelectedRunView } from "./components/selected-views/SelectedRunView/SelectedRunView";
|
||||
import { SelectedScheduleView } from "./components/selected-views/SelectedScheduleView/SelectedScheduleView";
|
||||
import { SelectedTemplateView } from "./components/selected-views/SelectedTemplateView/SelectedTemplateView";
|
||||
import { SelectedTriggerView } from "./components/selected-views/SelectedTriggerView/SelectedTriggerView";
|
||||
import { SelectedViewLayout } from "./components/selected-views/SelectedViewLayout";
|
||||
import { SidebarRunsList } from "./components/sidebar/SidebarRunsList/SidebarRunsList";
|
||||
import { AGENT_LIBRARY_SECTION_PADDING_X } from "./helpers";
|
||||
import { useNewAgentLibraryView } from "./useNewAgentLibraryView";
|
||||
|
||||
export function NewAgentLibraryView() {
|
||||
const {
|
||||
agentId,
|
||||
agent,
|
||||
ready,
|
||||
activeTemplate,
|
||||
isTemplateLoading,
|
||||
error,
|
||||
hasAnyItems,
|
||||
ready,
|
||||
error,
|
||||
agentId,
|
||||
activeItem,
|
||||
sidebarLoading,
|
||||
activeTab,
|
||||
@@ -38,9 +32,6 @@ export function NewAgentLibraryView() {
|
||||
handleSelectRun,
|
||||
handleCountsChange,
|
||||
handleClearSelectedRun,
|
||||
onRunInitiated,
|
||||
onTriggerSetup,
|
||||
onScheduleCreated,
|
||||
} = useNewAgentLibraryView();
|
||||
|
||||
if (error) {
|
||||
@@ -70,19 +61,14 @@ export function NewAgentLibraryView() {
|
||||
/>
|
||||
</div>
|
||||
<div className="flex min-h-0 flex-1">
|
||||
<EmptyTasks
|
||||
agent={agent}
|
||||
onRun={onRunInitiated}
|
||||
onTriggerSetup={onTriggerSetup}
|
||||
onScheduleCreated={onScheduleCreated}
|
||||
/>
|
||||
<EmptyTasks agent={agent} />
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="mx-4 grid h-full grid-cols-1 gap-0 pt-3 md:ml-4 md:mr-0 md:gap-4 lg:grid-cols-[25%_70%]">
|
||||
<div className="ml-4 grid h-full grid-cols-1 gap-0 pt-3 md:gap-4 lg:grid-cols-[25%_70%]">
|
||||
<SectionWrap className="mb-3 block">
|
||||
<div
|
||||
className={cn(
|
||||
@@ -92,21 +78,16 @@ export function NewAgentLibraryView() {
|
||||
>
|
||||
<RunAgentModal
|
||||
triggerSlot={
|
||||
<Button
|
||||
variant="primary"
|
||||
size="large"
|
||||
className="w-full"
|
||||
disabled={isTemplateLoading && activeTab === "templates"}
|
||||
>
|
||||
<Button variant="primary" size="large" className="w-full">
|
||||
<PlusIcon size={20} /> New task
|
||||
</Button>
|
||||
}
|
||||
agent={agent}
|
||||
onRunCreated={onRunInitiated}
|
||||
onScheduleCreated={onScheduleCreated}
|
||||
onTriggerSetup={onTriggerSetup}
|
||||
initialInputValues={activeTemplate?.inputs}
|
||||
initialInputCredentials={activeTemplate?.credentials}
|
||||
agentId={agent.id.toString()}
|
||||
onRunCreated={(execution) => handleSelectRun(execution.id, "runs")}
|
||||
onScheduleCreated={(schedule) =>
|
||||
handleSelectRun(schedule.id, "scheduled")
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
|
||||
@@ -120,60 +101,49 @@ export function NewAgentLibraryView() {
|
||||
/>
|
||||
</SectionWrap>
|
||||
|
||||
{activeItem ? (
|
||||
activeTab === "scheduled" ? (
|
||||
<SelectedScheduleView
|
||||
agent={agent}
|
||||
scheduleId={activeItem}
|
||||
onClearSelectedRun={handleClearSelectedRun}
|
||||
<SectionWrap className="mb-3">
|
||||
<div
|
||||
className={`${AGENT_LIBRARY_SECTION_PADDING_X} border-b border-zinc-100 pb-4`}
|
||||
>
|
||||
<Breadcrumbs
|
||||
items={[
|
||||
{ name: "My Library", link: "/library" },
|
||||
{ name: agent.name, link: `/library/agents/${agentId}` },
|
||||
]}
|
||||
/>
|
||||
) : activeTab === "templates" ? (
|
||||
<SelectedTemplateView
|
||||
agent={agent}
|
||||
templateId={activeItem}
|
||||
onClearSelectedRun={handleClearSelectedRun}
|
||||
onRunCreated={(execution) => handleSelectRun(execution.id, "runs")}
|
||||
onSwitchToRunsTab={() => setActiveTab("runs")}
|
||||
/>
|
||||
) : activeTab === "triggers" ? (
|
||||
<SelectedTriggerView
|
||||
agent={agent}
|
||||
triggerId={activeItem}
|
||||
onClearSelectedRun={handleClearSelectedRun}
|
||||
onSwitchToRunsTab={() => setActiveTab("runs")}
|
||||
/>
|
||||
) : (
|
||||
<SelectedRunView
|
||||
agent={agent}
|
||||
runId={activeItem}
|
||||
onSelectRun={handleSelectRun}
|
||||
onClearSelectedRun={handleClearSelectedRun}
|
||||
/>
|
||||
)
|
||||
) : sidebarLoading ? (
|
||||
<LoadingSelectedContent agentName={agent.name} agentId={agent.id} />
|
||||
) : activeTab === "scheduled" ? (
|
||||
<SelectedViewLayout agentName={agent.name} agentId={agent.id}>
|
||||
<EmptySchedules />
|
||||
</SelectedViewLayout>
|
||||
) : activeTab === "templates" ? (
|
||||
<SelectedViewLayout agentName={agent.name} agentId={agent.id}>
|
||||
<EmptyTemplates />
|
||||
</SelectedViewLayout>
|
||||
) : activeTab === "triggers" ? (
|
||||
<SelectedViewLayout agentName={agent.name} agentId={agent.id}>
|
||||
<EmptyTriggers />
|
||||
</SelectedViewLayout>
|
||||
) : (
|
||||
<SelectedViewLayout agentName={agent.name} agentId={agent.id}>
|
||||
<EmptyTasks
|
||||
agent={agent}
|
||||
onRun={onRunInitiated}
|
||||
onTriggerSetup={onTriggerSetup}
|
||||
onScheduleCreated={onScheduleCreated}
|
||||
/>
|
||||
</SelectedViewLayout>
|
||||
)}
|
||||
</div>
|
||||
<div className="flex min-h-0 flex-1 flex-col">
|
||||
{activeItem ? (
|
||||
activeTab === "scheduled" ? (
|
||||
<SelectedScheduleView
|
||||
agent={agent}
|
||||
scheduleId={activeItem}
|
||||
onClearSelectedRun={handleClearSelectedRun}
|
||||
/>
|
||||
) : (
|
||||
<SelectedRunView
|
||||
agent={agent}
|
||||
runId={activeItem}
|
||||
onSelectRun={handleSelectRun}
|
||||
onClearSelectedRun={handleClearSelectedRun}
|
||||
/>
|
||||
)
|
||||
) : sidebarLoading ? (
|
||||
<div className="flex flex-col gap-4">
|
||||
<Skeleton className="h-8 w-full bg-slate-100" />
|
||||
<Skeleton className="h-12 w-full bg-slate-100" />
|
||||
<Skeleton className="h-64 w-full bg-slate-100" />
|
||||
<Skeleton className="h-32 w-full bg-slate-100" />
|
||||
</div>
|
||||
) : activeTab === "scheduled" ? (
|
||||
<EmptySchedules />
|
||||
) : activeTab === "templates" ? (
|
||||
<EmptyTemplates />
|
||||
) : (
|
||||
<EmptyTasks agent={agent} />
|
||||
)}
|
||||
</div>
|
||||
</SectionWrap>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,14 +1,13 @@
|
||||
"use client";
|
||||
|
||||
import React from "react";
|
||||
import type { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
|
||||
import type {
|
||||
BlockIOSubSchema,
|
||||
CredentialsMetaInput,
|
||||
} from "@/lib/autogpt-server-api/types";
|
||||
import { CredentialsInput } from "../CredentialsInputs/CredentialsInputs";
|
||||
import type { CredentialsMetaInput } from "@/lib/autogpt-server-api/types";
|
||||
import { toDisplayName } from "@/providers/agent-credentials/helper";
|
||||
import {
|
||||
getAgentCredentialsFields,
|
||||
getAgentInputFields,
|
||||
getCredentialTypeDisplayName,
|
||||
renderValue,
|
||||
} from "./helpers";
|
||||
|
||||
@@ -23,21 +22,13 @@ export function AgentInputsReadOnly({
|
||||
inputs,
|
||||
credentialInputs,
|
||||
}: Props) {
|
||||
const inputFields = getAgentInputFields(agent);
|
||||
const credentialFieldEntries = Object.entries(
|
||||
getAgentCredentialsFields(agent),
|
||||
);
|
||||
const fields = getAgentInputFields(agent);
|
||||
const credentialFields = getAgentCredentialsFields(agent);
|
||||
const inputEntries = Object.entries(fields);
|
||||
const credentialEntries = Object.entries(credentialFields);
|
||||
|
||||
// Take actual input entries as leading; augment with schema from input fields.
|
||||
// TODO: ensure consistent ordering.
|
||||
const inputEntries =
|
||||
inputs &&
|
||||
Object.entries(inputs).map<[string, [BlockIOSubSchema | undefined, any]]>(
|
||||
([k, v]) => [k, [inputFields[k], v]],
|
||||
);
|
||||
|
||||
const hasInputs = inputEntries && inputEntries.length > 0;
|
||||
const hasCredentials = credentialInputs && credentialFieldEntries.length > 0;
|
||||
const hasInputs = inputs && inputEntries.length > 0;
|
||||
const hasCredentials = credentialInputs && credentialEntries.length > 0;
|
||||
|
||||
if (!hasInputs && !hasCredentials) {
|
||||
return <div className="text-neutral-600">No input for this run.</div>;
|
||||
@@ -48,13 +39,11 @@ export function AgentInputsReadOnly({
|
||||
{/* Regular inputs */}
|
||||
{hasInputs && (
|
||||
<div className="flex flex-col gap-4">
|
||||
{inputEntries.map(([key, [schema, value]]) => (
|
||||
{inputEntries.map(([key, sub]) => (
|
||||
<div key={key} className="flex flex-col gap-1.5">
|
||||
<label className="text-sm font-medium">
|
||||
{schema?.title || key}
|
||||
</label>
|
||||
<label className="text-sm font-medium">{sub?.title || key}</label>
|
||||
<p className="whitespace-pre-wrap break-words text-sm text-neutral-700">
|
||||
{renderValue(value)}
|
||||
{renderValue((inputs as Record<string, any>)[key])}
|
||||
</p>
|
||||
</div>
|
||||
))}
|
||||
@@ -65,18 +54,32 @@ export function AgentInputsReadOnly({
|
||||
{hasCredentials && (
|
||||
<div className="flex flex-col gap-6">
|
||||
{hasInputs && <div className="border-t border-neutral-200 pt-4" />}
|
||||
{credentialFieldEntries.map(([key, inputSubSchema]) => {
|
||||
{credentialEntries.map(([key, _sub]) => {
|
||||
const credential = credentialInputs![key];
|
||||
if (!credential) return null;
|
||||
|
||||
return (
|
||||
<CredentialsInput
|
||||
key={key}
|
||||
schema={{ ...inputSubSchema, discriminator: undefined } as any}
|
||||
selectedCredentials={credential}
|
||||
onSelectCredentials={() => {}}
|
||||
readOnly={true}
|
||||
/>
|
||||
<div key={key} className="flex flex-col gap-4">
|
||||
<h3 className="text-lg font-medium text-neutral-900">
|
||||
{toDisplayName(credential.provider)} credentials
|
||||
</h3>
|
||||
<div className="flex flex-col gap-3">
|
||||
<div className="flex items-center justify-between text-sm">
|
||||
<span className="text-neutral-600">Name</span>
|
||||
<span className="text-neutral-600">
|
||||
{getCredentialTypeDisplayName(credential.type)}
|
||||
</span>
|
||||
</div>
|
||||
<div className="flex items-center justify-between text-sm">
|
||||
<span className="text-neutral-900">
|
||||
{credential.title || "Untitled"}
|
||||
</span>
|
||||
<span className="font-mono text-neutral-400">
|
||||
{"*".repeat(25)}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
|
||||
@@ -13,8 +13,7 @@ export function getCredentialTypeDisplayName(type: string): string {
|
||||
}
|
||||
|
||||
export function getAgentInputFields(agent: LibraryAgent): Record<string, any> {
|
||||
const schema = (agent.trigger_setup_info?.config_schema ??
|
||||
agent.input_schema) as unknown as {
|
||||
const schema = agent.input_schema as unknown as {
|
||||
properties?: Record<string, any>;
|
||||
} | null;
|
||||
if (!schema || !schema.properties) return {};
|
||||
|
||||
@@ -1,59 +1,189 @@
|
||||
import {
|
||||
IconKey,
|
||||
IconKeyPlus,
|
||||
IconUserPlus,
|
||||
} from "@/components/__legacy__/ui/icons";
|
||||
import {
|
||||
Select,
|
||||
SelectContent,
|
||||
SelectItem,
|
||||
SelectSeparator,
|
||||
SelectTrigger,
|
||||
SelectValue,
|
||||
} from "@/components/__legacy__/ui/select";
|
||||
import { Button } from "@/components/atoms/Button/Button";
|
||||
import { Text } from "@/components/atoms/Text/Text";
|
||||
import { InformationTooltip } from "@/components/molecules/InformationTooltip/InformationTooltip";
|
||||
import useCredentials from "@/hooks/useCredentials";
|
||||
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
|
||||
import {
|
||||
BlockIOCredentialsSubSchema,
|
||||
CredentialsMetaInput,
|
||||
} from "@/lib/autogpt-server-api/types";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { toDisplayName } from "@/providers/agent-credentials/helper";
|
||||
import { APIKeyCredentialsModal } from "./components/APIKeyCredentialsModal/APIKeyCredentialsModal";
|
||||
import { CredentialRow } from "./components/CredentialRow/CredentialRow";
|
||||
import { CredentialsSelect } from "./components/CredentialsSelect/CredentialsSelect";
|
||||
import { DeleteConfirmationModal } from "./components/DeleteConfirmationModal/DeleteConfirmationModal";
|
||||
import { HostScopedCredentialsModal } from "./components/HotScopedCredentialsModal/HotScopedCredentialsModal";
|
||||
import { OAuthFlowWaitingModal } from "./components/OAuthWaitingModal/OAuthWaitingModal";
|
||||
import { PasswordCredentialsModal } from "./components/PasswordCredentialsModal/PasswordCredentialsModal";
|
||||
import { getCredentialDisplayName } from "./helpers";
|
||||
import { useCredentialsInputs } from "./useCredentialsInputs";
|
||||
import { getHostFromUrl } from "@/lib/utils/url";
|
||||
import { NotionLogoIcon } from "@radix-ui/react-icons";
|
||||
import { FC, useEffect, useMemo, useState } from "react";
|
||||
import {
|
||||
FaDiscord,
|
||||
FaGithub,
|
||||
FaGoogle,
|
||||
FaHubspot,
|
||||
FaKey,
|
||||
FaMedium,
|
||||
FaTwitter,
|
||||
} from "react-icons/fa";
|
||||
import { APIKeyCredentialsModal } from "./APIKeyCredentialsModal/APIKeyCredentialsModal";
|
||||
import { HostScopedCredentialsModal } from "./HotScopedCredentialsModal/HotScopedCredentialsModal";
|
||||
import { OAuthFlowWaitingModal } from "./OAuthWaitingModal/OAuthWaitingModal";
|
||||
import { PasswordCredentialsModal } from "./PasswordCredentialsModal/PasswordCredentialsModal";
|
||||
|
||||
type UseCredentialsInputsReturn = ReturnType<typeof useCredentialsInputs>;
|
||||
const fallbackIcon = FaKey;
|
||||
|
||||
function isLoaded(
|
||||
data: UseCredentialsInputsReturn,
|
||||
): data is Extract<UseCredentialsInputsReturn, { isLoading: false }> {
|
||||
return data.isLoading === false;
|
||||
}
|
||||
// --8<-- [start:ProviderIconsEmbed]
|
||||
// Provider icons mapping - uses fallback for unknown providers
|
||||
export const providerIcons: Partial<
|
||||
Record<string, React.FC<{ className?: string }>>
|
||||
> = {
|
||||
aiml_api: fallbackIcon,
|
||||
anthropic: fallbackIcon,
|
||||
apollo: fallbackIcon,
|
||||
e2b: fallbackIcon,
|
||||
github: FaGithub,
|
||||
google: FaGoogle,
|
||||
groq: fallbackIcon,
|
||||
http: fallbackIcon,
|
||||
notion: NotionLogoIcon,
|
||||
nvidia: fallbackIcon,
|
||||
discord: FaDiscord,
|
||||
d_id: fallbackIcon,
|
||||
google_maps: FaGoogle,
|
||||
jina: fallbackIcon,
|
||||
ideogram: fallbackIcon,
|
||||
linear: fallbackIcon,
|
||||
medium: FaMedium,
|
||||
mem0: fallbackIcon,
|
||||
ollama: fallbackIcon,
|
||||
openai: fallbackIcon,
|
||||
openweathermap: fallbackIcon,
|
||||
open_router: fallbackIcon,
|
||||
llama_api: fallbackIcon,
|
||||
pinecone: fallbackIcon,
|
||||
enrichlayer: fallbackIcon,
|
||||
slant3d: fallbackIcon,
|
||||
screenshotone: fallbackIcon,
|
||||
smtp: fallbackIcon,
|
||||
replicate: fallbackIcon,
|
||||
reddit: fallbackIcon,
|
||||
fal: fallbackIcon,
|
||||
revid: fallbackIcon,
|
||||
twitter: FaTwitter,
|
||||
unreal_speech: fallbackIcon,
|
||||
exa: fallbackIcon,
|
||||
hubspot: FaHubspot,
|
||||
smartlead: fallbackIcon,
|
||||
todoist: fallbackIcon,
|
||||
zerobounce: fallbackIcon,
|
||||
};
|
||||
// --8<-- [end:ProviderIconsEmbed]
|
||||
|
||||
type Props = {
|
||||
export type OAuthPopupResultMessage = { message_type: "oauth_popup_result" } & (
|
||||
| {
|
||||
success: true;
|
||||
code: string;
|
||||
state: string;
|
||||
}
|
||||
| {
|
||||
success: false;
|
||||
message: string;
|
||||
}
|
||||
);
|
||||
|
||||
export const CredentialsInput: FC<{
|
||||
schema: BlockIOCredentialsSubSchema;
|
||||
className?: string;
|
||||
selectedCredentials?: CredentialsMetaInput;
|
||||
siblingInputs?: Record<string, any>;
|
||||
onSelectCredentials: (newValue?: CredentialsMetaInput) => void;
|
||||
siblingInputs?: Record<string, any>;
|
||||
hideIfSingleCredentialAvailable?: boolean;
|
||||
onLoaded?: (loaded: boolean) => void;
|
||||
readOnly?: boolean;
|
||||
};
|
||||
|
||||
export function CredentialsInput({
|
||||
}> = ({
|
||||
schema,
|
||||
className,
|
||||
selectedCredentials,
|
||||
onSelectCredentials,
|
||||
siblingInputs,
|
||||
hideIfSingleCredentialAvailable = true,
|
||||
onLoaded,
|
||||
readOnly = false,
|
||||
}: Props) {
|
||||
const hookData = useCredentialsInputs({
|
||||
schema,
|
||||
selectedCredentials,
|
||||
onSelectCredentials,
|
||||
siblingInputs,
|
||||
onLoaded,
|
||||
readOnly,
|
||||
});
|
||||
}) => {
|
||||
const [isAPICredentialsModalOpen, setAPICredentialsModalOpen] =
|
||||
useState(false);
|
||||
const [
|
||||
isUserPasswordCredentialsModalOpen,
|
||||
setUserPasswordCredentialsModalOpen,
|
||||
] = useState(false);
|
||||
const [isHostScopedCredentialsModalOpen, setHostScopedCredentialsModalOpen] =
|
||||
useState(false);
|
||||
const [isOAuth2FlowInProgress, setOAuth2FlowInProgress] = useState(false);
|
||||
const [oAuthPopupController, setOAuthPopupController] =
|
||||
useState<AbortController | null>(null);
|
||||
const [oAuthError, setOAuthError] = useState<string | null>(null);
|
||||
|
||||
if (!isLoaded(hookData)) {
|
||||
const api = useBackendAPI();
|
||||
const credentials = useCredentials(schema, siblingInputs);
|
||||
|
||||
// Report loaded state to parent
|
||||
useEffect(() => {
|
||||
if (onLoaded) {
|
||||
onLoaded(Boolean(credentials && credentials.isLoading === false));
|
||||
}
|
||||
}, [credentials, onLoaded]);
|
||||
|
||||
// Deselect credentials if they do not exist (e.g. provider was changed)
|
||||
useEffect(() => {
|
||||
if (!credentials || !("savedCredentials" in credentials)) return;
|
||||
if (
|
||||
selectedCredentials &&
|
||||
!credentials.savedCredentials.some((c) => c.id === selectedCredentials.id)
|
||||
) {
|
||||
onSelectCredentials(undefined);
|
||||
}
|
||||
}, [credentials, selectedCredentials, onSelectCredentials]);
|
||||
|
||||
const { hasRelevantCredentials, singleCredential } = useMemo(() => {
|
||||
if (!credentials || !("savedCredentials" in credentials)) {
|
||||
return {
|
||||
hasRelevantCredentials: false,
|
||||
singleCredential: null,
|
||||
};
|
||||
}
|
||||
|
||||
// Simple logic: if we have any saved credentials, we have relevant credentials
|
||||
const hasRelevant = credentials.savedCredentials.length > 0;
|
||||
|
||||
// Auto-select single credential if only one exists
|
||||
const single =
|
||||
credentials.savedCredentials.length === 1
|
||||
? credentials.savedCredentials[0]
|
||||
: null;
|
||||
|
||||
return {
|
||||
hasRelevantCredentials: hasRelevant,
|
||||
singleCredential: single,
|
||||
};
|
||||
}, [credentials]);
|
||||
|
||||
// If only 1 credential is available, auto-select it and hide this input
|
||||
useEffect(() => {
|
||||
if (singleCredential && !selectedCredentials) {
|
||||
onSelectCredentials(singleCredential);
|
||||
}
|
||||
}, [singleCredential, selectedCredentials, onSelectCredentials]);
|
||||
|
||||
if (
|
||||
!credentials ||
|
||||
credentials.isLoading ||
|
||||
(singleCredential && hideIfSingleCredentialAvailable)
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -64,158 +194,309 @@ export function CredentialsInput({
|
||||
supportsOAuth2,
|
||||
supportsUserPassword,
|
||||
supportsHostScoped,
|
||||
credentialsToShow,
|
||||
oAuthError,
|
||||
isAPICredentialsModalOpen,
|
||||
isUserPasswordCredentialsModalOpen,
|
||||
isHostScopedCredentialsModalOpen,
|
||||
isOAuth2FlowInProgress,
|
||||
oAuthPopupController,
|
||||
credentialToDelete,
|
||||
deleteCredentialsMutation,
|
||||
actionButtonText,
|
||||
setAPICredentialsModalOpen,
|
||||
setUserPasswordCredentialsModalOpen,
|
||||
setHostScopedCredentialsModalOpen,
|
||||
setCredentialToDelete,
|
||||
handleActionButtonClick,
|
||||
handleCredentialSelect,
|
||||
handleDeleteCredential,
|
||||
handleDeleteConfirm,
|
||||
} = hookData;
|
||||
savedCredentials,
|
||||
oAuthCallback,
|
||||
} = credentials;
|
||||
|
||||
const displayName = toDisplayName(provider);
|
||||
const hasCredentialsToShow = credentialsToShow.length > 0;
|
||||
async function handleOAuthLogin() {
|
||||
setOAuthError(null);
|
||||
const { login_url, state_token } = await api.oAuthLogin(
|
||||
provider,
|
||||
schema.credentials_scopes,
|
||||
);
|
||||
setOAuth2FlowInProgress(true);
|
||||
const popup = window.open(login_url, "_blank", "popup=true");
|
||||
|
||||
return (
|
||||
<div className={cn("mb-6", className)}>
|
||||
<div className="mb-2 flex items-center gap-2">
|
||||
<Text variant="large-medium">{displayName} credentials</Text>
|
||||
{schema.description && (
|
||||
<InformationTooltip description={schema.description} />
|
||||
)}
|
||||
</div>
|
||||
if (!popup) {
|
||||
throw new Error(
|
||||
"Failed to open popup window. Please allow popups for this site.",
|
||||
);
|
||||
}
|
||||
|
||||
{hasCredentialsToShow ? (
|
||||
<>
|
||||
{credentialsToShow.length > 1 && !readOnly ? (
|
||||
<CredentialsSelect
|
||||
credentials={credentialsToShow}
|
||||
provider={provider}
|
||||
displayName={displayName}
|
||||
selectedCredentials={selectedCredentials}
|
||||
onSelectCredential={handleCredentialSelect}
|
||||
readOnly={readOnly}
|
||||
/>
|
||||
) : (
|
||||
<div className="mb-4 space-y-2">
|
||||
{credentialsToShow.map((credential) => {
|
||||
return (
|
||||
<CredentialRow
|
||||
key={credential.id}
|
||||
credential={credential}
|
||||
provider={provider}
|
||||
displayName={displayName}
|
||||
onSelect={() => handleCredentialSelect(credential.id)}
|
||||
onDelete={() =>
|
||||
handleDeleteCredential({
|
||||
id: credential.id,
|
||||
title: getCredentialDisplayName(
|
||||
credential,
|
||||
displayName,
|
||||
),
|
||||
})
|
||||
}
|
||||
readOnly={readOnly}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
)}
|
||||
{!readOnly && (
|
||||
<Button
|
||||
variant="secondary"
|
||||
size="small"
|
||||
onClick={handleActionButtonClick}
|
||||
className="w-fit"
|
||||
>
|
||||
{actionButtonText}
|
||||
const controller = new AbortController();
|
||||
setOAuthPopupController(controller);
|
||||
controller.signal.onabort = () => {
|
||||
console.debug("OAuth flow aborted");
|
||||
setOAuth2FlowInProgress(false);
|
||||
popup.close();
|
||||
};
|
||||
|
||||
const handleMessage = async (e: MessageEvent<OAuthPopupResultMessage>) => {
|
||||
console.debug("Message received:", e.data);
|
||||
if (
|
||||
typeof e.data != "object" ||
|
||||
!("message_type" in e.data) ||
|
||||
e.data.message_type !== "oauth_popup_result"
|
||||
) {
|
||||
console.debug("Ignoring irrelevant message");
|
||||
return;
|
||||
}
|
||||
|
||||
if (!e.data.success) {
|
||||
console.error("OAuth flow failed:", e.data.message);
|
||||
setOAuthError(`OAuth flow failed: ${e.data.message}`);
|
||||
setOAuth2FlowInProgress(false);
|
||||
return;
|
||||
}
|
||||
|
||||
if (e.data.state !== state_token) {
|
||||
console.error("Invalid state token received");
|
||||
setOAuthError("Invalid state token received");
|
||||
setOAuth2FlowInProgress(false);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
console.debug("Processing OAuth callback");
|
||||
const credentials = await oAuthCallback(e.data.code, e.data.state);
|
||||
console.debug("OAuth callback processed successfully");
|
||||
onSelectCredentials({
|
||||
id: credentials.id,
|
||||
type: "oauth2",
|
||||
title: credentials.title,
|
||||
provider,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error("Error in OAuth callback:", error);
|
||||
setOAuthError(
|
||||
// type of error is unkown so we need to use String(error)
|
||||
`Error in OAuth callback: ${
|
||||
error instanceof Error ? error.message : String(error)
|
||||
}`,
|
||||
);
|
||||
} finally {
|
||||
console.debug("Finalizing OAuth flow");
|
||||
setOAuth2FlowInProgress(false);
|
||||
controller.abort("success");
|
||||
}
|
||||
};
|
||||
|
||||
console.debug("Adding message event listener");
|
||||
window.addEventListener("message", handleMessage, {
|
||||
signal: controller.signal,
|
||||
});
|
||||
|
||||
setTimeout(
|
||||
() => {
|
||||
console.debug("OAuth flow timed out");
|
||||
controller.abort("timeout");
|
||||
setOAuth2FlowInProgress(false);
|
||||
setOAuthError("OAuth flow timed out");
|
||||
},
|
||||
5 * 60 * 1000,
|
||||
);
|
||||
}
|
||||
|
||||
const ProviderIcon = providerIcons[provider] || fallbackIcon;
|
||||
const modals = (
|
||||
<>
|
||||
{supportsApiKey && (
|
||||
<APIKeyCredentialsModal
|
||||
schema={schema}
|
||||
open={isAPICredentialsModalOpen}
|
||||
onClose={() => setAPICredentialsModalOpen(false)}
|
||||
onCredentialsCreate={(credsMeta) => {
|
||||
onSelectCredentials(credsMeta);
|
||||
setAPICredentialsModalOpen(false);
|
||||
}}
|
||||
siblingInputs={siblingInputs}
|
||||
/>
|
||||
)}
|
||||
{supportsOAuth2 && (
|
||||
<OAuthFlowWaitingModal
|
||||
open={isOAuth2FlowInProgress}
|
||||
onClose={() => oAuthPopupController?.abort("canceled")}
|
||||
providerName={providerName}
|
||||
/>
|
||||
)}
|
||||
{supportsUserPassword && (
|
||||
<PasswordCredentialsModal
|
||||
schema={schema}
|
||||
open={isUserPasswordCredentialsModalOpen}
|
||||
onClose={() => setUserPasswordCredentialsModalOpen(false)}
|
||||
onCredentialsCreate={(creds) => {
|
||||
onSelectCredentials(creds);
|
||||
setUserPasswordCredentialsModalOpen(false);
|
||||
}}
|
||||
siblingInputs={siblingInputs}
|
||||
/>
|
||||
)}
|
||||
{supportsHostScoped && (
|
||||
<HostScopedCredentialsModal
|
||||
schema={schema}
|
||||
open={isHostScopedCredentialsModalOpen}
|
||||
onClose={() => setHostScopedCredentialsModalOpen(false)}
|
||||
onCredentialsCreate={(creds) => {
|
||||
onSelectCredentials(creds);
|
||||
setHostScopedCredentialsModalOpen(false);
|
||||
}}
|
||||
siblingInputs={siblingInputs}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
|
||||
const fieldHeader = (
|
||||
<div className="mb-2 flex gap-1">
|
||||
<span className="text-m green text-gray-900">
|
||||
{providerName} Credentials
|
||||
</span>
|
||||
<InformationTooltip description={schema.description} />
|
||||
</div>
|
||||
);
|
||||
|
||||
// Show credentials creation UI when no relevant credentials exist
|
||||
if (!hasRelevantCredentials) {
|
||||
return (
|
||||
<div className="mb-4">
|
||||
{fieldHeader}
|
||||
|
||||
<div className={cn("flex flex-row space-x-2", className)}>
|
||||
{supportsOAuth2 && (
|
||||
<Button onClick={handleOAuthLogin} size="small">
|
||||
<ProviderIcon className="mr-2 h-4 w-4" />
|
||||
{"Sign in with " + providerName}
|
||||
</Button>
|
||||
)}
|
||||
</>
|
||||
) : (
|
||||
!readOnly && (
|
||||
<Button
|
||||
variant="secondary"
|
||||
size="small"
|
||||
onClick={handleActionButtonClick}
|
||||
className="w-fit"
|
||||
>
|
||||
{actionButtonText}
|
||||
</Button>
|
||||
)
|
||||
)}
|
||||
{supportsApiKey && (
|
||||
<Button
|
||||
onClick={() => setAPICredentialsModalOpen(true)}
|
||||
size="small"
|
||||
>
|
||||
<ProviderIcon className="mr-2 h-4 w-4" />
|
||||
Enter API key
|
||||
</Button>
|
||||
)}
|
||||
{supportsUserPassword && (
|
||||
<Button
|
||||
onClick={() => setUserPasswordCredentialsModalOpen(true)}
|
||||
size="small"
|
||||
>
|
||||
<ProviderIcon className="mr-2 h-4 w-4" />
|
||||
Enter username and password
|
||||
</Button>
|
||||
)}
|
||||
{supportsHostScoped && credentials.discriminatorValue && (
|
||||
<Button
|
||||
onClick={() => setHostScopedCredentialsModalOpen(true)}
|
||||
size="small"
|
||||
>
|
||||
<ProviderIcon className="mr-2 h-4 w-4" />
|
||||
{`Enter sensitive headers for ${getHostFromUrl(credentials.discriminatorValue)}`}
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
{modals}
|
||||
{oAuthError && (
|
||||
<div className="mt-2 text-red-500">Error: {oAuthError}</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
{!readOnly && (
|
||||
<>
|
||||
{supportsApiKey ? (
|
||||
<APIKeyCredentialsModal
|
||||
schema={schema}
|
||||
open={isAPICredentialsModalOpen}
|
||||
onClose={() => setAPICredentialsModalOpen(false)}
|
||||
onCredentialsCreate={(credsMeta) => {
|
||||
onSelectCredentials(credsMeta);
|
||||
setAPICredentialsModalOpen(false);
|
||||
}}
|
||||
siblingInputs={siblingInputs}
|
||||
/>
|
||||
) : null}
|
||||
{supportsOAuth2 ? (
|
||||
<OAuthFlowWaitingModal
|
||||
open={isOAuth2FlowInProgress}
|
||||
onClose={() => oAuthPopupController?.abort("canceled")}
|
||||
providerName={providerName}
|
||||
/>
|
||||
) : null}
|
||||
{supportsUserPassword ? (
|
||||
<PasswordCredentialsModal
|
||||
schema={schema}
|
||||
open={isUserPasswordCredentialsModalOpen}
|
||||
onClose={() => setUserPasswordCredentialsModalOpen(false)}
|
||||
onCredentialsCreate={(creds) => {
|
||||
onSelectCredentials(creds);
|
||||
setUserPasswordCredentialsModalOpen(false);
|
||||
}}
|
||||
siblingInputs={siblingInputs}
|
||||
/>
|
||||
) : null}
|
||||
{supportsHostScoped ? (
|
||||
<HostScopedCredentialsModal
|
||||
schema={schema}
|
||||
open={isHostScopedCredentialsModalOpen}
|
||||
onClose={() => setHostScopedCredentialsModalOpen(false)}
|
||||
onCredentialsCreate={(creds) => {
|
||||
onSelectCredentials(creds);
|
||||
setHostScopedCredentialsModalOpen(false);
|
||||
}}
|
||||
siblingInputs={siblingInputs}
|
||||
/>
|
||||
) : null}
|
||||
function handleValueChange(newValue: string) {
|
||||
if (newValue === "sign-in") {
|
||||
// Trigger OAuth2 sign in flow
|
||||
handleOAuthLogin();
|
||||
} else if (newValue === "add-api-key") {
|
||||
// Open API key dialog
|
||||
setAPICredentialsModalOpen(true);
|
||||
} else if (newValue === "add-user-password") {
|
||||
// Open user password dialog
|
||||
setUserPasswordCredentialsModalOpen(true);
|
||||
} else if (newValue === "add-host-scoped") {
|
||||
// Open host-scoped credentials dialog
|
||||
setHostScopedCredentialsModalOpen(true);
|
||||
} else {
|
||||
const selectedCreds = savedCredentials.find((c) => c.id == newValue)!;
|
||||
|
||||
{oAuthError ? (
|
||||
<Text variant="body" className="mt-2 text-red-500">
|
||||
Error: {oAuthError}
|
||||
</Text>
|
||||
) : null}
|
||||
onSelectCredentials({
|
||||
id: selectedCreds.id,
|
||||
type: selectedCreds.type,
|
||||
provider: provider,
|
||||
// title: customTitle, // TODO: add input for title
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
<DeleteConfirmationModal
|
||||
credentialToDelete={credentialToDelete}
|
||||
isDeleting={deleteCredentialsMutation.isPending}
|
||||
onClose={() => setCredentialToDelete(null)}
|
||||
onConfirm={handleDeleteConfirm}
|
||||
/>
|
||||
</>
|
||||
// Saved credentials exist
|
||||
return (
|
||||
<div>
|
||||
{fieldHeader}
|
||||
|
||||
<Select value={selectedCredentials?.id} onValueChange={handleValueChange}>
|
||||
<SelectTrigger>
|
||||
<SelectValue placeholder={schema.placeholder} />
|
||||
</SelectTrigger>
|
||||
<SelectContent className="nodrag">
|
||||
{savedCredentials
|
||||
.filter((c) => c.type == "oauth2")
|
||||
.map((credentials, index) => (
|
||||
<SelectItem key={index} value={credentials.id}>
|
||||
<ProviderIcon className="mr-2 inline h-4 w-4" />
|
||||
{credentials.title ||
|
||||
credentials.username ||
|
||||
`Your ${providerName} account`}
|
||||
</SelectItem>
|
||||
))}
|
||||
{savedCredentials
|
||||
.filter((c) => c.type == "api_key")
|
||||
.map((credentials, index) => (
|
||||
<SelectItem key={index} value={credentials.id}>
|
||||
<ProviderIcon className="mr-2 inline h-4 w-4" />
|
||||
<IconKey className="mr-1.5 inline" />
|
||||
{credentials.title}
|
||||
</SelectItem>
|
||||
))}
|
||||
{savedCredentials
|
||||
.filter((c) => c.type == "user_password")
|
||||
.map((credentials, index) => (
|
||||
<SelectItem key={index} value={credentials.id}>
|
||||
<ProviderIcon className="mr-2 inline h-4 w-4" />
|
||||
<IconUserPlus className="mr-1.5 inline" />
|
||||
{credentials.title}
|
||||
</SelectItem>
|
||||
))}
|
||||
{savedCredentials
|
||||
.filter((c) => c.type == "host_scoped")
|
||||
.map((credentials, index) => (
|
||||
<SelectItem key={index} value={credentials.id}>
|
||||
<ProviderIcon className="mr-2 inline h-4 w-4" />
|
||||
<IconKey className="mr-1.5 inline" />
|
||||
{credentials.title}
|
||||
</SelectItem>
|
||||
))}
|
||||
<SelectSeparator />
|
||||
{supportsOAuth2 && (
|
||||
<SelectItem value="sign-in">
|
||||
<IconUserPlus className="mr-1.5 inline" />
|
||||
Sign in with {providerName}
|
||||
</SelectItem>
|
||||
)}
|
||||
{supportsApiKey && (
|
||||
<SelectItem value="add-api-key">
|
||||
<IconKeyPlus className="mr-1.5 inline" />
|
||||
Add new API key
|
||||
</SelectItem>
|
||||
)}
|
||||
{supportsUserPassword && (
|
||||
<SelectItem value="add-user-password">
|
||||
<IconUserPlus className="mr-1.5 inline" />
|
||||
Add new user password
|
||||
</SelectItem>
|
||||
)}
|
||||
{supportsHostScoped && (
|
||||
<SelectItem value="add-host-scoped">
|
||||
<IconKey className="mr-1.5 inline" />
|
||||
Add host-scoped headers
|
||||
</SelectItem>
|
||||
)}
|
||||
</SelectContent>
|
||||
</Select>
|
||||
{modals}
|
||||
{oAuthError && (
|
||||
<div className="mt-2 text-red-500">Error: {oAuthError}</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
import { Form, FormField } from "@/components/__legacy__/ui/form";
|
||||
import { Button } from "@/components/atoms/Button/Button";
|
||||
import { z } from "zod";
|
||||
import { useForm } from "react-hook-form";
|
||||
import { zodResolver } from "@hookform/resolvers/zod";
|
||||
import { Input } from "@/components/atoms/Input/Input";
|
||||
import { Button } from "@/components/atoms/Button/Button";
|
||||
import { Dialog } from "@/components/molecules/Dialog/Dialog";
|
||||
import { Form, FormField } from "@/components/__legacy__/ui/form";
|
||||
import useCredentials from "@/hooks/useCredentials";
|
||||
import {
|
||||
BlockIOCredentialsSubSchema,
|
||||
CredentialsMetaInput,
|
||||
} from "@/lib/autogpt-server-api/types";
|
||||
import { zodResolver } from "@hookform/resolvers/zod";
|
||||
import { useForm } from "react-hook-form";
|
||||
import { z } from "zod";
|
||||
|
||||
type Props = {
|
||||
schema: BlockIOCredentialsSubSchema;
|
||||
@@ -85,7 +85,7 @@ export function PasswordCredentialsModal({
|
||||
<Form {...form}>
|
||||
<form
|
||||
onSubmit={form.handleSubmit(onSubmit)}
|
||||
className="w-[98%] space-y-2 pt-4"
|
||||
className="space-y-2 pt-4"
|
||||
>
|
||||
<FormField
|
||||
control={form.control}
|
||||
@@ -96,6 +96,7 @@ export function PasswordCredentialsModal({
|
||||
label="Username"
|
||||
type="text"
|
||||
placeholder="Enter username..."
|
||||
size="small"
|
||||
{...field}
|
||||
/>
|
||||
)}
|
||||
@@ -109,6 +110,7 @@ export function PasswordCredentialsModal({
|
||||
label="Password"
|
||||
type="password"
|
||||
placeholder="Enter password..."
|
||||
size="small"
|
||||
{...field}
|
||||
/>
|
||||
)}
|
||||
@@ -122,12 +124,12 @@ export function PasswordCredentialsModal({
|
||||
label="Name"
|
||||
type="text"
|
||||
placeholder="Enter a name for this user login..."
|
||||
className="mb-8"
|
||||
size="small"
|
||||
{...field}
|
||||
/>
|
||||
)}
|
||||
/>
|
||||
<Button type="submit" className="w-full">
|
||||
<Button type="submit" size="small" className="min-w-68">
|
||||
Save & use this user login
|
||||
</Button>
|
||||
</form>
|
||||
@@ -1,102 +0,0 @@
|
||||
import { IconKey } from "@/components/__legacy__/ui/icons";
|
||||
import { Text } from "@/components/atoms/Text/Text";
|
||||
import {
|
||||
DropdownMenu,
|
||||
DropdownMenuContent,
|
||||
DropdownMenuItem,
|
||||
DropdownMenuTrigger,
|
||||
} from "@/components/molecules/DropdownMenu/DropdownMenu";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { CaretDown, DotsThreeVertical } from "@phosphor-icons/react";
|
||||
import {
|
||||
fallbackIcon,
|
||||
getCredentialDisplayName,
|
||||
MASKED_KEY_LENGTH,
|
||||
providerIcons,
|
||||
} from "../../helpers";
|
||||
|
||||
type CredentialRowProps = {
|
||||
credential: {
|
||||
id: string;
|
||||
title?: string;
|
||||
username?: string;
|
||||
type: string;
|
||||
provider: string;
|
||||
};
|
||||
provider: string;
|
||||
displayName: string;
|
||||
onSelect: () => void;
|
||||
onDelete: () => void;
|
||||
readOnly?: boolean;
|
||||
showCaret?: boolean;
|
||||
asSelectTrigger?: boolean;
|
||||
};
|
||||
|
||||
export function CredentialRow({
|
||||
credential,
|
||||
provider,
|
||||
displayName,
|
||||
onSelect,
|
||||
onDelete,
|
||||
readOnly = false,
|
||||
showCaret = false,
|
||||
asSelectTrigger = false,
|
||||
}: CredentialRowProps) {
|
||||
const ProviderIcon = providerIcons[provider] || fallbackIcon;
|
||||
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
"flex items-center gap-3 rounded-medium border border-zinc-200 bg-white p-3 transition-colors",
|
||||
asSelectTrigger ? "border-0 bg-transparent" : readOnly ? "w-fit" : "",
|
||||
)}
|
||||
onClick={readOnly || showCaret || asSelectTrigger ? undefined : onSelect}
|
||||
style={
|
||||
readOnly || showCaret || asSelectTrigger
|
||||
? { cursor: showCaret || asSelectTrigger ? "pointer" : "default" }
|
||||
: undefined
|
||||
}
|
||||
>
|
||||
<div className="flex h-6 w-6 shrink-0 items-center justify-center rounded-full bg-gray-900">
|
||||
<ProviderIcon className="h-3 w-3 text-white" />
|
||||
</div>
|
||||
<IconKey className="h-5 w-5 shrink-0 text-zinc-800" />
|
||||
<div className="flex min-w-0 flex-1 flex-nowrap items-center gap-4">
|
||||
<Text variant="body" className="tracking-tight">
|
||||
{getCredentialDisplayName(credential, displayName)}
|
||||
</Text>
|
||||
<Text
|
||||
variant="large"
|
||||
className="relative top-1 font-mono tracking-tight"
|
||||
>
|
||||
{"*".repeat(MASKED_KEY_LENGTH)}
|
||||
</Text>
|
||||
</div>
|
||||
{showCaret && !asSelectTrigger && (
|
||||
<CaretDown className="h-4 w-4 shrink-0 text-gray-400" />
|
||||
)}
|
||||
{!readOnly && !showCaret && !asSelectTrigger && (
|
||||
<DropdownMenu>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<button
|
||||
className="ml-auto shrink-0 rounded p-1 hover:bg-gray-100"
|
||||
onClick={(e) => e.stopPropagation()}
|
||||
>
|
||||
<DotsThreeVertical className="h-5 w-5 text-gray-400" />
|
||||
</button>
|
||||
</DropdownMenuTrigger>
|
||||
<DropdownMenuContent align="end">
|
||||
<DropdownMenuItem
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
onDelete();
|
||||
}}
|
||||
>
|
||||
Delete
|
||||
</DropdownMenuItem>
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -1,86 +0,0 @@
|
||||
import {
|
||||
Select,
|
||||
SelectContent,
|
||||
SelectItem,
|
||||
SelectTrigger,
|
||||
SelectValue,
|
||||
} from "@/components/__legacy__/ui/select";
|
||||
import { Text } from "@/components/atoms/Text/Text";
|
||||
import { CredentialsMetaInput } from "@/lib/autogpt-server-api/types";
|
||||
import { useEffect } from "react";
|
||||
import { getCredentialDisplayName } from "../../helpers";
|
||||
import { CredentialRow } from "../CredentialRow/CredentialRow";
|
||||
|
||||
interface Props {
|
||||
credentials: Array<{
|
||||
id: string;
|
||||
title?: string;
|
||||
username?: string;
|
||||
type: string;
|
||||
provider: string;
|
||||
}>;
|
||||
provider: string;
|
||||
displayName: string;
|
||||
selectedCredentials?: CredentialsMetaInput;
|
||||
onSelectCredential: (credentialId: string) => void;
|
||||
readOnly?: boolean;
|
||||
}
|
||||
|
||||
export function CredentialsSelect({
|
||||
credentials,
|
||||
provider,
|
||||
displayName,
|
||||
selectedCredentials,
|
||||
onSelectCredential,
|
||||
readOnly = false,
|
||||
}: Props) {
|
||||
// Auto-select first credential if none is selected
|
||||
useEffect(() => {
|
||||
if (!selectedCredentials && credentials.length > 0) {
|
||||
onSelectCredential(credentials[0].id);
|
||||
}
|
||||
}, [selectedCredentials, credentials, onSelectCredential]);
|
||||
|
||||
return (
|
||||
<div className="mb-4 w-full">
|
||||
<Select
|
||||
value={selectedCredentials?.id || ""}
|
||||
onValueChange={(value) => onSelectCredential(value)}
|
||||
>
|
||||
<SelectTrigger className="h-auto min-h-12 w-full rounded-medium border-zinc-200 p-0 pr-4 shadow-none">
|
||||
{selectedCredentials ? (
|
||||
<SelectValue key={selectedCredentials.id} asChild>
|
||||
<CredentialRow
|
||||
credential={{
|
||||
id: selectedCredentials.id,
|
||||
title: selectedCredentials.title || undefined,
|
||||
type: selectedCredentials.type,
|
||||
provider: selectedCredentials.provider,
|
||||
}}
|
||||
provider={provider}
|
||||
displayName={displayName}
|
||||
onSelect={() => {}}
|
||||
onDelete={() => {}}
|
||||
readOnly={readOnly}
|
||||
asSelectTrigger={true}
|
||||
/>
|
||||
</SelectValue>
|
||||
) : (
|
||||
<SelectValue key="placeholder" placeholder="Select credential" />
|
||||
)}
|
||||
</SelectTrigger>
|
||||
<SelectContent>
|
||||
{credentials.map((credential) => (
|
||||
<SelectItem key={credential.id} value={credential.id}>
|
||||
<div className="flex items-center gap-2">
|
||||
<Text variant="body" className="tracking-tight">
|
||||
{getCredentialDisplayName(credential, displayName)}
|
||||
</Text>
|
||||
</div>
|
||||
</SelectItem>
|
||||
))}
|
||||
</SelectContent>
|
||||
</Select>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -1,49 +0,0 @@
|
||||
import { Button } from "@/components/atoms/Button/Button";
|
||||
import { Text } from "@/components/atoms/Text/Text";
|
||||
import { Dialog } from "@/components/molecules/Dialog/Dialog";
|
||||
|
||||
interface Props {
|
||||
credentialToDelete: { id: string; title: string } | null;
|
||||
isDeleting: boolean;
|
||||
onClose: () => void;
|
||||
onConfirm: () => void;
|
||||
}
|
||||
|
||||
export function DeleteConfirmationModal({
|
||||
credentialToDelete,
|
||||
isDeleting,
|
||||
onClose,
|
||||
onConfirm,
|
||||
}: Props) {
|
||||
return (
|
||||
<Dialog
|
||||
controlled={{
|
||||
isOpen: credentialToDelete !== null,
|
||||
set: (open) => {
|
||||
if (!open) onClose();
|
||||
},
|
||||
}}
|
||||
title="Delete credential"
|
||||
styling={{ maxWidth: "32rem" }}
|
||||
>
|
||||
<Dialog.Content>
|
||||
<Text variant="large">
|
||||
Are you sure you want to delete "{credentialToDelete?.title}
|
||||
"? This action cannot be undone.
|
||||
</Text>
|
||||
<Dialog.Footer>
|
||||
<Button variant="secondary" onClick={onClose} disabled={isDeleting}>
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
variant="destructive"
|
||||
onClick={onConfirm}
|
||||
loading={isDeleting}
|
||||
>
|
||||
Delete
|
||||
</Button>
|
||||
</Dialog.Footer>
|
||||
</Dialog.Content>
|
||||
</Dialog>
|
||||
);
|
||||
}
|
||||
@@ -1,102 +0,0 @@
|
||||
import { KeyIcon } from "@phosphor-icons/react";
|
||||
import { NotionLogoIcon } from "@radix-ui/react-icons";
|
||||
import {
|
||||
FaDiscord,
|
||||
FaGithub,
|
||||
FaGoogle,
|
||||
FaHubspot,
|
||||
FaMedium,
|
||||
FaTwitter,
|
||||
} from "react-icons/fa";
|
||||
|
||||
export const fallbackIcon = KeyIcon;
|
||||
|
||||
export const providerIcons: Partial<
|
||||
Record<string, React.FC<{ className?: string }>>
|
||||
> = {
|
||||
aiml_api: fallbackIcon,
|
||||
anthropic: fallbackIcon,
|
||||
apollo: fallbackIcon,
|
||||
e2b: fallbackIcon,
|
||||
github: FaGithub,
|
||||
google: FaGoogle,
|
||||
groq: fallbackIcon,
|
||||
http: fallbackIcon,
|
||||
notion: NotionLogoIcon,
|
||||
nvidia: fallbackIcon,
|
||||
discord: FaDiscord,
|
||||
d_id: fallbackIcon,
|
||||
google_maps: FaGoogle,
|
||||
jina: fallbackIcon,
|
||||
ideogram: fallbackIcon,
|
||||
linear: fallbackIcon,
|
||||
medium: FaMedium,
|
||||
mem0: fallbackIcon,
|
||||
ollama: fallbackIcon,
|
||||
openai: fallbackIcon,
|
||||
openweathermap: fallbackIcon,
|
||||
open_router: fallbackIcon,
|
||||
llama_api: fallbackIcon,
|
||||
pinecone: fallbackIcon,
|
||||
enrichlayer: fallbackIcon,
|
||||
slant3d: fallbackIcon,
|
||||
screenshotone: fallbackIcon,
|
||||
smtp: fallbackIcon,
|
||||
replicate: fallbackIcon,
|
||||
reddit: fallbackIcon,
|
||||
fal: fallbackIcon,
|
||||
revid: fallbackIcon,
|
||||
twitter: FaTwitter,
|
||||
unreal_speech: fallbackIcon,
|
||||
exa: fallbackIcon,
|
||||
hubspot: FaHubspot,
|
||||
smartlead: fallbackIcon,
|
||||
todoist: fallbackIcon,
|
||||
zerobounce: fallbackIcon,
|
||||
};
|
||||
|
||||
export type OAuthPopupResultMessage = { message_type: "oauth_popup_result" } & (
|
||||
| {
|
||||
success: true;
|
||||
code: string;
|
||||
state: string;
|
||||
}
|
||||
| {
|
||||
success: false;
|
||||
message: string;
|
||||
}
|
||||
);
|
||||
|
||||
export function getActionButtonText(
|
||||
supportsOAuth2: boolean,
|
||||
supportsApiKey: boolean,
|
||||
supportsUserPassword: boolean,
|
||||
supportsHostScoped: boolean,
|
||||
hasExistingCredentials: boolean,
|
||||
): string {
|
||||
if (hasExistingCredentials) {
|
||||
if (supportsOAuth2) return "Connect another account";
|
||||
if (supportsApiKey) return "Use a new API key";
|
||||
if (supportsUserPassword) return "Add a new username and password";
|
||||
if (supportsHostScoped) return "Add new headers";
|
||||
return "Add new credentials";
|
||||
} else {
|
||||
if (supportsOAuth2) return "Add account";
|
||||
if (supportsApiKey) return "Add API key";
|
||||
if (supportsUserPassword) return "Add username and password";
|
||||
if (supportsHostScoped) return "Add headers";
|
||||
return "Add credentials";
|
||||
}
|
||||
}
|
||||
|
||||
export function getCredentialDisplayName(
|
||||
credential: { title?: string; username?: string },
|
||||
displayName: string,
|
||||
): string {
|
||||
return (
|
||||
credential.title || credential.username || `Your ${displayName} account`
|
||||
);
|
||||
}
|
||||
|
||||
export const OAUTH_TIMEOUT_MS = 5 * 60 * 1000;
|
||||
export const MASKED_KEY_LENGTH = 30;
|
||||
@@ -1,318 +0,0 @@
|
||||
import { useDeleteV1DeleteCredentials } from "@/app/api/__generated__/endpoints/integrations/integrations";
|
||||
import useCredentials from "@/hooks/useCredentials";
|
||||
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
|
||||
import {
|
||||
BlockIOCredentialsSubSchema,
|
||||
CredentialsMetaInput,
|
||||
} from "@/lib/autogpt-server-api/types";
|
||||
import { CredentialsProvidersContext } from "@/providers/agent-credentials/credentials-provider";
|
||||
import { useQueryClient } from "@tanstack/react-query";
|
||||
import { useContext, useEffect, useMemo, useState } from "react";
|
||||
import {
|
||||
getActionButtonText,
|
||||
OAUTH_TIMEOUT_MS,
|
||||
OAuthPopupResultMessage,
|
||||
} from "./helpers";
|
||||
|
||||
type Args = {
|
||||
schema: BlockIOCredentialsSubSchema;
|
||||
selectedCredentials?: CredentialsMetaInput;
|
||||
onSelectCredentials: (newValue?: CredentialsMetaInput) => void;
|
||||
siblingInputs?: Record<string, any>;
|
||||
onLoaded?: (loaded: boolean) => void;
|
||||
readOnly?: boolean;
|
||||
};
|
||||
|
||||
export function useCredentialsInputs({
|
||||
schema,
|
||||
selectedCredentials,
|
||||
onSelectCredentials,
|
||||
siblingInputs,
|
||||
onLoaded,
|
||||
readOnly = false,
|
||||
}: Args) {
|
||||
const [isAPICredentialsModalOpen, setAPICredentialsModalOpen] =
|
||||
useState(false);
|
||||
const [
|
||||
isUserPasswordCredentialsModalOpen,
|
||||
setUserPasswordCredentialsModalOpen,
|
||||
] = useState(false);
|
||||
const [isHostScopedCredentialsModalOpen, setHostScopedCredentialsModalOpen] =
|
||||
useState(false);
|
||||
const [isOAuth2FlowInProgress, setOAuth2FlowInProgress] = useState(false);
|
||||
const [oAuthPopupController, setOAuthPopupController] =
|
||||
useState<AbortController | null>(null);
|
||||
const [oAuthError, setOAuthError] = useState<string | null>(null);
|
||||
const [credentialToDelete, setCredentialToDelete] = useState<{
|
||||
id: string;
|
||||
title: string;
|
||||
} | null>(null);
|
||||
|
||||
const api = useBackendAPI();
|
||||
const queryClient = useQueryClient();
|
||||
const credentials = useCredentials(schema, siblingInputs);
|
||||
const allProviders = useContext(CredentialsProvidersContext);
|
||||
|
||||
const deleteCredentialsMutation = useDeleteV1DeleteCredentials({
|
||||
mutation: {
|
||||
onSuccess: () => {
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: ["/api/integrations/credentials"],
|
||||
});
|
||||
queryClient.invalidateQueries({
|
||||
queryKey: [`/api/integrations/${credentials?.provider}/credentials`],
|
||||
});
|
||||
setCredentialToDelete(null);
|
||||
if (selectedCredentials?.id === credentialToDelete?.id) {
|
||||
onSelectCredentials(undefined);
|
||||
}
|
||||
},
|
||||
},
|
||||
});
|
||||

  const rawProvider = credentials
    ? allProviders?.[credentials.provider as keyof typeof allProviders]
    : null;

  useEffect(() => {
    if (onLoaded) {
      onLoaded(Boolean(credentials && credentials.isLoading === false));
    }
  }, [credentials, onLoaded]);

  useEffect(() => {
    if (readOnly) return;
    if (!credentials || !("savedCredentials" in credentials)) return;
    if (
      selectedCredentials &&
      !credentials.savedCredentials.some((c) => c.id === selectedCredentials.id)
    ) {
      onSelectCredentials(undefined);
    }
  }, [credentials, selectedCredentials, onSelectCredentials, readOnly]);

  const { singleCredential } = useMemo(() => {
    if (!credentials || !("savedCredentials" in credentials)) {
      return {
        singleCredential: null,
      };
    }

    const single =
      credentials.savedCredentials.length === 1
        ? credentials.savedCredentials[0]
        : null;

    return {
      singleCredential: single,
    };
  }, [credentials]);

  useEffect(() => {
    if (readOnly) return;
    if (singleCredential && !selectedCredentials) {
      onSelectCredentials(singleCredential);
    }
  }, [singleCredential, selectedCredentials, onSelectCredentials, readOnly]);

  if (
    !credentials ||
    credentials.isLoading ||
    !("savedCredentials" in credentials)
  ) {
    return {
      isLoading: true,
    };
  }

  const {
    provider,
    providerName,
    supportsApiKey,
    supportsOAuth2,
    supportsUserPassword,
    supportsHostScoped,
    savedCredentials,
    oAuthCallback,
  } = credentials;

  const allSavedCredentials = rawProvider?.savedCredentials || savedCredentials;

  const credentialsToShow = (() => {
    const creds = [...allSavedCredentials];
    if (
      !readOnly &&
      selectedCredentials &&
      !creds.some((c) => c.id === selectedCredentials.id)
    ) {
      creds.push({
        id: selectedCredentials.id,
        type: selectedCredentials.type,
        title: selectedCredentials.title || "Selected credential",
        provider: provider,
      } as any);
    }
    return creds;
  })();

  async function handleOAuthLogin() {
    setOAuthError(null);
    const { login_url, state_token } = await api.oAuthLogin(
      provider,
      schema.credentials_scopes,
    );
    setOAuth2FlowInProgress(true);
    const popup = window.open(login_url, "_blank", "popup=true");

    if (!popup) {
      throw new Error(
        "Failed to open popup window. Please allow popups for this site.",
      );
    }

    const controller = new AbortController();
    setOAuthPopupController(controller);
    controller.signal.onabort = () => {
      console.debug("OAuth flow aborted");
      setOAuth2FlowInProgress(false);
      popup.close();
    };

    const handleMessage = async (e: MessageEvent<OAuthPopupResultMessage>) => {
      console.debug("Message received:", e.data);
      if (
        typeof e.data != "object" ||
        !("message_type" in e.data) ||
        e.data.message_type !== "oauth_popup_result"
      ) {
        console.debug("Ignoring irrelevant message");
        return;
      }

      if (!e.data.success) {
        console.error("OAuth flow failed:", e.data.message);
        setOAuthError(`OAuth flow failed: ${e.data.message}`);
        setOAuth2FlowInProgress(false);
        return;
      }

      if (e.data.state !== state_token) {
        console.error("Invalid state token received");
        setOAuthError("Invalid state token received");
        setOAuth2FlowInProgress(false);
        return;
      }

      try {
        console.debug("Processing OAuth callback");
        const credentials = await oAuthCallback(e.data.code, e.data.state);
        console.debug("OAuth callback processed successfully");
        onSelectCredentials({
          id: credentials.id,
          type: "oauth2",
          title: credentials.title,
          provider,
        });
      } catch (error) {
        console.error("Error in OAuth callback:", error);
        setOAuthError(
          `Error in OAuth callback: ${
            error instanceof Error ? error.message : String(error)
          }`,
        );
      } finally {
        console.debug("Finalizing OAuth flow");
        setOAuth2FlowInProgress(false);
        controller.abort("success");
      }
    };

    console.debug("Adding message event listener");
    window.addEventListener("message", handleMessage, {
      signal: controller.signal,
    });

    setTimeout(() => {
      console.debug("OAuth flow timed out");
      controller.abort("timeout");
      setOAuth2FlowInProgress(false);
      setOAuthError("OAuth flow timed out");
    }, OAUTH_TIMEOUT_MS);
  }
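  // How the popup flow above fits together: api.oAuthLogin() returns
  // { login_url, state_token }; the popup finishes the provider flow and
  // posts back an OAuthPopupResultMessage ({ message_type:
  // "oauth_popup_result", success, code, state }); the listener only
  // exchanges the code via oAuthCallback() after checking that the returned
  // state matches state_token (CSRF protection on the redirect). Aborting
  // the shared AbortController both closes the popup and detaches the
  // "message" listener registered with { signal: controller.signal }.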

  function handleActionButtonClick() {
    if (supportsOAuth2) {
      handleOAuthLogin();
    } else if (supportsApiKey) {
      setAPICredentialsModalOpen(true);
    } else if (supportsUserPassword) {
      setUserPasswordCredentialsModalOpen(true);
    } else if (supportsHostScoped) {
      setHostScopedCredentialsModalOpen(true);
    }
  }

  function handleCredentialSelect(credentialId: string) {
    const selectedCreds = credentialsToShow.find((c) => c.id === credentialId);
    if (selectedCreds) {
      onSelectCredentials({
        id: selectedCreds.id,
        type: selectedCreds.type,
        provider: provider,
        title: (selectedCreds as any).title,
      });
    }
  }

  function handleDeleteCredential(credential: { id: string; title: string }) {
    setCredentialToDelete(credential);
  }

  function handleDeleteConfirm() {
    if (credentialToDelete && credentials) {
      deleteCredentialsMutation.mutate({
        provider: credentials.provider,
        credId: credentialToDelete.id,
      });
    }
  }

  return {
    isLoading: false as const,
    provider,
    providerName,
    supportsApiKey,
    supportsOAuth2,
    supportsUserPassword,
    supportsHostScoped,
    credentialsToShow,
    selectedCredentials,
    oAuthError,
    isAPICredentialsModalOpen,
    isUserPasswordCredentialsModalOpen,
    isHostScopedCredentialsModalOpen,
    isOAuth2FlowInProgress,
    oAuthPopupController,
    credentialToDelete,
    deleteCredentialsMutation,
    actionButtonText: getActionButtonText(
      supportsOAuth2,
      supportsApiKey,
      supportsUserPassword,
      supportsHostScoped,
      credentialsToShow.length > 0,
    ),
    setAPICredentialsModalOpen,
    setUserPasswordCredentialsModalOpen,
    setHostScopedCredentialsModalOpen,
    setCredentialToDelete,
    handleActionButtonClick,
    handleCredentialSelect,
    handleDeleteCredential,
    handleDeleteConfirm,
    handleOAuthLogin,
    onSelectCredentials,
    schema,
    siblingInputs,
  };
}
@@ -3,17 +3,14 @@
import { GraphExecutionJobInfo } from "@/app/api/__generated__/models/graphExecutionJobInfo";
import { GraphExecutionMeta } from "@/app/api/__generated__/models/graphExecutionMeta";
import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import { LibraryAgentPreset } from "@/app/api/__generated__/models/libraryAgentPreset";
import { Button } from "@/components/atoms/Button/Button";
import {
  Tooltip,
  TooltipContent,
  TooltipProvider,
  TooltipTrigger,
} from "@/components/atoms/Tooltip/BaseTooltip";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import { AlarmIcon } from "@phosphor-icons/react";
import { useState } from "react";
import { ScheduleAgentModal } from "../ScheduleAgentModal/ScheduleAgentModal";
import { AgentCostSection } from "./components/AgentCostSection/AgentCostSection";
import { AgentDetails } from "./components/AgentDetails/AgentDetails";
import { AgentSectionHeader } from "./components/AgentSectionHeader/AgentSectionHeader";
import { ModalHeader } from "./components/ModalHeader/ModalHeader";
import { ModalRunSection } from "./components/ModalRunSection/ModalRunSection";
import { RunActions } from "./components/RunActions/RunActions";
@@ -23,20 +20,16 @@ import { useAgentRunModal } from "./useAgentRunModal";
interface Props {
  triggerSlot: React.ReactNode;
  agent: LibraryAgent;
  initialInputValues?: Record<string, any>;
  initialInputCredentials?: Record<string, any>;
  agentId: string;
  agentVersion?: number;
  onRunCreated?: (execution: GraphExecutionMeta) => void;
  onTriggerSetup?: (preset: LibraryAgentPreset) => void;
  onScheduleCreated?: (schedule: GraphExecutionJobInfo) => void;
}

export function RunAgentModal({
  triggerSlot,
  agent,
  initialInputValues,
  initialInputCredentials,
  onRunCreated,
  onTriggerSetup,
  onScheduleCreated,
}: Props) {
  const {
@@ -76,9 +69,6 @@ export function RunAgentModal({
    handleRun,
  } = useAgentRunModal(agent, {
    onRun: onRunCreated,
    onSetupTrigger: onTriggerSetup,
    initialInputValues,
    initialInputCredentials,
  });

  const [isScheduleModalOpen, setIsScheduleModalOpen] = useState(false);
@@ -87,8 +77,6 @@ export function RunAgentModal({
    Object.keys(agentInputFields || {}).length > 0 ||
    Object.keys(agentCredentialsInputFields || {}).length > 0;

  const isTriggerRunType = defaultRunType.includes("trigger");

  function handleInputChange(key: string, value: string) {
    setInputValues((prev) => ({
      ...prev,
@@ -134,74 +122,70 @@ export function RunAgentModal({
    >
      <Dialog.Trigger>{triggerSlot}</Dialog.Trigger>
      <Dialog.Content>
        {/* Header */}
        <ModalHeader agent={agent} />

        {/* Content */}
        {hasAnySetupFields ? (
          <div className="mt-10">
            <RunAgentModalContextProvider
              value={{
                agent,
                defaultRunType,
                presetName,
                setPresetName,
                presetDescription,
                setPresetDescription,
                inputValues,
                setInputValue: handleInputChange,
                agentInputFields,
                inputCredentials,
                setInputCredentialsValue: handleCredentialsChange,
                agentCredentialsInputFields,
              }}
            >
              <ModalRunSection />
            </RunAgentModalContextProvider>
        <div className="flex h-full flex-col pb-4">
          {/* Header */}
          <div className="flex-shrink-0">
            <ModalHeader agent={agent} />
            <AgentCostSection flowId={agent.graph_id} />
          </div>
        ) : null}

        <Dialog.Footer className="mt-6 bg-white pt-4">
          {/* Scrollable content */}
          <div className="flex-1 pr-1" style={{ scrollbarGutter: "stable" }}>
            {/* Setup Section */}
            <div className="mt-10">
              {hasAnySetupFields ? (
                <RunAgentModalContextProvider
                  value={{
                    agent,
                    defaultRunType,
                    presetName,
                    setPresetName,
                    presetDescription,
                    setPresetDescription,
                    inputValues,
                    setInputValue: handleInputChange,
                    agentInputFields,
                    inputCredentials,
                    setInputCredentialsValue: handleCredentialsChange,
                    agentCredentialsInputFields,
                  }}
                >
                  <>
                    <AgentSectionHeader
                      title={
                        defaultRunType === "automatic-trigger"
                          ? "Trigger Setup"
                          : "Agent Setup"
                      }
                    />
                    <ModalRunSection />
                  </>
                </RunAgentModalContextProvider>
              ) : null}
            </div>

            {/* Agent Details Section */}
            <div className="mt-8">
              <AgentSectionHeader title="Agent Details" />
              <AgentDetails agent={agent} />
            </div>
          </div>
        </div>
        <Dialog.Footer
          className="fixed bottom-1 left-0 z-10 w-full bg-white p-4"
          style={{ boxShadow: "0px -8px 10px white" }}
        >
          <div className="flex items-center justify-end gap-3">
            {isTriggerRunType ? null : !allRequiredInputsAreSet ? (
              <TooltipProvider>
                <Tooltip>
                  <TooltipTrigger asChild>
                    <span>
                      <Button
                        variant="secondary"
                        onClick={handleOpenScheduleModal}
                        disabled={
                          isExecuting ||
                          isSettingUpTrigger ||
                          !allRequiredInputsAreSet
                        }
                      >
                        Schedule Task
                      </Button>
                    </span>
                  </TooltipTrigger>
                  <TooltipContent>
                    <p>
                      Please set up all required inputs and credentials before
                      scheduling
                    </p>
                  </TooltipContent>
                </Tooltip>
              </TooltipProvider>
            ) : (
              <Button
                variant="secondary"
                onClick={handleOpenScheduleModal}
                disabled={
                  isExecuting ||
                  isSettingUpTrigger ||
                  !allRequiredInputsAreSet
                }
              >
                Schedule Task
              </Button>
            )}
            <Button
              variant="secondary"
              onClick={handleOpenScheduleModal}
              disabled={
                isExecuting || isSettingUpTrigger || !allRequiredInputsAreSet
              }
            >
              <AlarmIcon size={16} />
              Schedule Agent
            </Button>
            <RunActions
              defaultRunType={defaultRunType}
              onRun={handleRun}
@@ -0,0 +1,31 @@
import { Button } from "@/components/atoms/Button/Button";

interface Props {
  flowId: string;
}

export function AgentCostSection({ flowId }: Props) {
  return (
    <div className="mt-6 flex items-center justify-between">
      {/* TODO: enable once we have an API to show estimated cost for an agent run */}
      {/* <div className="flex items-center gap-2">
        <Text variant="body-medium">Cost</Text>
        <Text variant="body">{cost}</Text>
      </div> */}
      <div className="flex items-center gap-2">
        <Button
          variant="outline"
          size="small"
          as="NextLink"
          href={`/build?flowID=${flowId}`}
        >
          Open in builder
        </Button>
        {/* TODO: enable once we can easily link to the agent listing page from the library agent response */}
        {/* <Button variant="outline" size="small">
          View listing <ArrowSquareOutIcon size={16} />
        </Button> */}
      </div>
    </div>
  );
}
@@ -0,0 +1,48 @@
import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import { Text } from "@/components/atoms/Text/Text";
import { Badge } from "@/components/atoms/Badge/Badge";
import { formatDate } from "@/lib/utils/time";

interface Props {
  agent: LibraryAgent;
}

export function AgentDetails({ agent }: Props) {
  return (
    <div className="mt-4 flex flex-col gap-5">
      <div>
        <Text variant="body-medium" className="mb-1 !text-black">
          Version
        </Text>
        <div className="flex items-center gap-2">
          <Text variant="body" className="!text-zinc-700">
            v{agent.graph_version}
          </Text>
          {agent.is_latest_version && (
            <Badge variant="success" size="small">
              Latest
            </Badge>
          )}
        </div>
      </div>
      <div>
        <Text variant="body-medium" className="mb-1 !text-black">
          Last Updated
        </Text>
        <Text variant="body" className="!text-zinc-700">
          {formatDate(agent.updated_at)}
        </Text>
      </div>
      {agent.has_external_trigger && (
        <div>
          <Text variant="body-medium" className="mb-1">
            Trigger Type
          </Text>
          <Text variant="body" className="!text-neutral-700">
            External Webhook
          </Text>
        </div>
      )}
    </div>
  );
}
@@ -0,0 +1,15 @@
import { Text } from "@/components/atoms/Text/Text";

interface Props {
  title: string;
}

export function AgentSectionHeader({ title }: Props) {
  return (
    <div className="border-t border-zinc-400 px-0 pb-2 pt-1">
      <Text variant="label" className="!text-zinc-700">
        {title}
      </Text>
    </div>
  );
}
@@ -1,8 +1,8 @@
import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import { Badge } from "@/components/atoms/Badge/Badge";
import { Link } from "@/components/atoms/Link/Link";
import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import { Text } from "@/components/atoms/Text/Text";
import { ShowMoreText } from "@/components/molecules/ShowMoreText/ShowMoreText";
import { ClockIcon, InfoIcon } from "@phosphor-icons/react";
import { humanizeCronExpression } from "@/lib/cron-expression-utils";

interface ModalHeaderProps {
@@ -10,56 +10,49 @@ interface ModalHeaderProps {
}

export function ModalHeader({ agent }: ModalHeaderProps) {
  const creator = agent.marketplace_listing?.creator;
  const isUnknownCreator = agent.creator_name === "Unknown";

  return (
    <div className="flex flex-col gap-4">
      <Badge variant="info" className="w-fit">
        New Task
      </Badge>
    <div className="space-y-4">
      <div className="flex items-center gap-3">
        <Badge variant="info">New Run</Badge>
      </div>
      <div>
        <Text variant="h2">{agent.name}</Text>
        {creator ? (
          <Link href={`/marketplace/creator/${creator.slug}`} isExternal>
            by {creator.name}
          </Link>
        <Text variant="h3">{agent.name}</Text>
        {!isUnknownCreator ? (
          <Text variant="body-medium">by {agent.creator_name}</Text>
        ) : null}
        <ShowMoreText
          previewLimit={80}
          variant="small"
          className="mt-4 !text-zinc-700"
        >
          {agent.description}
        </ShowMoreText>

        {agent.description ? (
          <ShowMoreText
            previewLimit={400}
            variant="small"
            className="mt-4 !text-zinc-700"
          >
            {agent.description}
          </ShowMoreText>
        ) : null}

        {agent.recommended_schedule_cron && !agent.has_external_trigger ? (
          <div className="flex flex-col gap-4 rounded-medium border border-blue-100 bg-blue-50 p-4">
            <Text variant="lead-semibold" className="text-blue-600">
              Tip
            </Text>
            <Text variant="body">
              For best results, run this agent{" "}
        {/* Schedule recommendation tip */}
        {agent.recommended_schedule_cron && !agent.has_external_trigger && (
          <div className="mt-4 flex items-center gap-2">
            <ClockIcon className="h-4 w-4 text-gray-500" />
            <p className="text-sm text-gray-600">
              <strong>Tip:</strong> For best results, run this agent{" "}
              {humanizeCronExpression(
                agent.recommended_schedule_cron,
              ).toLowerCase()}
            </Text>
            </p>
          </div>
        ) : null}
        )}

        {agent.instructions ? (
          <div className="flex flex-col gap-4 rounded-medium border border-purple-100 bg-[#F1EBFE/5] p-4">
            <Text variant="lead-semibold" className="text-purple-600">
              Instructions
            </Text>

            <div className="h-px w-full bg-purple-100" />

            <Text variant="body">{agent.instructions}</Text>
        {/* Setup Instructions */}
        {agent.instructions && (
          <div className="mt-4 flex items-start gap-2">
            <InfoIcon className="mt-0.5 h-4 w-4 flex-shrink-0 text-gray-500" />
            <div className="text-sm text-gray-600">
              <strong>Setup Instructions:</strong>{" "}
              <span className="whitespace-pre-wrap">{agent.instructions}</span>
            </div>
          </div>
        ) : null}
        )}
      </div>
    </div>
  );
@@ -1,13 +1,17 @@
import { CredentialsInput } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/CredentialsInputs";
import { Input } from "@/components/atoms/Input/Input";
import { Text } from "@/components/atoms/Text/Text";
import { InformationTooltip } from "@/components/molecules/InformationTooltip/InformationTooltip";
import { toDisplayName } from "@/providers/agent-credentials/helper";
import { InfoIcon } from "@phosphor-icons/react";
import { RunAgentInputs } from "../../../RunAgentInputs/RunAgentInputs";
import { useRunAgentModalContext } from "../../context";
import { ModalSection } from "../ModalSection/ModalSection";
import { WebhookTriggerBanner } from "../WebhookTriggerBanner/WebhookTriggerBanner";
import { getCredentialTypeDisplayName } from "./helpers";

export function ModalRunSection() {
  const {
    agent,
    defaultRunType,
    presetName,
    setPresetName,
@@ -21,105 +25,147 @@ export function ModalRunSection() {
    agentCredentialsInputFields,
  } = useRunAgentModalContext();

  const inputFields = Object.entries(agentInputFields || {});
  const credentialFields = Object.entries(agentCredentialsInputFields || {});

  return (
    <div className="flex flex-col gap-4">
      {defaultRunType === "automatic-trigger" ||
      defaultRunType === "manual-trigger" ? (
        <ModalSection
          title="Task Trigger"
          subtitle="Set up a trigger for the agent to run this task automatically"
        >
          <WebhookTriggerBanner />
          <div className="flex flex-col gap-4">
            <div className="flex flex-col space-y-2">
              <label className="flex items-center gap-1 text-sm font-medium">
                Trigger Name
                <InformationTooltip description="Name of the trigger you are setting up" />
              </label>
              <Input
                id="trigger_name"
                label="Trigger Name"
                size="small"
                hideLabel
                value={presetName}
                placeholder="Enter trigger name"
                onChange={(e) => setPresetName(e.target.value)}
              />
            </div>
            <div className="flex flex-col space-y-2">
              <label className="flex items-center gap-1 text-sm font-medium">
                Trigger Description
                <InformationTooltip description="Description of the trigger you are setting up" />
              </label>
              <Input
                id="trigger_description"
                label="Trigger Description"
                size="small"
                hideLabel
                value={presetDescription}
                placeholder="Enter trigger description"
                onChange={(e) => setPresetDescription(e.target.value)}
              />
            </div>
    <div className="mb-10 mt-4">
      {defaultRunType === "automatic-trigger" && <WebhookTriggerBanner />}

      {/* Preset/Trigger fields */}
      {defaultRunType === "automatic-trigger" && (
        <div className="flex flex-col gap-4">
          <div className="flex flex-col space-y-2">
            <label className="flex items-center gap-1 text-sm font-medium">
              Trigger Name
              <InformationTooltip description="Name of the trigger you are setting up" />
            </label>
            <Input
              id="trigger_name"
              label="Trigger Name"
              size="small"
              hideLabel
              value={presetName}
              placeholder="Enter trigger name"
              onChange={(e) => setPresetName(e.target.value)}
            />
          </div>
        </ModalSection>
      ) : null}

      {inputFields.length > 0 ? (
        <ModalSection
          title="Task Inputs"
          subtitle="Enter the information you want to provide to the agent for this task"
        >
          {/* Regular inputs */}
          {inputFields.map(([key, inputSubSchema]) => (
            <div key={key} className="flex w-full flex-col gap-0 space-y-2">
              <label className="flex items-center gap-1 text-sm font-medium">
                {inputSubSchema.title || key}
                <InformationTooltip description={inputSubSchema.description} />
              </label>

              <RunAgentInputs
                schema={inputSubSchema}
                value={inputValues[key] ?? inputSubSchema.default}
                placeholder={inputSubSchema.description}
                onChange={(value) => setInputValue(key, value)}
                data-testid={`agent-input-${key}`}
              />
            </div>
          ))}
        </ModalSection>
      ) : null}

      {credentialFields.length > 0 ? (
        <ModalSection
          title="Task Credentials"
          subtitle="These are the credentials the agent will use to perform this task"
        >
          <div className="space-y-6">
            {Object.entries(agentCredentialsInputFields || {}).map(
              ([key, inputSubSchema]) => (
                <CredentialsInput
                  key={key}
                  schema={
                    { ...inputSubSchema, discriminator: undefined } as any
                  }
                  selectedCredentials={
                    (inputCredentials && inputCredentials[key]) ??
                    inputSubSchema.default
                  }
                  onSelectCredentials={(value) =>
                    setInputCredentialsValue(key, value)
                  }
                  siblingInputs={inputValues}
                />
              ),
            )}
          <div className="flex flex-col space-y-2">
            <label className="flex items-center gap-1 text-sm font-medium">
              Trigger Description
              <InformationTooltip description="Description of the trigger you are setting up" />
            </label>
            <Input
              id="trigger_description"
              label="Trigger Description"
              size="small"
              hideLabel
              value={presetDescription}
              placeholder="Enter trigger description"
              onChange={(e) => setPresetDescription(e.target.value)}
            />
          </div>
        </ModalSection>
      ) : null}
    </div>
      )}

      {/* Instructions */}
      {agent.instructions && (
        <div className="mb-4 flex items-start gap-2 rounded-md border border-blue-200 bg-blue-50 p-3">
          <InfoIcon className="mt-0.5 h-4 w-4 text-blue-600" />
          <div>
            <h4 className="text-sm font-medium text-blue-900">
              How to use this agent
            </h4>
            <p className="mt-1 whitespace-pre-wrap text-sm text-blue-800">
              {agent.instructions}
            </p>
          </div>
        </div>
      )}

      {/* Credentials inputs */}
      {Object.entries(agentCredentialsInputFields || {}).map(
        ([key, inputSubSchema]) => (
          <CredentialsInput
            key={key}
            schema={{ ...inputSubSchema, discriminator: undefined } as any}
            selectedCredentials={
              (inputCredentials && inputCredentials[key]) ??
              inputSubSchema.default
            }
            onSelectCredentials={(value) =>
              setInputCredentialsValue(key, value)
            }
            siblingInputs={inputValues}
            hideIfSingleCredentialAvailable={!agent.has_external_trigger}
          />
        ),
      )}

      {/* Regular inputs */}
      {Object.entries(agentInputFields || {}).map(([key, inputSubSchema]) => (
        <div key={key} className="flex w-full flex-col gap-0 space-y-2">
          <label className="flex items-center gap-1 text-sm font-medium">
            {inputSubSchema.title || key}
            <InformationTooltip description={inputSubSchema.description} />
          </label>

          <RunAgentInputs
            schema={inputSubSchema}
            value={inputValues[key] ?? inputSubSchema.default}
            placeholder={inputSubSchema.description}
            onChange={(value) => setInputValue(key, value)}
            data-testid={`agent-input-${key}`}
          />
        </div>
      ))}

      {/* Selected Credentials Preview */}
      {Object.keys(inputCredentials).length > 0 && (
        <div className="mt-6 flex flex-col gap-6">
          {Object.entries(agentCredentialsInputFields || {}).map(
            ([key, _sub]) => {
              const credential = inputCredentials[key];
              if (!credential) return null;

              return (
                <div key={key} className="flex flex-col gap-4">
                  <Text variant="body-medium" as="h3">
                    {toDisplayName(credential.provider)} credentials
                  </Text>
                  <div className="flex flex-col gap-3">
                    <div className="flex items-center justify-between text-sm">
                      <Text
                        variant="body"
                        as="span"
                        className="!text-neutral-600"
                      >
                        Name
                      </Text>
                      <Text
                        variant="body"
                        as="span"
                        className="!text-neutral-600"
                      >
                        {getCredentialTypeDisplayName(credential.type)}
                      </Text>
                    </div>
                    <div className="flex items-center justify-between text-sm">
                      <Text
                        variant="body"
                        as="span"
                        className="!text-neutral-900"
                      >
                        {credential.title || "Untitled"}
                      </Text>
                      <span className="font-mono text-neutral-400">
                        {"*".repeat(25)}
                      </span>
                    </div>
                  </div>
                </div>
              );
            },
          )}
        </div>
      )}
    </div>
  );
}
@@ -1,21 +0,0 @@
import { Text } from "@/components/atoms/Text/Text";

interface Props {
  title: string;
  subtitle: string;
  children: React.ReactNode;
}

export function ModalSection({ title, subtitle, children }: Props) {
  return (
    <div className="rounded-medium border border-zinc-200 p-6">
      <div className="mb-4 flex flex-col gap-1 border-b border-zinc-100 pb-4">
        <Text variant="lead-semibold">{title}</Text>
        <Text variant="body" className="text-zinc-500">
          {subtitle}
        </Text>
      </div>
      {children}
    </div>
  );
}
@@ -24,10 +24,9 @@ export function RunActions({
      disabled={!isRunReady || isExecuting || isSettingUpTrigger}
      loading={isExecuting || isSettingUpTrigger}
    >
      {defaultRunType === "automatic-trigger" ||
      defaultRunType === "manual-trigger"
      {defaultRunType === "automatic-trigger"
        ? "Set up Trigger"
        : "Start Task"}
        : "Run Agent"}
    </Button>
  </div>
);

@@ -1,6 +1,6 @@
export function WebhookTriggerBanner() {
  return (
    <div className="mb-4 rounded-lg border border-blue-200 bg-blue-50 p-4">
    <div className="rounded-lg border border-blue-200 bg-blue-50 p-4">
      <div className="flex items-start">
        <div className="flex-shrink-0">
          <svg
@@ -1,100 +0,0 @@
import { ApiError } from "@/lib/autogpt-server-api/helpers";
import Link from "next/link";
import React from "react";

type ValidationErrorDetail = {
  type: string;
  message?: string;
  node_errors?: Record<string, Record<string, string>>;
};

type AgentInfo = {
  graph_id: string;
  graph_version: number;
};

export function formatValidationError(
  error: any,
  agentInfo?: AgentInfo,
): string | React.ReactNode {
  if (
    !(error instanceof ApiError) ||
    !error.isGraphValidationError() ||
    !error.response?.detail
  ) {
    return error.message || "An unexpected error occurred.";
  }

  const detail: ValidationErrorDetail = error.response.detail;

  // Format validation errors nicely
  if (detail.type === "validation_error" && detail.node_errors) {
    const nodeErrors = detail.node_errors;
    const errorItems: React.ReactNode[] = [];

    // Collect all field errors
    Object.entries(nodeErrors).forEach(([nodeId, fields]) => {
      if (fields && typeof fields === "object") {
        Object.entries(fields).forEach(([fieldName, fieldError]) => {
          errorItems.push(
            <div key={`${nodeId}-${fieldName}`} className="mt-1">
              <span className="font-medium">{fieldName}:</span>{" "}
              {String(fieldError)}
            </div>,
          );
        });
      }
    });

    if (errorItems.length > 0) {
      return (
        <div className="space-y-1">
          <div className="font-medium text-white">
            {detail.message || "Validation failed"}
          </div>
          <div className="mt-2 space-y-1 text-xs">{errorItems}</div>
          {agentInfo && (
            <div className="mt-3 text-xs">
              Check the agent graph and try to run from there for further
              details.{" "}
              <Link
                href={`/build?flowID=${agentInfo.graph_id}&flowVersion=${agentInfo.graph_version}`}
                target="_blank"
                rel="noopener noreferrer"
                className="cursor-pointer underline hover:no-underline"
              >
                Open in builder
              </Link>
            </div>
          )}
        </div>
      );
    } else {
      return detail.message || "Validation failed";
    }
  }

  return detail.message || error.message || "An unexpected error occurred.";
}

export function showExecutionErrorToast(
  toast: (options: {
    title: string;
    description: string | React.ReactNode;
    variant: "destructive";
    duration: number;
    dismissable: boolean;
  }) => void,
  error: any,
  agentInfo?: AgentInfo,
) {
  const errorMessage = formatValidationError(error, agentInfo);

  toast({
    title: "Failed to execute agent",
    description: errorMessage,
    variant: "destructive",
    duration: 10000, // 10 seconds - long enough to read and close
    dismissable: true,
  });
}
@@ -1,20 +1,23 @@
import {
  getGetV1ListGraphExecutionsQueryKey,
  usePostV1ExecuteGraphAgent,
} from "@/app/api/__generated__/endpoints/graphs/graphs";
import {
  getGetV2ListPresetsQueryKey,
  usePostV2SetupTrigger,
} from "@/app/api/__generated__/endpoints/presets/presets";
import { GraphExecutionMeta } from "@/app/api/__generated__/models/graphExecutionMeta";
import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import { LibraryAgentPreset } from "@/app/api/__generated__/models/libraryAgentPreset";
import { useState, useCallback, useMemo } from "react";
import { useQueryClient } from "@tanstack/react-query";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { isEmpty } from "@/lib/utils";
import {
  usePostV1ExecuteGraphAgent,
  getGetV1ListGraphExecutionsInfiniteQueryOptions,
} from "@/app/api/__generated__/endpoints/graphs/graphs";
import {
  usePostV1CreateExecutionSchedule as useCreateSchedule,
  getGetV1ListExecutionSchedulesForAGraphQueryKey,
} from "@/app/api/__generated__/endpoints/schedules/schedules";
import { usePostV2SetupTrigger } from "@/app/api/__generated__/endpoints/presets/presets";
import { GraphExecutionMeta } from "@/app/api/__generated__/models/graphExecutionMeta";
import { GraphExecutionJobInfo } from "@/app/api/__generated__/models/graphExecutionJobInfo";
import { LibraryAgentPreset } from "@/app/api/__generated__/models/libraryAgentPreset";
import { useGetV1GetUserTimezone } from "@/app/api/__generated__/endpoints/auth/auth";
import { useOnboarding } from "@/providers/onboarding/onboarding-provider";
import { analytics } from "@/services/analytics";
import { useQueryClient } from "@tanstack/react-query";
import { useCallback, useEffect, useMemo, useState } from "react";
import { showExecutionErrorToast } from "./errorHelpers";

export type RunVariant =
  | "manual"
@@ -24,9 +27,8 @@ export type RunVariant =

interface UseAgentRunModalCallbacks {
  onRun?: (execution: GraphExecutionMeta) => void;
  onCreateSchedule?: (schedule: GraphExecutionJobInfo) => void;
  onSetupTrigger?: (preset: LibraryAgentPreset) => void;
  initialInputValues?: Record<string, any>;
  initialInputCredentials?: Record<string, any>;
}

export function useAgentRunModal(
@@ -36,28 +38,32 @@ export function useAgentRunModal(
  const { toast } = useToast();
  const queryClient = useQueryClient();
  const [isOpen, setIsOpen] = useState(false);
  const [inputValues, setInputValues] = useState<Record<string, any>>(
    callbacks?.initialInputValues || {},
  );
  const [showScheduleView, setShowScheduleView] = useState(false);
  const [inputValues, setInputValues] = useState<Record<string, any>>({});
  const [inputCredentials, setInputCredentials] = useState<Record<string, any>>(
    callbacks?.initialInputCredentials || {},
    {},
  );
  const [presetName, setPresetName] = useState<string>("");
  const [presetDescription, setPresetDescription] = useState<string>("");
  const defaultScheduleName = useMemo(() => `Run ${agent.name}`, [agent.name]);
  const [scheduleName, setScheduleName] = useState(defaultScheduleName);
  const [cronExpression, setCronExpression] = useState(
    agent.recommended_schedule_cron || "0 9 * * 1",
  );
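  // The fallback cron "0 9 * * 1" reads minute 0, hour 9, any day of month,
  // any month, weekday 1: i.e. every Monday at 09:00.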
  const { completeStep: completeOnboardingStep } = useOnboarding();

  // Get user timezone for scheduling
  const { data: userTimezone } = useGetV1GetUserTimezone({
    query: {
      select: (res) => (res.status === 200 ? res.data.timezone : undefined),
    },
  });

  // Determine the default run type based on agent capabilities
  const defaultRunType: RunVariant = agent.trigger_setup_info
    ? agent.trigger_setup_info.credentials_input_name
      ? "automatic-trigger"
      : "manual-trigger"
  const defaultRunType: RunVariant = agent.has_external_trigger
    ? "automatic-trigger"
    : "manual";

  // Update input values/credentials if template is selected/unselected
  useEffect(() => {
    setInputValues(callbacks?.initialInputValues || {});
    setInputCredentials(callbacks?.initialInputCredentials || {});
  }, [callbacks?.initialInputValues, callbacks?.initialInputCredentials]);

  // API mutations
  const executeGraphMutation = usePostV1ExecuteGraphAgent({
    mutation: {
@@ -66,11 +72,13 @@ export function useAgentRunModal(
        toast({
          title: "Agent execution started",
        });
        callbacks?.onRun?.(response.data as unknown as GraphExecutionMeta);
        // Invalidate runs list for this graph
        queryClient.invalidateQueries({
          queryKey: getGetV1ListGraphExecutionsQueryKey(agent.graph_id),
          queryKey: getGetV1ListGraphExecutionsInfiniteQueryOptions(
            agent.graph_id,
          ).queryKey,
        });
        callbacks?.onRun?.(response.data);
        analytics.sendDatafastEvent("run_agent", {
          name: agent.name,
          id: agent.graph_id,
@@ -79,9 +87,46 @@ export function useAgentRunModal(
        }
      },
      onError: (error: any) => {
        showExecutionErrorToast(toast, error, {
          graph_id: agent.graph_id,
          graph_version: agent.graph_version,
        const errorMessage = error.isGraphValidationError()
          ? error.response.detail.message
          : error.message;

        toast({
          title: "❌ Failed to execute agent",
          description: errorMessage || "An unexpected error occurred.",
          variant: "destructive",
        });
      },
    },
  });

  const createScheduleMutation = useCreateSchedule({
    mutation: {
      onSuccess: (response) => {
        if (response.status === 200) {
          toast({
            title: "Schedule created",
          });
          callbacks?.onCreateSchedule?.(response.data);
          // Invalidate schedules list for this graph
          queryClient.invalidateQueries({
            queryKey: getGetV1ListExecutionSchedulesForAGraphQueryKey(
              agent.graph_id,
            ),
          });
          analytics.sendDatafastEvent("schedule_agent", {
            name: agent.name,
            id: agent.graph_id,
            cronExpression: cronExpression,
          });
          setIsOpen(false);
        }
      },
      onError: (error: any) => {
        toast({
          title: "❌ Failed to create schedule",
          description: error.message || "An unexpected error occurred.",
          variant: "destructive",
        });
      },
    },
@@ -89,14 +134,11 @@ export function useAgentRunModal(

  const setupTriggerMutation = usePostV2SetupTrigger({
    mutation: {
      onSuccess: (response) => {
      onSuccess: (response: any) => {
        if (response.status === 200) {
          toast({
            title: "Trigger setup complete",
          });
          queryClient.invalidateQueries({
            queryKey: getGetV2ListPresetsQueryKey({ graph_id: agent.graph_id }),
          });
          callbacks?.onSetupTrigger?.(response.data);
          setIsOpen(false);
        }
@@ -111,13 +153,11 @@ export function useAgentRunModal(
    },
  });

  // Input schema validation (use trigger schema for triggered agents)
  const agentInputSchema = useMemo(() => {
    if (agent.trigger_setup_info?.config_schema) {
      return agent.trigger_setup_info.config_schema;
    }
    return agent.input_schema || { properties: {}, required: [] };
  }, [agent.input_schema, agent.trigger_setup_info]);
  // Input schema validation
  const agentInputSchema = useMemo(
    () => agent.input_schema || { properties: {}, required: [] },
    [agent.input_schema],
  );

  const agentInputFields = useMemo(() => {
    if (
@@ -186,25 +226,33 @@ export function useAgentRunModal(
    [allRequiredInputsAreSetRaw, credentialsRequired, allCredentialsAreSet],
  );

  const notifyMissingRequirements = useCallback(() => {
    const allMissingFields = missingInputs.concat(
      credentialsRequired && !allCredentialsAreSet
        ? missingCredentials.map((k) => `credentials:${k}`)
        : [],
    );
  const notifyMissingRequirements = useCallback(
    (needScheduleName: boolean = false) => {
      const allMissingFields = (
        needScheduleName && !scheduleName ? ["schedule_name"] : []
      )
        .concat(missingInputs)
        .concat(
          credentialsRequired && !allCredentialsAreSet
            ? missingCredentials.map((k) => `credentials:${k}`)
            : [],
        );

    toast({
      title: "⚠️ Missing required inputs",
      description: `Please provide: ${allMissingFields.map((k) => `"${k}"`).join(", ")}`,
      variant: "destructive",
    });
  }, [
    missingInputs,
    toast,
    credentialsRequired,
    allCredentialsAreSet,
    missingCredentials,
  ]);
      toast({
        title: "⚠️ Missing required inputs",
        description: `Please provide: ${allMissingFields.map((k) => `"${k}"`).join(", ")}`,
        variant: "destructive",
      });
    },
    [
      missingInputs,
      scheduleName,
      toast,
      credentialsRequired,
      allCredentialsAreSet,
      missingCredentials,
    ],
  );

  // Action handlers
  const handleRun = useCallback(() => {
@@ -213,12 +261,9 @@ export function useAgentRunModal(
      return;
    }

    if (
      defaultRunType === "automatic-trigger" ||
      defaultRunType === "manual-trigger"
    ) {
    if (defaultRunType === "automatic-trigger") {
      // Setup trigger
      if (!presetName.trim()) {
      if (!scheduleName.trim()) {
        toast({
          title: "⚠️ Trigger name required",
          description: "Please provide a name for your trigger.",
@@ -229,7 +274,7 @@ export function useAgentRunModal(

      setupTriggerMutation.mutate({
        data: {
          name: presetName,
          name: presetName || scheduleName,
          description: presetDescription || `Trigger for ${agent.name}`,
          graph_id: agent.graph_id,
          graph_version: agent.graph_version,
@@ -245,13 +290,13 @@ export function useAgentRunModal(
        data: {
          inputs: inputValues,
          credentials_inputs: inputCredentials,
          source: "library",
        },
      });
    }
  }, [
    allRequiredInputsAreSet,
    defaultRunType,
    scheduleName,
    inputValues,
    inputCredentials,
    agent,
@@ -263,6 +308,72 @@ export function useAgentRunModal(
    toast,
  ]);

  const handleSchedule = useCallback(() => {
    if (!allRequiredInputsAreSet) {
      notifyMissingRequirements(true);
      return;
    }

    if (!scheduleName.trim()) {
      toast({
        title: "⚠️ Schedule name required",
        description: "Please provide a name for your schedule.",
        variant: "destructive",
      });
      return;
    }

    createScheduleMutation.mutate({
      graphId: agent.graph_id,
      data: {
        name: presetName || scheduleName,
        cron: cronExpression,
        inputs: inputValues,
        graph_version: agent.graph_version,
        credentials: inputCredentials,
        timezone:
          userTimezone && userTimezone !== "not-set" ? userTimezone : undefined,
      },
    });

    completeOnboardingStep("SCHEDULE_AGENT");
  }, [
    allRequiredInputsAreSet,
    scheduleName,
    cronExpression,
    inputValues,
    inputCredentials,
    agent,
    notifyMissingRequirements,
    createScheduleMutation,
    toast,
    userTimezone,
  ]);

  function handleShowSchedule() {
    // Initialize with sensible defaults when entering schedule view
    setScheduleName((prev) => prev || defaultScheduleName);
    setCronExpression(
      (prev) => prev || agent.recommended_schedule_cron || "0 9 * * 1",
    );
    setShowScheduleView(true);
  }

  function handleGoBack() {
    setShowScheduleView(false);
    // Reset schedule fields on exit
    setScheduleName(defaultScheduleName);
    setCronExpression(agent.recommended_schedule_cron || "0 9 * * 1");
  }

  function handleSetScheduleName(name: string) {
    setScheduleName(name);
  }

  function handleSetCronExpression(expression: string) {
    setCronExpression(expression);
  }

  const hasInputFields = useMemo(() => {
    return Object.keys(agentInputFields).length > 0;
  }, [agentInputFields]);
@@ -271,9 +382,10 @@ export function useAgentRunModal(
    // UI state
    isOpen,
    setIsOpen,
    showScheduleView,

    // Run mode
    defaultRunType: defaultRunType as RunVariant,
    defaultRunType,

    // Form: regular inputs
    inputValues,
@@ -289,6 +401,10 @@ export function useAgentRunModal(
    setPresetName,
    setPresetDescription,

    // Scheduling
    scheduleName,
    cronExpression,

    // Validation/readiness
    allRequiredInputsAreSet,
    missingInputs,
@@ -300,9 +416,15 @@ export function useAgentRunModal(

    // Async states
    isExecuting: executeGraphMutation.isPending,
    isCreatingSchedule: createScheduleMutation.isPending,
    isSettingUpTrigger: setupTriggerMutation.isPending,

    // Actions
    handleRun,
    handleSchedule,
    handleShowSchedule,
    handleGoBack,
    handleSetScheduleName,
    handleSetCronExpression,
  };
}
Some files were not shown because too many files have changed in this diff