Mirror of https://github.com/Significant-Gravitas/AutoGPT.git — synced 2026-01-12 16:48:06 -05:00

Compare commits: contributo...docker-qol (2 commits)

| Author | SHA1 | Date |
|---|---|---|
| | 52de22469f | |
| | c27f163623 | |
.github/workflows/codeql.yml (vendored): 4 changed lines
@@ -13,9 +13,9 @@ name: "CodeQL"
 on:
   push:
-    branches: [ "master", "release-*", "dev" ]
+    branches: [ "master", "release-*" ]
   pull_request:
-    branches: [ "master", "release-*", "dev" ]
+    branches: [ "master", "release-*" ]
   schedule:
     - cron: '15 4 * * 0'
autogpt_platform/.gitignore (vendored): 2 changed lines
@@ -1,2 +0,0 @@
-*.ignore.*
-*.ign.*
autogpt_platform/backend/.gitignore (vendored): 5 changed lines
@@ -5,7 +5,4 @@ dev.db-journal
 build/
 config.json
 secrets/*
-!secrets/.gitkeep
-
-*.ignore.*
-*.ign.*
+!secrets/.gitkeep
backend/blocks/ai_shortform_video_block.py:

@@ -3,12 +3,12 @@ import time
 from enum import Enum
 from typing import Literal

+import requests
 from autogpt_libs.supabase_integration_credentials_store.types import APIKeyCredentials
 from pydantic import SecretStr

 from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
 from backend.data.model import CredentialsField, CredentialsMetaInput, SchemaField
-from backend.util.request import requests

 TEST_CREDENTIALS = APIKeyCredentials(
     id="01234567-89ab-cdef-0123-456789abcdef",
@@ -217,6 +217,7 @@ class AIShortformVideoCreatorBlock(Block):
         url = "https://webhook.site/token"
         headers = {"Accept": "application/json", "Content-Type": "application/json"}
         response = requests.post(url, headers=headers)
+        response.raise_for_status()
         webhook_data = response.json()
         return webhook_data["uuid"], f"https://webhook.site/{webhook_data['uuid']}"

@@ -227,12 +228,14 @@
         logger.debug(
             f"API Response Status Code: {response.status_code}, Content: {response.text}"
         )
+        response.raise_for_status()
         return response.json()

     def check_video_status(self, api_key: SecretStr, pid: str) -> dict:
         url = f"https://www.revid.ai/api/public/v2/status?pid={pid}"
         headers = {"key": api_key.get_secret_value()}
         response = requests.get(url, headers=headers)
+        response.raise_for_status()
         return response.json()

     def wait_for_video(
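For orientation, the webhook step the second hunk touches is a single POST to webhook.site; a minimal standalone sketch of that flow, assuming only what the diff itself shows (the /token endpoint and the "uuid" field in its JSON response):

```python
import requests

def create_webhook() -> tuple[str, str]:
    # webhook.site returns a fresh token (inbox) per POST to /token;
    # its "uuid" doubles as the inbox URL path segment.
    response = requests.post(
        "https://webhook.site/token",
        headers={"Accept": "application/json", "Content-Type": "application/json"},
    )
    response.raise_for_status()  # fail fast on HTTP errors, as the new code does
    uuid = response.json()["uuid"]
    return uuid, f"https://webhook.site/{uuid}"
```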
backend/blocks/basic.py:

@@ -148,12 +148,9 @@ class AgentInputBlock(Block):
             description="The value to be passed as input.",
             default=None,
         )
-        title: str | None = SchemaField(
-            description="The title of the input.", default=None, advanced=True
-        )
-        description: str | None = SchemaField(
+        description: str = SchemaField(
             description="The description of the input.",
-            default=None,
+            default="",
             advanced=True,
         )
         placeholder_values: List[Any] = SchemaField(
@@ -166,16 +163,6 @@ class AgentInputBlock(Block):
             default=False,
             advanced=True,
         )
-        advanced: bool = SchemaField(
-            description="Whether to show the input in the advanced section, if the field is not required.",
-            default=False,
-            advanced=True,
-        )
-        secret: bool = SchemaField(
-            description="Whether the input should be treated as a secret.",
-            default=False,
-            advanced=True,
-        )

     class Output(BlockSchema):
         result: Any = SchemaField(description="The value passed as input.")
@@ -208,7 +195,6 @@ class AgentInputBlock(Block):
             ],
             categories={BlockCategory.INPUT, BlockCategory.BASIC},
-            block_type=BlockType.INPUT,
             static_output=True,
         )

     def run(self, input_data: Input, **kwargs) -> BlockOutput:
@@ -219,25 +205,28 @@ class AgentOutputBlock(Block):
     """
    Records the output of the graph for users to see.

    Attributes:
        recorded_value: The value to be recorded as output.
        name: The name of the output.
        description: The description of the output.
        fmt_string: The format string to be used to format the recorded_value.

    Outputs:
        output: The formatted recorded_value if fmt_string is provided and the recorded_value
                can be formatted, otherwise the raw recorded_value.

    Behavior:
-       If `format` is provided and the `value` is of a type that can be formatted,
-       the block attempts to format the recorded_value using the `format`.
-       If formatting fails or no `format` is provided, the raw `value` is output.
+       If fmt_string is provided and the recorded_value is of a type that can be formatted,
+       the block attempts to format the recorded_value using the fmt_string.
+       If formatting fails or no fmt_string is provided, the raw recorded_value is output.
    """

     class Input(BlockSchema):
-        value: Any = SchemaField(
-            description="The value to be recorded as output.",
-            default=None,
-            advanced=False,
-        )
+        value: Any = SchemaField(description="The value to be recorded as output.")
         name: str = SchemaField(description="The name of the output.")
-        title: str | None = SchemaField(
-            description="The title of the input.", default=None, advanced=True
-        )
-        description: str | None = SchemaField(
+        description: str = SchemaField(
             description="The description of the output.",
-            default=None,
+            default="",
             advanced=True,
         )
         format: str = SchemaField(
@@ -245,16 +234,6 @@ class AgentOutputBlock(Block):
             default="",
             advanced=True,
         )
-        advanced: bool = SchemaField(
-            description="Whether to treat the output as advanced.",
-            default=False,
-            advanced=True,
-        )
-        secret: bool = SchemaField(
-            description="Whether the output should be treated as a secret.",
-            default=False,
-            advanced=True,
-        )

     class Output(BlockSchema):
         output: Any = SchemaField(description="The value recorded as output.")
@@ -292,7 +271,6 @@ class AgentOutputBlock(Block):
             ],
             categories={BlockCategory.OUTPUT, BlockCategory.BASIC},
-            block_type=BlockType.OUTPUT,
             static_output=True,
         )

     def run(self, input_data: Input, **kwargs) -> BlockOutput:
@@ -71,18 +71,11 @@ class ConditionBlock(Block):
         )

     def run(self, input_data: Input, **kwargs) -> BlockOutput:
-        operator = input_data.operator
-
         value1 = input_data.value1
         if isinstance(value1, str):
             value1 = float(value1.strip())

+        operator = input_data.operator
         value2 = input_data.value2
         if isinstance(value2, str):
             value2 = float(value2.strip())

         yes_value = input_data.yes_value if input_data.yes_value is not None else value1
-        no_value = input_data.no_value if input_data.no_value is not None else value2
+        no_value = input_data.no_value if input_data.no_value is not None else value1

         comparison_funcs = {
             ComparisonOperator.EQUAL: lambda a, b: a == b,
@@ -93,11 +86,17 @@ class ConditionBlock(Block):
             ComparisonOperator.LESS_THAN_OR_EQUAL: lambda a, b: a <= b,
         }

-        result = comparison_funcs[operator](value1, value2)
-
-        yield "result", result
-
-        if result:
-            yield "yes_output", yes_value
-        else:
-            yield "no_output", no_value
+        try:
+            result = comparison_funcs[operator](value1, value2)
+
+            yield "result", result
+
+            if result:
+                yield "yes_output", yes_value
+            else:
+                yield "no_output", no_value
+
+        except Exception:
+            yield "result", None
+            yield "yes_output", None
+            yield "no_output", None
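The comparison logic both versions share is a dispatch table keyed by the operator enum. A minimal sketch of the pattern (only the operator names visible in the hunk come from the source; the rest is illustrative):

```python
import operator
from enum import Enum

class ComparisonOperator(Enum):
    EQUAL = "=="
    GREATER_THAN = ">"
    LESS_THAN = "<"

# Map each enum member to a two-argument predicate, then dispatch on it.
comparison_funcs = {
    ComparisonOperator.EQUAL: operator.eq,
    ComparisonOperator.GREATER_THAN: operator.gt,
    ComparisonOperator.LESS_THAN: operator.lt,
}

def compare(op: ComparisonOperator, a: float, b: float) -> bool:
    return comparison_funcs[op](a, b)

assert compare(ComparisonOperator.GREATER_THAN, 3.0, 2.0)
```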
backend/blocks/github/_api.py (deleted):

@@ -1,43 +0,0 @@
-from urllib.parse import urlparse
-
-from backend.blocks.github._auth import GithubCredentials
-from backend.util.request import Requests
-
-
-def _convert_to_api_url(url: str) -> str:
-    """
-    Converts a standard GitHub URL to the corresponding GitHub API URL.
-    Handles repository URLs, issue URLs, pull request URLs, and more.
-    """
-    parsed_url = urlparse(url)
-    path_parts = parsed_url.path.strip("/").split("/")
-
-    if len(path_parts) >= 2:
-        owner, repo = path_parts[0], path_parts[1]
-        api_base = f"https://api.github.com/repos/{owner}/{repo}"
-
-        if len(path_parts) > 2:
-            additional_path = "/".join(path_parts[2:])
-            api_url = f"{api_base}/{additional_path}"
-        else:
-            # Repository base URL
-            api_url = api_base
-    else:
-        raise ValueError("Invalid GitHub URL format.")
-
-    return api_url
-
-
-def _get_headers(credentials: GithubCredentials) -> dict[str, str]:
-    return {
-        "Authorization": credentials.bearer(),
-        "Accept": "application/vnd.github.v3+json",
-    }
-
-
-def get_api(credentials: GithubCredentials) -> Requests:
-    return Requests(
-        trusted_origins=["https://api.github.com", "https://github.com"],
-        extra_url_validator=_convert_to_api_url,
-        extra_headers=_get_headers(credentials),
-    )
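The deleted `_convert_to_api_url` helper is a pure string transform, so its behavior is easy to pin down with an example. A sketch that mirrors the function body above (URLs illustrative):

```python
from urllib.parse import urlparse

def convert_to_api_url(url: str) -> str:
    # Rewrite a github.com path onto the api.github.com/repos namespace.
    parts = urlparse(url).path.strip("/").split("/")
    if len(parts) < 2:
        raise ValueError("Invalid GitHub URL format.")
    owner, repo, *rest = parts
    api_base = f"https://api.github.com/repos/{owner}/{repo}"
    return f"{api_base}/{'/'.join(rest)}" if rest else api_base

assert (
    convert_to_api_url("https://github.com/owner/repo/issues/42")
    == "https://api.github.com/repos/owner/repo/issues/42"
)
```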
backend/blocks/github/issues.py:

@@ -1,11 +1,9 @@
-from urllib.parse import urlparse
-
+import requests
 from typing_extensions import TypedDict

 from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
 from backend.data.model import SchemaField

-from ._api import get_api
 from ._auth import (
     TEST_CREDENTIALS,
     TEST_CREDENTIALS_INPUT,
@@ -15,10 +13,6 @@ from ._auth import (
 )


-def is_github_url(url: str) -> bool:
-    return urlparse(url).netloc == "github.com"
-
-
 # --8<-- [start:GithubCommentBlockExample]
 class GithubCommentBlock(Block):
     class Input(BlockSchema):
@@ -68,10 +62,27 @@ class GithubCommentBlock(Block):
     def post_comment(
         credentials: GithubCredentials, issue_url: str, body_text: str
     ) -> tuple[int, str]:
-        api = get_api(credentials)
+        if "/pull/" in issue_url:
+            api_url = (
+                issue_url.replace("github.com", "api.github.com/repos").replace(
+                    "/pull/", "/issues/"
+                )
+                + "/comments"
+            )
+        else:
+            api_url = (
+                issue_url.replace("github.com", "api.github.com/repos") + "/comments"
+            )
+
+        headers = {
+            "Authorization": credentials.bearer(),
+            "Accept": "application/vnd.github.v3+json",
+        }
         data = {"body": body_text}
-        comments_url = issue_url + "/comments"
-        response = api.post(comments_url, json=data)
+
+        response = requests.post(api_url, headers=headers, json=data)
+        response.raise_for_status()

         comment = response.json()
         return comment["id"], comment["html_url"]
@@ -145,10 +156,16 @@ class GithubMakeIssueBlock(Block):
     def create_issue(
         credentials: GithubCredentials, repo_url: str, title: str, body: str
     ) -> tuple[int, str]:
-        api = get_api(credentials)
+        api_url = repo_url.replace("github.com", "api.github.com/repos") + "/issues"
+        headers = {
+            "Authorization": credentials.bearer(),
+            "Accept": "application/vnd.github.v3+json",
+        }
         data = {"title": title, "body": body}
-        issues_url = repo_url + "/issues"
-        response = api.post(issues_url, json=data)
+
+        response = requests.post(api_url, headers=headers, json=data)
+        response.raise_for_status()

         issue = response.json()
         return issue["number"], issue["html_url"]
@@ -215,12 +232,21 @@ class GithubReadIssueBlock(Block):
     def read_issue(
         credentials: GithubCredentials, issue_url: str
     ) -> tuple[str, str, str]:
-        api = get_api(credentials)
-        response = api.get(issue_url)
+        api_url = issue_url.replace("github.com", "api.github.com/repos")
+
+        headers = {
+            "Authorization": credentials.bearer(),
+            "Accept": "application/vnd.github.v3+json",
+        }
+
+        response = requests.get(api_url, headers=headers)
+        response.raise_for_status()

         data = response.json()
         title = data.get("title", "No title found")
         body = data.get("body", "No body content found")
         user = data.get("user", {}).get("login", "No user found")

         return title, body, user

     def run(
@@ -292,13 +318,20 @@ class GithubListIssuesBlock(Block):
     def list_issues(
         credentials: GithubCredentials, repo_url: str
     ) -> list[Output.IssueItem]:
-        api = get_api(credentials)
-        issues_url = repo_url + "/issues"
-        response = api.get(issues_url)
+        api_url = repo_url.replace("github.com", "api.github.com/repos") + "/issues"
+        headers = {
+            "Authorization": credentials.bearer(),
+            "Accept": "application/vnd.github.v3+json",
+        }
+
+        response = requests.get(api_url, headers=headers)
+        response.raise_for_status()

         data = response.json()
         issues: list[GithubListIssuesBlock.Output.IssueItem] = [
             {"title": issue["title"], "url": issue["html_url"]} for issue in data
         ]

         return issues

     def run(
@@ -352,10 +385,28 @@ class GithubAddLabelBlock(Block):

     @staticmethod
     def add_label(credentials: GithubCredentials, issue_url: str, label: str) -> str:
-        api = get_api(credentials)
+        # Convert the provided GitHub URL to the API URL
+        if "/pull/" in issue_url:
+            api_url = (
+                issue_url.replace("github.com", "api.github.com/repos").replace(
+                    "/pull/", "/issues/"
+                )
+                + "/labels"
+            )
+        else:
+            api_url = (
+                issue_url.replace("github.com", "api.github.com/repos") + "/labels"
+            )
+
+        headers = {
+            "Authorization": credentials.bearer(),
+            "Accept": "application/vnd.github.v3+json",
+        }
         data = {"labels": [label]}
-        labels_url = issue_url + "/labels"
-        api.post(labels_url, json=data)
+
+        response = requests.post(api_url, headers=headers, json=data)
+        response.raise_for_status()

         return "Label added successfully"

     def run(
@@ -412,9 +463,31 @@ class GithubRemoveLabelBlock(Block):

     @staticmethod
     def remove_label(credentials: GithubCredentials, issue_url: str, label: str) -> str:
-        api = get_api(credentials)
-        label_url = issue_url + f"/labels/{label}"
-        api.delete(label_url)
+        # Convert the provided GitHub URL to the API URL
+        if "/pull/" in issue_url:
+            api_url = (
+                issue_url.replace("github.com", "api.github.com/repos").replace(
+                    "/pull/", "/issues/"
+                )
+                + f"/labels/{label}"
+            )
+        else:
+            api_url = (
+                issue_url.replace("github.com", "api.github.com/repos")
+                + f"/labels/{label}"
+            )
+
+        # Log the constructed API URL for debugging
+        print(f"Constructed API URL: {api_url}")
+
+        headers = {
+            "Authorization": credentials.bearer(),
+            "Accept": "application/vnd.github.v3+json",
+        }
+
+        response = requests.delete(api_url, headers=headers)
+        response.raise_for_status()

         return "Label removed successfully"

     def run(
@@ -477,10 +550,23 @@ class GithubAssignIssueBlock(Block):
         issue_url: str,
         assignee: str,
     ) -> str:
-        api = get_api(credentials)
-        assignees_url = issue_url + "/assignees"
+        # Extracting repo path and issue number from the issue URL
+        repo_path, issue_number = issue_url.replace("https://github.com/", "").split(
+            "/issues/"
+        )
+        api_url = (
+            f"https://api.github.com/repos/{repo_path}/issues/{issue_number}/assignees"
+        )
+
+        headers = {
+            "Authorization": credentials.bearer(),
+            "Accept": "application/vnd.github.v3+json",
+        }
         data = {"assignees": [assignee]}
-        api.post(assignees_url, json=data)
+
+        response = requests.post(api_url, headers=headers, json=data)
+        response.raise_for_status()

         return "Issue assigned successfully"

     def run(
@@ -543,10 +629,23 @@ class GithubUnassignIssueBlock(Block):
         issue_url: str,
         assignee: str,
     ) -> str:
-        api = get_api(credentials)
-        assignees_url = issue_url + "/assignees"
+        # Extracting repo path and issue number from the issue URL
+        repo_path, issue_number = issue_url.replace("https://github.com/", "").split(
+            "/issues/"
+        )
+        api_url = (
+            f"https://api.github.com/repos/{repo_path}/issues/{issue_number}/assignees"
+        )
+
+        headers = {
+            "Authorization": credentials.bearer(),
+            "Accept": "application/vnd.github.v3+json",
+        }
        data = {"assignees": [assignee]}
-        api.delete(assignees_url, json=data)
+
+        response = requests.delete(api_url, headers=headers, json=data)
+        response.raise_for_status()

         return "Issue unassigned successfully"

     def run(
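One REST-API subtlety drives the repeated "/pull/" special-casing above: GitHub files PR comments and labels under the /issues/ namespace, so pull-request URLs must be rewritten before hitting the API. A quick illustrative check (hypothetical URL):

```python
def to_comments_api_url(issue_url: str) -> str:
    # PR comments live under /issues/ in the REST API.
    if "/pull/" in issue_url:
        issue_url = issue_url.replace("/pull/", "/issues/")
    return issue_url.replace("github.com", "api.github.com/repos") + "/comments"

assert (
    to_comments_api_url("https://github.com/owner/repo/pull/7")
    == "https://api.github.com/repos/owner/repo/issues/7/comments"
)
```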
backend/blocks/github/pull_requests.py:

@@ -1,9 +1,9 @@
+import requests
 from typing_extensions import TypedDict

 from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
 from backend.data.model import SchemaField

-from ._api import get_api
 from ._auth import (
     TEST_CREDENTIALS,
     TEST_CREDENTIALS_INPUT,
@@ -64,13 +64,20 @@ class GithubListPullRequestsBlock(Block):

     @staticmethod
     def list_prs(credentials: GithubCredentials, repo_url: str) -> list[Output.PRItem]:
-        api = get_api(credentials)
-        pulls_url = repo_url + "/pulls"
-        response = api.get(pulls_url)
+        api_url = repo_url.replace("github.com", "api.github.com/repos") + "/pulls"
+        headers = {
+            "Authorization": credentials.bearer(),
+            "Accept": "application/vnd.github.v3+json",
+        }
+
+        response = requests.get(api_url, headers=headers)
+        response.raise_for_status()

         data = response.json()
         pull_requests: list[GithubListPullRequestsBlock.Output.PRItem] = [
             {"title": pr["title"], "url": pr["html_url"]} for pr in data
         ]

         return pull_requests

     def run(
@@ -103,11 +110,7 @@
             placeholder="Enter the pull request body",
         )
         head: str = SchemaField(
-            description=(
-                "The name of the branch where your changes are implemented. "
-                "For cross-repository pull requests in the same network, "
-                "namespace head with a user like this: username:branch."
-            ),
+            description="The name of the branch where your changes are implemented. For cross-repository pull requests in the same network, namespace head with a user like this: username:branch.",
             placeholder="Enter the head branch",
         )
         base: str = SchemaField(
@@ -159,10 +162,17 @@
         head: str,
         base: str,
     ) -> tuple[int, str]:
-        api = get_api(credentials)
-        pulls_url = repo_url + "/pulls"
+        repo_path = repo_url.replace("https://github.com/", "")
+        api_url = f"https://api.github.com/repos/{repo_path}/pulls"
+        headers = {
+            "Authorization": credentials.bearer(),
+            "Accept": "application/vnd.github.v3+json",
+        }
         data = {"title": title, "body": body, "head": head, "base": base}
-        response = api.post(pulls_url, json=data)
+
+        response = requests.post(api_url, headers=headers, json=data)
+        response.raise_for_status()

         pr_data = response.json()
         return pr_data["number"], pr_data["html_url"]
@@ -184,8 +194,13 @@
             )
             yield "number", number
             yield "url", url
-        except Exception as e:
-            yield "error", str(e)
+        except requests.exceptions.HTTPError as http_err:
+            if http_err.response.status_code == 422:
+                error_details = http_err.response.json()
+                error_message = error_details.get("message", "Unknown error")
+            else:
+                error_message = str(http_err)
+            raise RuntimeError(f"Failed to create pull request: {error_message}")


 class GithubReadPullRequestBlock(Block):
@@ -240,21 +255,42 @@ class GithubReadPullRequestBlock(Block):

     @staticmethod
     def read_pr(credentials: GithubCredentials, pr_url: str) -> tuple[str, str, str]:
-        api = get_api(credentials)
-        # Adjust the URL to access the issue endpoint for PR metadata
-        issue_url = pr_url.replace("/pull/", "/issues/")
-        response = api.get(issue_url)
+        api_url = pr_url.replace("github.com", "api.github.com/repos").replace(
+            "/pull/", "/issues/"
+        )
+
+        headers = {
+            "Authorization": credentials.bearer(),
+            "Accept": "application/vnd.github.v3+json",
+        }
+
+        response = requests.get(api_url, headers=headers)
+        response.raise_for_status()

         data = response.json()
         title = data.get("title", "No title found")
         body = data.get("body", "No body content found")
         author = data.get("user", {}).get("login", "No user found")

         return title, body, author

     @staticmethod
     def read_pr_changes(credentials: GithubCredentials, pr_url: str) -> str:
-        api = get_api(credentials)
-        files_url = pr_url + "/files"
-        response = api.get(files_url)
+        api_url = (
+            pr_url.replace("github.com", "api.github.com/repos").replace(
+                "/pull/", "/pulls/"
+            )
+            + "/files"
+        )
+
+        headers = {
+            "Authorization": credentials.bearer(),
+            "Accept": "application/vnd.github.v3+json",
+        }
+
+        response = requests.get(api_url, headers=headers)
+        response.raise_for_status()

         files = response.json()
         changes = []
         for file in files:
@@ -262,6 +298,7 @@ class GithubReadPullRequestBlock(Block):
             patch = file.get("patch")
             if filename and patch:
                 changes.append(f"File: {filename}\n{patch}")
+
         return "\n\n".join(changes)

     def run(
@@ -330,10 +367,23 @@
     def assign_reviewer(
         credentials: GithubCredentials, pr_url: str, reviewer: str
     ) -> str:
-        api = get_api(credentials)
-        reviewers_url = pr_url + "/requested_reviewers"
+        # Convert the PR URL to the appropriate API endpoint
+        api_url = (
+            pr_url.replace("github.com", "api.github.com/repos").replace(
+                "/pull/", "/pulls/"
+            )
+            + "/requested_reviewers"
+        )
+
+        headers = {
+            "Authorization": credentials.bearer(),
+            "Accept": "application/vnd.github.v3+json",
+        }
         data = {"reviewers": [reviewer]}
-        api.post(reviewers_url, json=data)
+
+        response = requests.post(api_url, headers=headers, json=data)
+        response.raise_for_status()

         return "Reviewer assigned successfully"

     def run(
@@ -350,8 +400,17 @@ class GithubAssignPRReviewerBlock(Block):
                 input_data.reviewer,
             )
             yield "status", status
-        except Exception as e:
-            yield "error", str(e)
+        except requests.exceptions.HTTPError as http_err:
+            if http_err.response.status_code == 422:
+                error_msg = (
+                    "Failed to assign reviewer: "
+                    f"The reviewer '{input_data.reviewer}' may not have permission "
+                    "or the pull request is not in a valid state. "
+                    f"Detailed error: {http_err.response.text}"
+                )
+            else:
+                error_msg = f"HTTP error: {http_err} - {http_err.response.text}"
+            raise RuntimeError(error_msg)


 class GithubUnassignPRReviewerBlock(Block):
@@ -397,10 +456,21 @@ class GithubUnassignPRReviewerBlock(Block):
     def unassign_reviewer(
         credentials: GithubCredentials, pr_url: str, reviewer: str
     ) -> str:
-        api = get_api(credentials)
-        reviewers_url = pr_url + "/requested_reviewers"
+        api_url = (
+            pr_url.replace("github.com", "api.github.com/repos").replace(
+                "/pull/", "/pulls/"
+            )
+            + "/requested_reviewers"
+        )
+        headers = {
+            "Authorization": credentials.bearer(),
+            "Accept": "application/vnd.github.v3+json",
+        }
         data = {"reviewers": [reviewer]}
-        api.delete(reviewers_url, json=data)
+
+        response = requests.delete(api_url, headers=headers, json=data)
+        response.raise_for_status()

         return "Reviewer unassigned successfully"

     def run(
@@ -410,15 +480,12 @@ class GithubUnassignPRReviewerBlock(Block):
         credentials: GithubCredentials,
         **kwargs,
     ) -> BlockOutput:
-        try:
-            status = self.unassign_reviewer(
-                credentials,
-                input_data.pr_url,
-                input_data.reviewer,
-            )
-            yield "status", status
-        except Exception as e:
-            yield "error", str(e)
+        status = self.unassign_reviewer(
+            credentials,
+            input_data.pr_url,
+            input_data.reviewer,
+        )
+        yield "status", status


 class GithubListPRReviewersBlock(Block):
@@ -477,14 +544,26 @@ class GithubListPRReviewersBlock(Block):
     def list_reviewers(
         credentials: GithubCredentials, pr_url: str
     ) -> list[Output.ReviewerItem]:
-        api = get_api(credentials)
-        reviewers_url = pr_url + "/requested_reviewers"
-        response = api.get(reviewers_url)
+        api_url = (
+            pr_url.replace("github.com", "api.github.com/repos").replace(
+                "/pull/", "/pulls/"
+            )
+            + "/requested_reviewers"
+        )
+        headers = {
+            "Authorization": credentials.bearer(),
+            "Accept": "application/vnd.github.v3+json",
+        }
+
+        response = requests.get(api_url, headers=headers)
+        response.raise_for_status()

         data = response.json()
         reviewers: list[GithubListPRReviewersBlock.Output.ReviewerItem] = [
             {"username": reviewer["login"], "url": reviewer["html_url"]}
             for reviewer in data.get("users", [])
         ]

         return reviewers

     def run(
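The move from a blanket `except Exception` to `requests.exceptions.HTTPError` matters because GitHub returns a structured JSON body on 422 validation failures. A minimal sketch of that pattern (endpoint and payload illustrative):

```python
import requests

def post_with_422_details(url: str, payload: dict, headers: dict) -> dict:
    response = requests.post(url, headers=headers, json=payload)
    try:
        response.raise_for_status()
    except requests.exceptions.HTTPError as http_err:
        if http_err.response.status_code == 422:
            # GitHub's validation errors carry a machine-readable message.
            message = http_err.response.json().get("message", "Unknown error")
            raise RuntimeError(f"Validation failed: {message}") from http_err
        raise
    return response.json()
```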
backend/blocks/github/repo.py:

@@ -1,11 +1,11 @@
 import base64

+import requests
 from typing_extensions import TypedDict

 from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
 from backend.data.model import SchemaField

-from ._api import get_api
 from ._auth import (
     TEST_CREDENTIALS,
     TEST_CREDENTIALS_INPUT,
@@ -68,11 +68,17 @@ class GithubListTagsBlock(Block):
     def list_tags(
         credentials: GithubCredentials, repo_url: str
     ) -> list[Output.TagItem]:
-        api = get_api(credentials)
-        tags_url = repo_url + "/tags"
-        response = api.get(tags_url)
-        data = response.json()
+        repo_path = repo_url.replace("https://github.com/", "")
+        api_url = f"https://api.github.com/repos/{repo_path}/tags"
+        headers = {
+            "Authorization": credentials.bearer(),
+            "Accept": "application/vnd.github.v3+json",
+        }
+
+        response = requests.get(api_url, headers=headers)
+        response.raise_for_status()
+
+        data = response.json()
         tags: list[GithubListTagsBlock.Output.TagItem] = [
             {
                 "name": tag["name"],
@@ -80,6 +86,7 @@ class GithubListTagsBlock(Block):
             }
             for tag in data
         ]
+
         return tags

     def run(
@@ -150,18 +157,20 @@ class GithubListBranchesBlock(Block):
     def list_branches(
         credentials: GithubCredentials, repo_url: str
     ) -> list[Output.BranchItem]:
-        api = get_api(credentials)
-        branches_url = repo_url + "/branches"
-        response = api.get(branches_url)
-        data = response.json()
-        repo_path = repo_url.replace("https://github.com/", "")
-        branches: list[GithubListBranchesBlock.Output.BranchItem] = [
-            {
-                "name": branch["name"],
-                "url": f"https://github.com/{repo_path}/tree/{branch['name']}",
-            }
-            for branch in data
-        ]
+        api_url = repo_url.replace("github.com", "api.github.com/repos") + "/branches"
+        headers = {
+            "Authorization": credentials.bearer(),
+            "Accept": "application/vnd.github.v3+json",
+        }
+
+        response = requests.get(api_url, headers=headers)
+        response.raise_for_status()
+
+        data = response.json()
+        branches: list[GithubListBranchesBlock.Output.BranchItem] = [
+            {"name": branch["name"], "url": branch["commit"]["url"]} for branch in data
+        ]

         return branches

     def run(
@@ -237,8 +246,6 @@ class GithubListDiscussionsBlock(Block):
     def list_discussions(
         credentials: GithubCredentials, repo_url: str, num_discussions: int
     ) -> list[Output.DiscussionItem]:
-        api = get_api(credentials)
-        # GitHub GraphQL API endpoint is different; we'll use api.post with custom URL
         repo_path = repo_url.replace("https://github.com/", "")
         owner, repo = repo_path.split("/")
         query = """
@@ -254,15 +261,24 @@ class GithubListDiscussionsBlock(Block):
         }
         """
         variables = {"owner": owner, "repo": repo, "num": num_discussions}
-        response = api.post(
+        headers = {
+            "Authorization": credentials.bearer(),
+            "Accept": "application/vnd.github.v3+json",
+        }
+
+        response = requests.post(
             "https://api.github.com/graphql",
             json={"query": query, "variables": variables},
+            headers=headers,
         )
+        response.raise_for_status()

         data = response.json()
         discussions: list[GithubListDiscussionsBlock.Output.DiscussionItem] = [
             {"title": discussion["title"], "url": discussion["url"]}
             for discussion in data["data"]["repository"]["discussions"]["nodes"]
         ]

         return discussions

     def run(
@@ -332,13 +348,21 @@ class GithubListReleasesBlock(Block):
     def list_releases(
         credentials: GithubCredentials, repo_url: str
     ) -> list[Output.ReleaseItem]:
-        api = get_api(credentials)
-        releases_url = repo_url + "/releases"
-        response = api.get(releases_url)
+        repo_path = repo_url.replace("https://github.com/", "")
+        api_url = f"https://api.github.com/repos/{repo_path}/releases"
+        headers = {
+            "Authorization": credentials.bearer(),
+            "Accept": "application/vnd.github.v3+json",
+        }
+
+        response = requests.get(api_url, headers=headers)
+        response.raise_for_status()

         data = response.json()
         releases: list[GithubListReleasesBlock.Output.ReleaseItem] = [
             {"name": release["name"], "url": release["html_url"]} for release in data
         ]

         return releases

     def run(
@@ -408,9 +432,16 @@ class GithubReadFileBlock(Block):
     def read_file(
         credentials: GithubCredentials, repo_url: str, file_path: str, branch: str
     ) -> tuple[str, int]:
-        api = get_api(credentials)
-        content_url = repo_url + f"/contents/{file_path}?ref={branch}"
-        response = api.get(content_url)
+        repo_path = repo_url.replace("https://github.com/", "")
+        api_url = f"https://api.github.com/repos/{repo_path}/contents/{file_path}?ref={branch}"
+        headers = {
+            "Authorization": credentials.bearer(),
+            "Accept": "application/vnd.github.v3+json",
+        }
+
+        response = requests.get(api_url, headers=headers)
+        response.raise_for_status()

         content = response.json()

         if isinstance(content, list):
@@ -518,33 +549,46 @@ class GithubReadFolderBlock(Block):
     def read_folder(
         credentials: GithubCredentials, repo_url: str, folder_path: str, branch: str
     ) -> tuple[list[Output.FileEntry], list[Output.DirEntry]]:
-        api = get_api(credentials)
-        contents_url = repo_url + f"/contents/{folder_path}?ref={branch}"
-        response = api.get(contents_url)
+        repo_path = repo_url.replace("https://github.com/", "")
+        api_url = f"https://api.github.com/repos/{repo_path}/contents/{folder_path}?ref={branch}"
+        headers = {
+            "Authorization": credentials.bearer(),
+            "Accept": "application/vnd.github.v3+json",
+        }
+
+        response = requests.get(api_url, headers=headers)
+        response.raise_for_status()

         content = response.json()

-        if isinstance(content, list):
-            # Multiple entries of different types exist at this path
-            if not (dir := next((d for d in content if d["type"] == "dir"), None)):
-                raise TypeError("Not a folder")
-            content = dir
-
-        if content["type"] != "dir":
+        if not isinstance(content, list):
             raise TypeError("Not a folder")

-        return (
-            [
-                GithubReadFolderBlock.Output.FileEntry(
-                    name=entry["name"],
-                    path=entry["path"],
-                    size=entry["size"],
-                )
-                for entry in content["entries"]
-                if entry["type"] == "file"
-            ],
-            [
-                GithubReadFolderBlock.Output.DirEntry(
-                    name=entry["name"],
-                    path=entry["path"],
-                )
-                for entry in content["entries"]
-                if entry["type"] == "dir"
-            ],
-        )
+        files = [
+            GithubReadFolderBlock.Output.FileEntry(
+                name=entry["name"],
+                path=entry["path"],
+                size=entry["size"],
+            )
+            for entry in content
+            if entry["type"] == "file"
+        ]
+        dirs = [
+            GithubReadFolderBlock.Output.DirEntry(
+                name=entry["name"],
+                path=entry["path"],
+            )
+            for entry in content
+            if entry["type"] == "dir"
+        ]
+
+        return files, dirs

     def run(
         self,
@@ -612,16 +656,26 @@ class GithubMakeBranchBlock(Block):
         new_branch: str,
         source_branch: str,
     ) -> str:
-        api = get_api(credentials)
-        # Get the SHA of the source branch
-        ref_url = repo_url + f"/git/refs/heads/{source_branch}"
-        response = api.get(ref_url)
+        repo_path = repo_url.replace("https://github.com/", "")
+        ref_api_url = (
+            f"https://api.github.com/repos/{repo_path}/git/refs/heads/{source_branch}"
+        )
+        headers = {
+            "Authorization": credentials.bearer(),
+            "Accept": "application/vnd.github.v3+json",
+        }
+
+        response = requests.get(ref_api_url, headers=headers)
+        response.raise_for_status()
+
         sha = response.json()["object"]["sha"]

         # Create the new branch
-        create_ref_url = repo_url + "/git/refs"
+        create_branch_api_url = f"https://api.github.com/repos/{repo_path}/git/refs"
         data = {"ref": f"refs/heads/{new_branch}", "sha": sha}
-        response = api.post(create_ref_url, json=data)
+
+        response = requests.post(create_branch_api_url, headers=headers, json=data)
+        response.raise_for_status()

         return "Branch created successfully"

     def run(
@@ -681,9 +735,16 @@ class GithubDeleteBranchBlock(Block):
     def delete_branch(
         credentials: GithubCredentials, repo_url: str, branch: str
     ) -> str:
-        api = get_api(credentials)
-        ref_url = repo_url + f"/git/refs/heads/{branch}"
-        api.delete(ref_url)
+        repo_path = repo_url.replace("https://github.com/", "")
+        api_url = f"https://api.github.com/repos/{repo_path}/git/refs/heads/{branch}"
+        headers = {
+            "Authorization": credentials.bearer(),
+            "Accept": "application/vnd.github.v3+json",
+        }
+
+        response = requests.delete(api_url, headers=headers)
+        response.raise_for_status()

         return "Branch deleted successfully"

     def run(
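The `read_folder` rewrite hinges on a contents-API detail: GET /repos/{owner}/{repo}/contents/{path} returns a JSON list when the path is a directory and a single JSON object when it is a file, so an isinstance check on the decoded body is what tells the two apart. A sketch (hypothetical helper, token-based auth assumed):

```python
import requests

def fetch_folder_listing(owner: str, repo: str, path: str, token: str) -> list[dict]:
    # A list response means "directory"; a dict means "single file".
    response = requests.get(
        f"https://api.github.com/repos/{owner}/{repo}/contents/{path}",
        headers={
            "Authorization": f"Bearer {token}",
            "Accept": "application/vnd.github.v3+json",
        },
    )
    response.raise_for_status()
    content = response.json()
    if not isinstance(content, list):
        raise TypeError("Not a folder")
    return content
```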
backend/blocks/helpers/http.py (deleted):

@@ -1,14 +0,0 @@
-from typing import Any, Optional
-
-from backend.util.request import requests
-
-
-class GetRequest:
-    @classmethod
-    def get_request(
-        cls, url: str, headers: Optional[dict] = None, json: bool = False
-    ) -> Any:
-        if headers is None:
-            headers = {}
-        response = requests.get(url, headers=headers)
-        return response.json() if json else response.text
backend/blocks/http.py:

@@ -1,10 +1,10 @@
 import json
 from enum import Enum
 from typing import Any

+import requests
+
 from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
 from backend.data.model import SchemaField
-from backend.util.request import requests


 class HttpMethod(Enum):
@@ -31,14 +31,9 @@ class SendWebRequestBlock(Block):
             description="The headers to include in the request",
             default={},
         )
-        json_format: bool = SchemaField(
-            title="JSON format",
-            description="Whether to send and receive body as JSON",
-            default=True,
-        )
-        body: Any = SchemaField(
+        body: object = SchemaField(
             description="The body of the request",
-            default=None,
+            default={},
         )

     class Output(BlockSchema):
@@ -63,16 +58,13 @@ class SendWebRequestBlock(Block):
             input_data.method.value,
             input_data.url,
             headers=input_data.headers,
-            json=input_data.body if input_data.json_format else None,
-            data=input_data.body if not input_data.json_format else None,
+            json=input_data.body,
         )
-        result = response.json() if input_data.json_format else response.text
-
         if response.status_code // 100 == 2:
-            yield "response", result
+            yield "response", response.json()
         elif response.status_code // 100 == 4:
-            yield "client_error", result
+            yield "client_error", response.json()
         elif response.status_code // 100 == 5:
-            yield "server_error", result
+            yield "server_error", response.json()
         else:
             raise ValueError(f"Unexpected status code: {response.status_code}")
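The removed `json_format` flag is what let one block send either a JSON body or a raw payload and decode the response accordingly. A compact sketch of the old dispatch (names illustrative):

```python
import requests

def send(method: str, url: str, body, json_format: bool = True):
    # json_format=True serializes body as JSON; otherwise it is sent
    # as a raw/form-encoded payload via the data= parameter.
    response = requests.request(
        method,
        url,
        json=body if json_format else None,
        data=body if not json_format else None,
    )
    return response.json() if json_format else response.text
```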
backend/blocks/ideogram.py:

@@ -1,13 +1,12 @@
 from enum import Enum
 from typing import Any, Dict, Literal, Optional

+import requests
 from autogpt_libs.supabase_integration_credentials_store.types import APIKeyCredentials
 from pydantic import SecretStr
-from requests.exceptions import RequestException

 from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
 from backend.data.model import CredentialsField, CredentialsMetaInput, SchemaField
-from backend.util.request import requests

 TEST_CREDENTIALS = APIKeyCredentials(
     id="01234567-89ab-cdef-0123-456789abcdef",
@@ -243,8 +242,9 @@ class IdeogramModelBlock(Block):

         try:
             response = requests.post(url, json=data, headers=headers)
+            response.raise_for_status()
             return response.json()["data"][0]["url"]
-        except RequestException as e:
+        except requests.exceptions.RequestException as e:
             raise Exception(f"Failed to fetch image: {str(e)}")

     def upscale_image(self, api_key: SecretStr, image_url: str):
@@ -256,6 +256,7 @@ class IdeogramModelBlock(Block):
         try:
             # Step 1: Download the image from the provided URL
             image_response = requests.get(image_url)
+            image_response.raise_for_status()

             # Step 2: Send the downloaded image to the upscale API
             files = {
@@ -271,7 +272,8 @@ class IdeogramModelBlock(Block):
                 files=files,
             )

+            response.raise_for_status()
             return response.json()["data"][0]["url"]

-        except RequestException as e:
+        except requests.exceptions.RequestException as e:
             raise Exception(f"Failed to upscale image: {str(e)}")
backend/blocks/jina/_auth.py:

@@ -11,20 +11,6 @@ JinaCredentialsInput = CredentialsMetaInput[
     Literal["api_key"],
 ]

-TEST_CREDENTIALS = APIKeyCredentials(
-    id="01234567-89ab-cdef-0123-456789abcdef",
-    provider="jina",
-    api_key=SecretStr("mock-jina-api-key"),
-    title="Mock Jina API key",
-    expires_at=None,
-)
-TEST_CREDENTIALS_INPUT = {
-    "provider": TEST_CREDENTIALS.provider,
-    "id": TEST_CREDENTIALS.id,
-    "type": TEST_CREDENTIALS.type,
-    "title": TEST_CREDENTIALS.type,
-}
-

 def JinaCredentialsField() -> JinaCredentialsInput:
     """
backend/blocks/jina/chunking.py:

@@ -1,3 +1,5 @@
+import requests
+
 from backend.blocks.jina._auth import (
     JinaCredentials,
     JinaCredentialsField,
@@ -5,7 +7,6 @@ from backend.blocks.jina._auth import (
 )
 from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
 from backend.data.model import SchemaField
-from backend.util.request import requests


 class JinaChunkingBlock(Block):
@@ -56,6 +57,7 @@ class JinaChunkingBlock(Block):
         }

         response = requests.post(url, headers=headers, json=data)
+        response.raise_for_status()
         result = response.json()

         all_chunks.extend(result.get("chunks", []))
backend/blocks/jina/embeddings.py:

@@ -1,3 +1,5 @@
+import requests
+
 from backend.blocks.jina._auth import (
     JinaCredentials,
     JinaCredentialsField,
@@ -5,7 +7,6 @@ from backend.blocks.jina._auth import (
 )
 from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
 from backend.data.model import SchemaField
-from backend.util.request import requests


 class JinaEmbeddingBlock(Block):
backend/blocks/jina/search.py (deleted):

@@ -1,57 +0,0 @@
-from groq._utils._utils import quote
-
-from backend.blocks.jina._auth import (
-    TEST_CREDENTIALS,
-    TEST_CREDENTIALS_INPUT,
-    JinaCredentials,
-    JinaCredentialsField,
-    JinaCredentialsInput,
-)
-from backend.blocks.search import GetRequest
-from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
-from backend.data.model import SchemaField
-
-
-class SearchTheWebBlock(Block, GetRequest):
-    class Input(BlockSchema):
-        credentials: JinaCredentialsInput = JinaCredentialsField()
-        query: str = SchemaField(description="The search query to search the web for")
-
-    class Output(BlockSchema):
-        results: str = SchemaField(
-            description="The search results including content from top 5 URLs"
-        )
-        error: str = SchemaField(description="Error message if the search fails")
-
-    def __init__(self):
-        super().__init__(
-            id="87840993-2053-44b7-8da4-187ad4ee518c",
-            description="This block searches the internet for the given search query.",
-            categories={BlockCategory.SEARCH},
-            input_schema=SearchTheWebBlock.Input,
-            output_schema=SearchTheWebBlock.Output,
-            test_input={
-                "credentials": TEST_CREDENTIALS_INPUT,
-                "query": "Artificial Intelligence",
-            },
-            test_credentials=TEST_CREDENTIALS,
-            test_output=("results", "search content"),
-            test_mock={"get_request": lambda *args, **kwargs: "search content"},
-        )
-
-    def run(
-        self, input_data: Input, *, credentials: JinaCredentials, **kwargs
-    ) -> BlockOutput:
-        # Encode the search query
-        encoded_query = quote(input_data.query)
-        headers = {
-            "Content-Type": "application/json",
-            "Authorization": f"Bearer {credentials.api_key.get_secret_value()}",
-        }
-
-        # Prepend the Jina Search URL to the encoded query
-        jina_search_url = f"https://s.jina.ai/{encoded_query}"
-        results = self.get_request(jina_search_url, headers=headers, json=False)
-
-        # Output the search results
-        yield "results", results
backend/blocks/llm.py:

@@ -311,15 +311,8 @@ class AIStructuredResponseGeneratorBlock(Block):
             max_tokens=max_tokens or 8192,
         )

-        if not resp.content:
-            raise ValueError("No content returned from Anthropic.")
-
         return (
-            (
-                resp.content[0].name
-                if isinstance(resp.content[0], anthropic.types.ToolUseBlock)
-                else resp.content[0].text
-            ),
+            resp.content[0].text if resp.content else "",
             resp.usage.input_tokens,
             resp.usage.output_tokens,
         )
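For readers unfamiliar with the Anthropic SDK objects involved: a message's `content` is a list of typed blocks, and a tool-use block carries a `name` rather than `text`, which is exactly what the removed branch checked for. A hedged sketch of the distinction (types as exposed by the `anthropic` package used above):

```python
import anthropic

def first_content_as_text(resp: anthropic.types.Message) -> str:
    # Guard empty responses, then branch on the block type:
    # ToolUseBlock exposes .name (the tool invoked); text blocks expose .text.
    if not resp.content:
        raise ValueError("No content returned from Anthropic.")
    block = resp.content[0]
    if isinstance(block, anthropic.types.ToolUseBlock):
        return block.name
    return block.text
```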
backend/blocks/reddit.py:

@@ -1,6 +1,7 @@
 from enum import Enum
 from typing import List, Literal

+import requests
 from autogpt_libs.supabase_integration_credentials_store.types import APIKeyCredentials
 from pydantic import SecretStr

@@ -12,7 +13,6 @@ from backend.data.model import (
     SchemaField,
     SecretField,
 )
-from backend.util.request import requests

 TEST_CREDENTIALS = APIKeyCredentials(
     id="01234567-89ab-cdef-0123-456789abcdef",
@@ -115,7 +115,7 @@ class GetRedditPostsBlock(Block):
     def get_posts(input_data: Input) -> Iterator[praw.reddit.Submission]:
         client = get_praw(input_data.creds)
         subreddit = client.subreddit(input_data.subreddit)
-        return subreddit.new(limit=input_data.post_limit or 10)
+        return subreddit.new(limit=input_data.post_limit)

     def run(self, input_data: Input, **kwargs) -> BlockOutput:
         current_time = datetime.now(tz=timezone.utc)
@@ -165,10 +165,8 @@ class PostRedditCommentBlock(Block):
     def reply_post(creds: RedditCredentials, comment: RedditComment) -> str:
         client = get_praw(creds)
         submission = client.submission(id=comment.post_id)
-        new_comment = submission.reply(comment.comment)
-        if not new_comment:
-            raise ValueError("Failed to post comment.")
-        return new_comment.id
+        comment = submission.reply(comment.comment)
+        return comment.id  # type: ignore

     def run(self, input_data: Input, **kwargs) -> BlockOutput:
         yield "comment_id", self.reply_post(input_data.creds, input_data.data)
backend/blocks/replicate_flux_advanced.py:

@@ -5,7 +5,6 @@ from typing import Literal
 import replicate
 from autogpt_libs.supabase_integration_credentials_store.types import APIKeyCredentials
 from pydantic import SecretStr
-from replicate.helpers import FileOutput

 from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
 from backend.data.model import CredentialsField, CredentialsMetaInput, SchemaField
@@ -198,7 +197,7 @@ class ReplicateFluxAdvancedModelBlock(Block):
         client = replicate.Client(api_token=api_key.get_secret_value())

         # Run the model with additional parameters
-        output: FileOutput | list[FileOutput] = client.run(  # type: ignore This is because they changed the return type, and didn't update the type hint! It should be overloaded depending on the value of `use_file_output` to `FileOutput | list[FileOutput]` but it's `Any | Iterator[Any]`
+        output = client.run(
             f"{model_name}",
             input={
                 "prompt": prompt,
@@ -211,21 +210,13 @@ class ReplicateFluxAdvancedModelBlock(Block):
                 "output_quality": output_quality,
                 "safety_tolerance": safety_tolerance,
             },
-            wait=False,  # don't arbitrarily return data:octect/stream or sometimes url depending on the model???? what is this api
         )

         # Check if output is a list or a string and extract accordingly; otherwise, assign a default message
         if isinstance(output, list) and len(output) > 0:
-            if isinstance(output[0], FileOutput):
-                result_url = output[0].url  # If output is a list, get the first element
-            else:
-                result_url = output[
-                    0
-                ]  # If output is a list and not a FileOutput, get the first element. Should never happen, but just in case.
-        elif isinstance(output, FileOutput):
-            result_url = output.url  # If output is a FileOutput, use the url
+            result_url = output[0]  # If output is a list, get the first element
         elif isinstance(output, str):
-            result_url = output  # If output is a string (for some reason due to their janky type hinting), use it directly
+            result_url = output  # If output is a string, use it directly
         else:
             result_url = (
                 "No output received"  # Fallback message if output is not as expected
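The awkwardness both versions wrestle with is that `replicate.Client.run` is loosely typed while at runtime it may hand back a `FileOutput`, a list, or a bare URL string. A small normalizer makes the intent explicit (a sketch against the same `replicate` objects the diff imports):

```python
from replicate.helpers import FileOutput

def extract_result_url(output) -> str:
    # Collapse the possible shapes of client.run() output to one URL string.
    if isinstance(output, list) and len(output) > 0:
        output = output[0]
    if isinstance(output, FileOutput):
        return output.url
    if isinstance(output, str):
        return output
    return "No output received"  # fallback for unexpected shapes
```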
backend/blocks/search.py:

@@ -1,14 +1,22 @@
-from typing import Literal
+from typing import Any, Literal
 from urllib.parse import quote

 import requests
 from autogpt_libs.supabase_integration_credentials_store.types import APIKeyCredentials
 from pydantic import SecretStr

-from backend.blocks.helpers.http import GetRequest
 from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
 from backend.data.model import CredentialsField, CredentialsMetaInput, SchemaField


+class GetRequest:
+    @classmethod
+    def get_request(cls, url: str, json=False) -> Any:
+        response = requests.get(url)
+        response.raise_for_status()
+        return response.json() if json else response.text
+
+
 class GetWikipediaSummaryBlock(Block, GetRequest):
     class Input(BlockSchema):
         topic: str = SchemaField(description="The topic to fetch the summary for")
@@ -40,6 +48,42 @@ class GetWikipediaSummaryBlock(Block, GetRequest):
         yield "summary", response["extract"]


+class SearchTheWebBlock(Block, GetRequest):
+    class Input(BlockSchema):
+        query: str = SchemaField(description="The search query to search the web for")
+
+    class Output(BlockSchema):
+        results: str = SchemaField(
+            description="The search results including content from top 5 URLs"
+        )
+        error: str = SchemaField(description="Error message if the search fails")
+
+    def __init__(self):
+        super().__init__(
+            id="87840993-2053-44b7-8da4-187ad4ee518c",
+            description="This block searches the internet for the given search query.",
+            categories={BlockCategory.SEARCH},
+            input_schema=SearchTheWebBlock.Input,
+            output_schema=SearchTheWebBlock.Output,
+            test_input={"query": "Artificial Intelligence"},
+            test_output=("results", "search content"),
+            test_mock={"get_request": lambda url, json: "search content"},
+        )
+
+    def run(self, input_data: Input, **kwargs) -> BlockOutput:
+        # Encode the search query
+        encoded_query = quote(input_data.query)
+
+        # Prepend the Jina Search URL to the encoded query
+        jina_search_url = f"https://s.jina.ai/{encoded_query}"
+
+        # Make the request to Jina Search
+        response = self.get_request(jina_search_url, json=False)
+
+        # Output the search results
+        yield "results", response
+
+
 class ExtractWebsiteContentBlock(Block, GetRequest):
     class Input(BlockSchema):
         url: str = SchemaField(description="The URL to scrape the content from")
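Since the whole search block reduces to URL-encoding the query and issuing one GET, the core is easy to show in isolation (the s.jina.ai base URL is taken from the code above; the query is illustrative):

```python
from urllib.parse import quote

query = "Artificial Intelligence"
# quote() percent-encodes characters that are unsafe in a URL path segment.
jina_search_url = f"https://s.jina.ai/{quote(query)}"
assert jina_search_url == "https://s.jina.ai/Artificial%20Intelligence"
```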
backend/blocks/talking_head.py:

@@ -1,12 +1,12 @@
 import time
 from typing import Literal

+import requests
 from autogpt_libs.supabase_integration_credentials_store.types import APIKeyCredentials
 from pydantic import SecretStr

 from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
 from backend.data.model import CredentialsField, CredentialsMetaInput, SchemaField
-from backend.util.request import requests

 TEST_CREDENTIALS = APIKeyCredentials(
     id="01234567-89ab-cdef-0123-456789abcdef",
@@ -118,6 +118,7 @@ class CreateTalkingAvatarVideoBlock(Block):
             "authorization": f"Basic {api_key.get_secret_value()}",
         }
         response = requests.post(url, json=payload, headers=headers)
+        response.raise_for_status()
         return response.json()

     def get_clip_status(self, api_key: SecretStr, clip_id: str) -> dict:
@@ -127,6 +128,7 @@ class CreateTalkingAvatarVideoBlock(Block):
             "authorization": f"Basic {api_key.get_secret_value()}",
         }
         response = requests.get(url, headers=headers)
+        response.raise_for_status()
         return response.json()

     def run(
@@ -1,11 +1,11 @@
 from typing import Any, Literal

+import requests
 from autogpt_libs.supabase_integration_credentials_store.types import APIKeyCredentials
 from pydantic import SecretStr

 from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
 from backend.data.model import CredentialsField, CredentialsMetaInput, SchemaField
-from backend.util.request import requests

 TEST_CREDENTIALS = APIKeyCredentials(
     id="01234567-89ab-cdef-0123-456789abcdef",
@@ -86,6 +86,7 @@ class UnrealTextToSpeechBlock(Block):
         }

         response = requests.post(url, headers=headers, json=data)
+        response.raise_for_status()
         return response.json()

     def run(
@@ -20,7 +20,6 @@ from pydantic import BaseModel

 from backend.blocks.ai_shortform_video_block import AIShortformVideoCreatorBlock
 from backend.blocks.ideogram import IdeogramModelBlock
-from backend.blocks.jina.search import SearchTheWebBlock
 from backend.blocks.llm import (
     MODEL_METADATA,
     AIConversationBlock,
@@ -30,7 +29,7 @@ from backend.blocks.llm import (
     LlmModel,
 )
 from backend.blocks.replicate_flux_advanced import ReplicateFluxAdvancedModelBlock
-from backend.blocks.search import ExtractWebsiteContentBlock
+from backend.blocks.search import ExtractWebsiteContentBlock, SearchTheWebBlock
 from backend.blocks.talking_head import CreateTalkingAvatarVideoBlock
 from backend.data.block import Block, BlockInput, get_block
 from backend.util.settings import Config
backend/data/execution.py:

@@ -9,11 +9,14 @@ from prisma.models import (
     AgentNodeExecution,
     AgentNodeExecutionInputOutput,
 )
-from prisma.types import AgentGraphExecutionWhereInput
+from prisma.types import (
+    AgentGraphExecutionInclude,
+    AgentGraphExecutionWhereInput,
+    AgentNodeExecutionInclude,
+)
 from pydantic import BaseModel

 from backend.data.block import BlockData, BlockInput, CompletedBlockOutput
-from backend.data.includes import EXECUTION_RESULT_INCLUDE, GRAPH_EXECUTION_INCLUDE
 from backend.util import json, mock

@@ -107,6 +110,24 @@ class ExecutionResult(BaseModel):

 # --------------------- Model functions --------------------- #

+EXECUTION_RESULT_INCLUDE: AgentNodeExecutionInclude = {
+    "Input": True,
+    "Output": True,
+    "AgentNode": True,
+    "AgentGraphExecution": True,
+}
+
+GRAPH_EXECUTION_INCLUDE: AgentGraphExecutionInclude = {
+    "AgentNodeExecutions": {
+        "include": {
+            "Input": True,
+            "Output": True,
+            "AgentNode": True,
+            "AgentGraphExecution": True,
+        }
+    }
+}
+

 async def create_graph_execution(
     graph_id: str,
@@ -247,9 +268,21 @@ async def update_graph_execution_start_time(graph_exec_id: str):

 async def update_graph_execution_stats(
     graph_exec_id: str,
-    stats: dict[str, Any],
+    error: Exception | None,
+    wall_time: float,
+    cpu_time: float,
+    node_count: int,
 ):
-    status = ExecutionStatus.FAILED if stats.get("error") else ExecutionStatus.COMPLETED
+    status = ExecutionStatus.FAILED if error else ExecutionStatus.COMPLETED
+    stats = (
+        {
+            "walltime": wall_time,
+            "cputime": cpu_time,
+            "nodecount": node_count,
+            "error": str(error) if error else None,
+        },
+    )

     await AgentGraphExecution.prisma().update(
         where={"id": graph_exec_id},
         data={
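Note the shape of the new `stats` literal: the parentheses plus trailing comma make it a one-element tuple containing the dict, not the dict itself, which is worth checking against how the subsequent update call consumes it. A minimal illustration of that Python semantics:

```python
stats = (
    {
        "walltime": 1.5,
        "cputime": 0.9,
        "nodecount": 3,
        "error": None,
    },
)
assert isinstance(stats, tuple) and isinstance(stats[0], dict)
```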
@@ -1,24 +1,30 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import uuid
|
||||
from collections import defaultdict
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any, Literal, Type
|
||||
from typing import Any, Literal
|
||||
|
||||
import prisma.types
|
||||
from prisma.models import AgentGraph, AgentGraphExecution, AgentNode, AgentNodeLink
|
||||
from prisma.types import AgentGraphWhereInput
|
||||
from pydantic.fields import computed_field
|
||||
from prisma.types import AgentGraphInclude
|
||||
from pydantic import BaseModel
|
||||
from pydantic_core import PydanticUndefinedType
|
||||
|
||||
from backend.blocks.basic import AgentInputBlock, AgentOutputBlock
|
||||
from backend.data.block import BlockInput, BlockType, get_block, get_blocks
|
||||
from backend.blocks.basic import AgentInputBlock, AgentOutputBlock, BlockType
|
||||
from backend.data.block import BlockInput, get_block, get_blocks
|
||||
from backend.data.db import BaseDbModel, transaction
|
||||
from backend.data.execution import ExecutionStatus
|
||||
from backend.data.includes import AGENT_GRAPH_INCLUDE, AGENT_NODE_INCLUDE
|
||||
from backend.util import json
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class InputSchemaItem(BaseModel):
|
||||
node_id: str
|
||||
description: str | None = None
|
||||
title: str | None = None
|
||||
|
||||
|
||||
class Link(BaseDbModel):
|
||||
source_id: str
|
||||
sink_id: str
|
||||
@@ -63,7 +69,7 @@ class Node(BaseDbModel):
|
||||
return obj
|
||||
|
||||
|
||||
class GraphExecution(BaseDbModel):
|
||||
class ExecutionMeta(BaseDbModel):
|
||||
execution_id: str
|
||||
started_at: datetime
|
||||
ended_at: datetime
|
||||
@@ -72,19 +78,20 @@ class GraphExecution(BaseDbModel):
|
||||
status: ExecutionStatus
|
||||
|
||||
@staticmethod
|
||||
def from_db(execution: AgentGraphExecution):
|
||||
def from_agent_graph_execution(execution: AgentGraphExecution):
|
||||
now = datetime.now(timezone.utc)
|
||||
start_time = execution.startedAt or execution.createdAt
|
||||
end_time = execution.updatedAt or now
|
||||
duration = (end_time - start_time).total_seconds()
|
||||
total_run_time = duration
|
||||
|
||||
if execution.stats:
|
||||
stats = json.loads(execution.stats)
|
||||
duration = stats.get("walltime", duration)
|
||||
total_run_time = stats.get("nodes_walltime", total_run_time)
|
||||
total_run_time = 0
|
||||
if execution.AgentNodeExecutions:
|
||||
for node_execution in execution.AgentNodeExecutions:
|
||||
node_start = node_execution.startedTime or now
|
||||
node_end = node_execution.endedTime or now
|
||||
total_run_time += (node_end - node_start).total_seconds()
|
||||
|
||||
return GraphExecution(
|
||||
return ExecutionMeta(
|
||||
id=execution.id,
|
||||
execution_id=execution.id,
|
||||
started_at=start_time,
|
||||
@@ -95,70 +102,39 @@ class GraphExecution(BaseDbModel):
|
||||
)
|
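The rewritten `from_agent_graph_execution` above derives `total_run_time` by summing each node execution's own interval instead of reading a cached `nodes_walltime` stat. A runnable illustration of that arithmetic (the intervals are made up):

from datetime import datetime, timedelta, timezone

now = datetime.now(timezone.utc)
# Two hypothetical node executions: one 2.0s long, one 3.5s long.
intervals = [
    (now, now + timedelta(seconds=2.0)),
    (now, now + timedelta(seconds=3.5)),
]
total_run_time = sum((end - start).total_seconds() for start, end in intervals)
assert total_run_time == 5.5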


class Graph(BaseDbModel):
class GraphMeta(BaseDbModel):
version: int = 1
is_active: bool = True
is_template: bool = False
name: str
description: str
executions: list[GraphExecution] = []
nodes: list[Node] = []
links: list[Link] = []
executions: list[ExecutionMeta] | None = None

@staticmethod
def _generate_schema(
type_class: Type[AgentInputBlock.Input] | Type[AgentOutputBlock.Input],
data: list[dict],
) -> dict[str, Any]:
props = []
for p in data:
try:
props.append(type_class(**p))
except Exception as e:
logger.warning(f"Invalid {type_class}: {p}, {e}")
def from_db(graph: AgentGraph):
if graph.AgentGraphExecution:
executions = [
ExecutionMeta.from_agent_graph_execution(execution)
for execution in graph.AgentGraphExecution
]
else:
executions = None

return {
"type": "object",
"properties": {
p.name: {
"secret": p.secret,
"advanced": p.advanced,
"title": p.title or p.name,
**({"description": p.description} if p.description else {}),
**({"default": p.value} if p.value is not None else {}),
}
for p in props
},
"required": [p.name for p in props if p.value is None],
}

@computed_field
@property
def input_schema(self) -> dict[str, Any]:
return self._generate_schema(
AgentInputBlock.Input,
[
node.input_default
for node in self.nodes
if (b := get_block(node.block_id))
and b.block_type == BlockType.INPUT
and "name" in node.input_default
],
return GraphMeta(
id=graph.id,
version=graph.version,
is_active=graph.isActive,
is_template=graph.isTemplate,
name=graph.name or "",
description=graph.description or "",
executions=executions,
)

@computed_field
@property
def output_schema(self) -> dict[str, Any]:
return self._generate_schema(
AgentOutputBlock.Input,
[
node.input_default
for node in self.nodes
if (b := get_block(node.block_id))
and b.block_type == BlockType.OUTPUT
and "name" in node.input_default
],
)
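For context on the removed `_generate_schema`: it turned the `input_default` dicts of a graph's input/output nodes into a JSON-schema object. A minimal, self-contained sketch of the same comprehension using plain dicts (the sample field names are assumptions):

def generate_schema(props: list[dict]) -> dict:
    # Mirrors the removed _generate_schema comprehension, with dicts standing in
    # for AgentInputBlock.Input instances.
    return {
        "type": "object",
        "properties": {
            p["name"]: {
                "secret": p.get("secret", False),
                "advanced": p.get("advanced", False),
                "title": p.get("title") or p["name"],
                **({"description": p["description"]} if p.get("description") else {}),
                **({"default": p["value"]} if p.get("value") is not None else {}),
            }
            for p in props
        },
        # Only fields without a default value are required.
        "required": [p["name"] for p in props if p.get("value") is None],
    }

schema = generate_schema([
    {"name": "query", "description": "Search term"},   # hypothetical input
    {"name": "limit", "value": 10, "advanced": True},   # hypothetical input
])
assert schema["required"] == ["query"]
assert schema["properties"]["limit"]["default"] == 10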

class Graph(GraphMeta):
nodes: list[Node]
links: list[Link]
subgraphs: dict[str, list[str]] = {}  # subgraph_id -> [node_id]

@property
def starting_nodes(self) -> list[Node]:
@@ -166,7 +142,7 @@ class Graph(BaseDbModel):
input_nodes = {
v.id
for v in self.nodes
if (b := get_block(v.block_id)) and b.block_type == BlockType.INPUT
if isinstance(get_block(v.block_id), AgentInputBlock)
}
return [
node
@@ -174,6 +150,28 @@ class Graph(BaseDbModel):
if node.id not in outbound_nodes or node.id in input_nodes
]

@property
def ending_nodes(self) -> list[Node]:
return [
v for v in self.nodes if isinstance(get_block(v.block_id), AgentOutputBlock)
]

@property
def subgraph_map(self) -> dict[str, str]:
"""
Returns a mapping of node_id to subgraph_id.
A node in the main graph will be mapped to the graph's id.
"""
subgraph_map = {
node_id: subgraph_id
for subgraph_id, node_ids in self.subgraphs.items()
for node_id in node_ids
}
subgraph_map.update(
{node.id: self.id for node in self.nodes if node.id not in subgraph_map}
)
return subgraph_map
def reassign_ids(self, reassign_graph_id: bool = False):
"""
Reassigns all IDs in the graph to new UUIDs.
@@ -181,7 +179,11 @@ class Graph(BaseDbModel):
"""
self.validate_graph()

id_map = {node.id: str(uuid.uuid4()) for node in self.nodes}
id_map = {
**{node.id: str(uuid.uuid4()) for node in self.nodes},
**{subgraph_id: str(uuid.uuid4()) for subgraph_id in self.subgraphs},
}

if reassign_graph_id:
self.id = str(uuid.uuid4())

@@ -192,15 +194,16 @@ class Graph(BaseDbModel):
link.source_id = id_map[link.source_id]
link.sink_id = id_map[link.sink_id]

self.subgraphs = {
id_map[subgraph_id]: [id_map[node_id] for node_id in node_ids]
for subgraph_id, node_ids in self.subgraphs.items()
}

def validate_graph(self, for_run: bool = False):
def sanitize(name):
return name.split("_#_")[0].split("_@_")[0].split("_$_")[0]

input_links = defaultdict(list)
for link in self.links:
input_links[link.sink_id].append(link)

# Nodes: required fields are filled or connected
# Nodes: required fields are filled or connected, except for InputBlock.
for node in self.nodes:
block = get_block(node.block_id)
if block is None:
@@ -208,7 +211,7 @@ class Graph(BaseDbModel):

provided_inputs = set(
[sanitize(name) for name in node.input_default]
+ [sanitize(link.sink_name) for link in input_links.get(node.id, [])]
+ [sanitize(link.sink_name) for link in node.input_links]
)
for name in block.input_schema.get_required_fields():
if name not in provided_inputs and (
@@ -219,7 +222,6 @@ class Graph(BaseDbModel):
raise ValueError(
f"Node {block.name} #{node.id} required input missing: `{name}`"
)

node_map = {v.id: v for v in self.nodes}

def is_static_output_block(nid: str) -> bool:
@@ -227,6 +229,18 @@ class Graph(BaseDbModel):
b = get_block(bid)
return b.static_output if b else False

def is_input_output_block(nid: str) -> bool:
bid = node_map[nid].block_id
b = get_block(bid)
return isinstance(b, AgentInputBlock) or isinstance(b, AgentOutputBlock)

# subgraphs: all nodes in subgraph must be present in the graph.
for subgraph_id, node_ids in self.subgraphs.items():
for node_id in node_ids:
if node_id not in node_map:
raise ValueError(f"Subgraph {subgraph_id}'s node {node_id} invalid")
subgraph_map = self.subgraph_map

# Links: links are connected and the connected pin data type are compatible.
for link in self.links:
source = (link.source_id, link.source_name)
@@ -255,27 +269,66 @@ class Graph(BaseDbModel):
if sanitized_name not in fields:
raise ValueError(f"{suffix}, `{name}` invalid, {fields}")

if (
subgraph_map.get(link.source_id) != subgraph_map.get(link.sink_id)
and not is_input_output_block(link.source_id)
and not is_input_output_block(link.sink_id)
):
raise ValueError(f"{suffix}, Connecting nodes from different subgraph.")

if is_static_output_block(link.source_id):
link.is_static = True  # Each value block output should be static.

# TODO: Add type compatibility check here.
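The `sanitize` helper above strips the `_#_`, `_@_`, and `_$_` suffixes that get appended to composite pin names, so a suffixed link name and the plain field name count as the same input. A runnable illustration (what each marker denotes is an assumption; the splitting behaviour is from the source):

def sanitize(name):
    return name.split("_#_")[0].split("_@_")[0].split("_$_")[0]

# Everything after the first marker is discarded; unmarked names pass through.
assert sanitize("values_#_0") == "values"
assert sanitize("payload_@_key") == "payload"
assert sanitize("items_$_2") == "items"
assert sanitize("plain_name") == "plain_name"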
def get_input_schema(self) -> list[InputSchemaItem]:
"""
Walks the graph and returns all the inputs that are either not:
- static
- provided by parent node
"""
input_schema = []
for node in self.nodes:
block = get_block(node.block_id)
if not block:
continue

for input_name, input_schema_item in (
block.input_schema.jsonschema().get("properties", {}).items()
):
# Check if the input is not static and not provided by a parent node
if (
input_name not in node.input_default
and not any(
link.sink_name == input_name for link in node.input_links
)
and isinstance(
block.input_schema.model_fields.get(input_name).default,
PydanticUndefinedType,
)
):
input_schema.append(
InputSchemaItem(
node_id=node.id,
description=input_schema_item.get("description"),
title=input_schema_item.get("title"),
)
)

return input_schema

@staticmethod
def from_db(graph: AgentGraph, hide_credentials: bool = False):
executions = [
GraphExecution.from_db(execution)
for execution in graph.AgentGraphExecution or []
nodes = [
*(graph.AgentNodes or []),
*(
node
for subgraph in graph.AgentSubGraphs or []
for node in subgraph.AgentNodes or []
),
]
nodes = graph.AgentNodes or []

return Graph(
id=graph.id,
version=graph.version,
is_active=graph.isActive,
is_template=graph.isTemplate,
name=graph.name or "",
description=graph.description or "",
executions=executions,
**GraphMeta.from_db(graph).model_dump(),
nodes=[Graph._process_node(node, hide_credentials) for node in nodes],
links=list(
{
@@ -284,6 +337,10 @@ class Graph(BaseDbModel):
for link in (node.Input or []) + (node.Output or [])
}
),
subgraphs={
subgraph.id: [node.id for node in subgraph.AgentNodes or []]
for subgraph in graph.AgentSubGraphs or []
},
)

@staticmethod
@@ -312,6 +369,20 @@ class Graph(BaseDbModel):
return result


AGENT_NODE_INCLUDE: prisma.types.AgentNodeInclude = {
"Input": True,
"Output": True,
"AgentBlock": True,
}

__SUBGRAPH_INCLUDE = {"AgentNodes": {"include": AGENT_NODE_INCLUDE}}

AGENT_GRAPH_INCLUDE: prisma.types.AgentGraphInclude = {
**__SUBGRAPH_INCLUDE,
"AgentSubGraphs": {"include": __SUBGRAPH_INCLUDE},  # type: ignore
}


# --------------------- Model functions --------------------- #


@@ -323,11 +394,11 @@ async def get_node(node_id: str) -> Node:
return Node.from_db(node)


async def get_graphs(
async def get_graphs_meta(
user_id: str,
include_executions: bool = False,
filter_by: Literal["active", "template"] | None = "active",
) -> list[Graph]:
) -> list[GraphMeta]:
"""
Retrieves graph metadata objects.
Default behaviour is to get all currently active graphs.
@@ -338,9 +409,9 @@ async def get_graphs(
user_id: The ID of the user that owns the graph.

Returns:
list[Graph]: A list of objects representing the retrieved graph metadata.
list[GraphMeta]: A list of objects representing the retrieved graph metadata.
"""
where_clause: AgentGraphWhereInput = {}
where_clause: prisma.types.AgentGraphWhereInput = {}

if filter_by == "active":
where_clause["isActive"] = True
@@ -349,17 +420,23 @@ async def get_graphs(

where_clause["userId"] = user_id

graph_include = AGENT_GRAPH_INCLUDE
graph_include["AgentGraphExecution"] = include_executions

graphs = await AgentGraph.prisma().find_many(
where=where_clause,
distinct=["id"],
order={"version": "desc"},
include=graph_include,
include=(
AgentGraphInclude(
AgentGraphExecution={"include": {"AgentNodeExecutions": True}}
)
if include_executions
else None
),
)

return [Graph.from_db(graph) for graph in graphs]
if not graphs:
return []

return [GraphMeta.from_db(graph) for graph in graphs]


async def get_graph(
@@ -376,7 +453,7 @@ async def get_graph(

Returns `None` if the record is not found.
"""
where_clause: AgentGraphWhereInput = {
where_clause: prisma.types.AgentGraphWhereInput = {
"id": graph_id,
"isTemplate": template,
}
@@ -385,7 +462,7 @@ async def get_graph(
elif not template:
where_clause["isActive"] = True

if user_id is not None and not template:
if user_id and not template:
where_clause["userId"] = user_id

graph = await AgentGraph.prisma().find_first(
@@ -471,13 +548,33 @@ async def __create_graph(tx, graph: Graph, user_id: str):
}
)

await asyncio.gather(
*[
AgentGraph.prisma(tx).create(
data={
"id": subgraph_id,
"agentGraphParentId": graph.id,
"version": graph.version,
"name": f"SubGraph of {graph.name}",
"description": f"Sub-Graph of {graph.id}",
"isTemplate": graph.is_template,
"isActive": graph.is_active,
"userId": user_id,
}
)
for subgraph_id in graph.subgraphs
]
)

subgraph_map = graph.subgraph_map

await asyncio.gather(
*[
AgentNode.prisma(tx).create(
{
"id": node.id,
"agentBlockId": node.block_id,
"agentGraphId": graph.id,
"agentGraphId": subgraph_map.get(node.id, graph.id),
"agentGraphVersion": graph.version,
"constantInput": json.dumps(node.input_default),
"metadata": json.dumps(node.metadata),
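The replacement `from_db` above gathers nodes from the graph itself and from every subgraph into one flat list. The equivalent comprehension in isolation, with toy stand-ins for the Prisma records:

class Rec:
    # Minimal stand-in for an AgentGraph / sub-graph record.
    def __init__(self, AgentNodes=None, AgentSubGraphs=None):
        self.AgentNodes = AgentNodes
        self.AgentSubGraphs = AgentSubGraphs

sub = Rec(AgentNodes=["n3", "n4"])
graph = Rec(AgentNodes=["n1", "n2"], AgentSubGraphs=[sub])

nodes = [
    *(graph.AgentNodes or []),
    *(
        node
        for subgraph in graph.AgentSubGraphs or []
        for node in subgraph.AgentNodes or []
    ),
]
assert nodes == ["n1", "n2", "n3", "n4"]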
@@ -1,29 +0,0 @@
import prisma

AGENT_NODE_INCLUDE: prisma.types.AgentNodeInclude = {
"Input": True,
"Output": True,
"AgentBlock": True,
}

AGENT_GRAPH_INCLUDE: prisma.types.AgentGraphInclude = {
"AgentNodes": {"include": AGENT_NODE_INCLUDE}  # type: ignore
}

EXECUTION_RESULT_INCLUDE: prisma.types.AgentNodeExecutionInclude = {
"Input": True,
"Output": True,
"AgentNode": True,
"AgentGraphExecution": True,
}

GRAPH_EXECUTION_INCLUDE: prisma.types.AgentGraphExecutionInclude = {
"AgentNodeExecutions": {
"include": {
"Input": True,
"Output": True,
"AgentNode": True,
"AgentGraphExecution": True,
}
}
}
@@ -36,12 +36,7 @@ from backend.util import json
from backend.util.decorator import error_logged, time_measured
from backend.util.logging import configure_logging
from backend.util.process import set_service_name
from backend.util.service import (
AppService,
close_service_client,
expose,
get_service_client,
)
from backend.util.service import AppService, expose, get_service_client
from backend.util.settings import Settings
from backend.util.type import convert

@@ -457,8 +452,6 @@ class Executor:
cls.creds_manager.release_all_locks()
logger.info(f"[on_node_executor_stop {cls.pid}] ⏳ Disconnecting Redis...")
redis.disconnect()
logger.info(f"[on_node_executor_stop {cls.pid}] ⏳ Disconnecting DB manager...")
close_service_client(cls.db_client)
logger.info(f"[on_node_executor_stop {cls.pid}] ✅ Finished cleanup")

@classmethod
@@ -480,7 +473,7 @@ class Executor:
cls,
q: ExecutionQueue[NodeExecution],
node_exec: NodeExecution,
) -> dict[str, Any]:
):
log_metadata = LogMetadata(
user_id=node_exec.user_id,
graph_eid=node_exec.graph_exec_id,
@@ -500,7 +493,6 @@ class Executor:
cls.db_client.update_node_execution_stats(
node_exec.node_exec_id, execution_stats
)
return execution_stats

@classmethod
@time_measured
@@ -544,8 +536,6 @@ class Executor:
prefix = f"[on_graph_executor_stop {cls.pid}]"
logger.info(f"{prefix} ⏳ Terminating node executor pool...")
cls.executor.terminate()
logger.info(f"{prefix} ⏳ Disconnecting DB manager...")
close_service_client(cls.db_client)
logger.info(f"{prefix} ✅ Finished cleanup")

@classmethod
@@ -566,15 +556,16 @@ class Executor:
node_eid="*",
block_name="-",
)
timing_info, (exec_stats, error) = cls._on_graph_execution(
timing_info, (node_count, error) = cls._on_graph_execution(
graph_exec, cancel, log_metadata
)
exec_stats["walltime"] = timing_info.wall_time
exec_stats["cputime"] = timing_info.cpu_time
exec_stats["error"] = str(error) if error else None

cls.db_client.update_graph_execution_stats(
graph_exec_id=graph_exec.graph_exec_id,
stats=exec_stats,
error=error,
wall_time=timing_info.wall_time,
cpu_time=timing_info.cpu_time,
node_count=node_count,
)

@classmethod
@@ -584,18 +575,14 @@ class Executor:
graph_exec: GraphExecution,
cancel: threading.Event,
log_metadata: LogMetadata,
) -> tuple[dict[str, Any], Exception | None]:
) -> tuple[int, Exception | None]:
"""
Returns:
The execution statistics of the graph execution.
The number of node executions completed.
The error that occurred during the execution.
"""
log_metadata.info(f"Start graph execution {graph_exec.graph_exec_id}")
exec_stats = {
"nodes_walltime": 0,
"nodes_cputime": 0,
"node_count": 0,
}
n_node_executions = 0
error = None
finished = False

@@ -621,20 +608,17 @@ class Executor:
def make_exec_callback(exec_data: NodeExecution):
node_id = exec_data.node_id

def callback(result: object):
def callback(_):
running_executions.pop(node_id)
nonlocal exec_stats
if isinstance(result, dict):
exec_stats["node_count"] += 1
exec_stats["nodes_cputime"] += result.get("cputime", 0)
exec_stats["nodes_walltime"] += result.get("walltime", 0)
nonlocal n_node_executions
n_node_executions += 1

return callback

while not queue.empty():
if cancel.is_set():
error = RuntimeError("Execution is cancelled")
return exec_stats, error
return n_node_executions, error

exec_data = queue.get()

@@ -665,7 +649,7 @@ class Executor:
for node_id, execution in list(running_executions.items()):
if cancel.is_set():
error = RuntimeError("Execution is cancelled")
return exec_stats, error
return n_node_executions, error

if not queue.empty():
break  # yield to parent loop to execute new queue items
@@ -684,7 +668,7 @@ class Executor:
finished = True
cancel.set()
cancel_thread.join()
return exec_stats, error
return n_node_executions, error


class ExecutionManager(AppService):
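With these hunks, `_on_graph_execution` no longer aggregates per-node stats; it reports only how many node executions completed plus any error, while wall and CPU time come from the `@time_measured` decorator. A hedged sketch of the resulting reporting flow (`TimingInfo` is an assumed stand-in for whatever `time_measured` actually yields; only its `wall_time`/`cpu_time` attributes are inferred from the usage above):

from dataclasses import dataclass
from typing import Any

@dataclass
class TimingInfo:
    # Assumed shape, inferred from timing_info.wall_time / .cpu_time above.
    wall_time: float
    cpu_time: float

def report_graph_stats(db_client: Any, graph_exec_id: str,
                       timing_info: TimingInfo, node_count: int,
                       error: Exception | None) -> None:
    # Mirrors the updated call site: raw measurements in, no stats dict.
    db_client.update_graph_execution_stats(
        graph_exec_id=graph_exec_id,
        error=error,
        wall_time=timing_info.wall_time,
        cpu_time=timing_info.cpu_time,
        node_count=node_count,
    )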
@@ -2,10 +2,9 @@ import time
|
||||
from typing import Optional
|
||||
from urllib.parse import urlencode
|
||||
|
||||
import requests
|
||||
from autogpt_libs.supabase_integration_credentials_store import OAuth2Credentials
|
||||
|
||||
from backend.util.request import requests
|
||||
|
||||
from .base import BaseOAuthHandler
|
||||
|
||||
|
||||
@@ -57,12 +56,13 @@ class GitHubOAuthHandler(BaseOAuthHandler):
|
||||
"X-GitHub-Api-Version": "2022-11-28",
|
||||
}
|
||||
|
||||
requests.delete(
|
||||
response = requests.delete(
|
||||
url=self.revoke_url.format(client_id=self.client_id),
|
||||
auth=(self.client_id, self.client_secret),
|
||||
headers=headers,
|
||||
json={"access_token": credentials.access_token.get_secret_value()},
|
||||
)
|
||||
response.raise_for_status()
|
||||
return True
|
||||
|
||||
def _refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials:
|
||||
@@ -88,6 +88,7 @@ class GitHubOAuthHandler(BaseOAuthHandler):
|
||||
}
|
||||
headers = {"Accept": "application/json"}
|
||||
response = requests.post(self.token_url, data=request_body, headers=headers)
|
||||
response.raise_for_status()
|
||||
token_data: dict = response.json()
|
||||
|
||||
username = self._request_username(token_data["access_token"])
|
||||
|
||||
@@ -103,11 +103,12 @@ class GoogleOAuthHandler(BaseOAuthHandler):
|
||||
|
||||
def revoke_tokens(self, credentials: OAuth2Credentials) -> bool:
|
||||
session = AuthorizedSession(credentials)
|
||||
session.post(
|
||||
response = session.post(
|
||||
self.revoke_uri,
|
||||
params={"token": credentials.access_token.get_secret_value()},
|
||||
headers={"content-type": "application/x-www-form-urlencoded"},
|
||||
)
|
||||
response.raise_for_status()
|
||||
return True
|
||||
|
||||
def _request_email(
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
from base64 import b64encode
|
||||
from urllib.parse import urlencode
|
||||
|
||||
import requests
|
||||
from autogpt_libs.supabase_integration_credentials_store import OAuth2Credentials
|
||||
|
||||
from backend.util.request import requests
|
||||
|
||||
from .base import BaseOAuthHandler
|
||||
|
||||
|
||||
@@ -50,6 +49,7 @@ class NotionOAuthHandler(BaseOAuthHandler):
|
||||
"Accept": "application/json",
|
||||
}
|
||||
response = requests.post(self.token_url, json=request_body, headers=headers)
|
||||
response.raise_for_status()
|
||||
token_data = response.json()
|
||||
# Email is only available for non-bot users
|
||||
email = (
|
||||
|
||||
@@ -121,8 +121,8 @@ class DeleteGraphResponse(TypedDict):
|
||||
async def get_graphs(
|
||||
user_id: Annotated[str, Depends(get_user_id)],
|
||||
with_runs: bool = False,
|
||||
) -> list[graph_db.Graph]:
|
||||
return await graph_db.get_graphs(
|
||||
) -> list[graph_db.GraphMeta]:
|
||||
return await graph_db.get_graphs_meta(
|
||||
include_executions=with_runs, filter_by="active", user_id=user_id
|
||||
)
|
||||
|
||||
@@ -290,6 +290,22 @@ async def stop_graph_run(
|
||||
return await execution_db.get_execution_results(graph_exec_id)
|
||||
|
||||
|
||||
@v1_router.get(
|
||||
path="/graphs/{graph_id}/input_schema",
|
||||
tags=["graphs"],
|
||||
dependencies=[Depends(auth_middleware)],
|
||||
)
|
||||
async def get_graph_input_schema(
|
||||
graph_id: str,
|
||||
user_id: Annotated[str, Depends(get_user_id)],
|
||||
) -> list[graph_db.InputSchemaItem]:
|
||||
try:
|
||||
graph = await graph_db.get_graph(graph_id, user_id=user_id)
|
||||
return graph.get_input_schema() if graph else []
|
||||
except Exception:
|
||||
raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
|
||||
|
||||
|
||||
@v1_router.get(
|
||||
path="/graphs/{graph_id}/executions",
|
||||
tags=["graphs"],
|
||||
@@ -358,8 +374,8 @@ async def get_graph_run_status(
|
||||
)
|
||||
async def get_templates(
|
||||
user_id: Annotated[str, Depends(get_user_id)]
|
||||
) -> list[graph_db.Graph]:
|
||||
return await graph_db.get_graphs(filter_by="template", user_id=user_id)
|
||||
) -> list[graph_db.GraphMeta]:
|
||||
return await graph_db.get_graphs_meta(filter_by="template", user_id=user_id)
|
||||
|
||||
|
||||
@v1_router.get(
|
||||
|
||||
@@ -1,132 +0,0 @@
|
||||
import ipaddress
|
||||
import socket
|
||||
from typing import Callable
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import requests as req
|
||||
|
||||
from backend.util.settings import Config
|
||||
|
||||
# List of IP networks to block
|
||||
BLOCKED_IP_NETWORKS = [
|
||||
ipaddress.ip_network("0.0.0.0/8"), # "This" Network
|
||||
ipaddress.ip_network("10.0.0.0/8"), # Private-Use
|
||||
ipaddress.ip_network("127.0.0.0/8"), # Loopback
|
||||
ipaddress.ip_network("169.254.0.0/16"), # Link Local
|
||||
ipaddress.ip_network("172.16.0.0/12"), # Private-Use
|
||||
ipaddress.ip_network("192.168.0.0/16"), # Private-Use
|
||||
ipaddress.ip_network("224.0.0.0/4"), # Multicast
|
||||
ipaddress.ip_network("240.0.0.0/4"), # Reserved for Future Use
|
||||
]
|
||||
|
||||
|
||||
def is_ip_blocked(ip: str) -> bool:
|
||||
"""
|
||||
Checks if the IP address is in a blocked network.
|
||||
"""
|
||||
ip_addr = ipaddress.ip_address(ip)
|
||||
return any(ip_addr in network for network in BLOCKED_IP_NETWORKS)
|
||||
|
||||
|
||||
def validate_url(url: str, trusted_origins: list[str]) -> str:
|
||||
"""
|
||||
Validates the URL to prevent SSRF attacks by ensuring it does not point to a private
|
||||
or untrusted IP address, unless whitelisted.
|
||||
"""
|
||||
url = url.strip().strip("/")
|
||||
if not url.startswith(("http://", "https://")):
|
||||
url = "http://" + url
|
||||
|
||||
parsed_url = urlparse(url)
|
||||
hostname = parsed_url.hostname
|
||||
|
||||
if not hostname:
|
||||
raise ValueError(f"Invalid URL: Unable to determine hostname from {url}")
|
||||
|
||||
if any(hostname == origin for origin in trusted_origins):
|
||||
return url
|
||||
|
||||
# Resolve all IP addresses for the hostname
|
||||
ip_addresses = {result[4][0] for result in socket.getaddrinfo(hostname, None)}
|
||||
if not ip_addresses:
|
||||
raise ValueError(f"Unable to resolve IP address for {hostname}")
|
||||
|
||||
# Check if all IP addresses are global
|
||||
for ip in ip_addresses:
|
||||
if is_ip_blocked(ip):
|
||||
raise ValueError(
|
||||
f"Access to private IP address at {hostname}: {ip} is not allowed."
|
||||
)
|
||||
|
||||
return url
|
||||
|
||||
|
||||
class Requests:
|
||||
"""
|
||||
A wrapper around the requests library that validates URLs before making requests.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
trusted_origins: list[str] | None = None,
|
||||
raise_for_status: bool = True,
|
||||
extra_url_validator: Callable[[str], str] | None = None,
|
||||
extra_headers: dict[str, str] | None = None,
|
||||
):
|
||||
self.trusted_origins = []
|
||||
for url in trusted_origins or []:
|
||||
hostname = urlparse(url).hostname
|
||||
if not hostname:
|
||||
raise ValueError(f"Invalid URL: Unable to determine hostname of {url}")
|
||||
self.trusted_origins.append(hostname)
|
||||
|
||||
self.raise_for_status = raise_for_status
|
||||
self.extra_url_validator = extra_url_validator
|
||||
self.extra_headers = extra_headers
|
||||
|
||||
def request(
|
||||
self, method, url, headers=None, allow_redirects=False, *args, **kwargs
|
||||
) -> req.Response:
|
||||
if self.extra_headers is not None:
|
||||
headers = {**(headers or {}), **self.extra_headers}
|
||||
|
||||
url = validate_url(url, self.trusted_origins)
|
||||
if self.extra_url_validator is not None:
|
||||
url = self.extra_url_validator(url)
|
||||
|
||||
response = req.request(
|
||||
method,
|
||||
url,
|
||||
headers=headers,
|
||||
allow_redirects=allow_redirects,
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
if self.raise_for_status:
|
||||
response.raise_for_status()
|
||||
|
||||
return response
|
||||
|
||||
def get(self, url, *args, **kwargs) -> req.Response:
|
||||
return self.request("GET", url, *args, **kwargs)
|
||||
|
||||
def post(self, url, *args, **kwargs) -> req.Response:
|
||||
return self.request("POST", url, *args, **kwargs)
|
||||
|
||||
def put(self, url, *args, **kwargs) -> req.Response:
|
||||
return self.request("PUT", url, *args, **kwargs)
|
||||
|
||||
def delete(self, url, *args, **kwargs) -> req.Response:
|
||||
return self.request("DELETE", url, *args, **kwargs)
|
||||
|
||||
def head(self, url, *args, **kwargs) -> req.Response:
|
||||
return self.request("HEAD", url, *args, **kwargs)
|
||||
|
||||
def options(self, url, *args, **kwargs) -> req.Response:
|
||||
return self.request("OPTIONS", url, *args, **kwargs)
|
||||
|
||||
def patch(self, url, *args, **kwargs) -> req.Response:
|
||||
return self.request("PATCH", url, *args, **kwargs)
|
||||
|
||||
|
||||
requests = Requests(trusted_origins=Config().trust_endpoints_for_requests)
|
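The deleted `backend/util/request.py` above is the SSRF guard that the OAuth handlers and blocks in this changeset import as `from backend.util.request import requests`. Its core check in brief, as a self-contained sketch with the CIDR list trimmed to three networks (the example addresses are assumptions):

import ipaddress

BLOCKED = [ipaddress.ip_network(n) for n in ("127.0.0.0/8", "10.0.0.0/8", "192.168.0.0/16")]

def is_ip_blocked(ip: str) -> bool:
    # Same membership test as the module above, over a reduced network list.
    addr = ipaddress.ip_address(ip)
    return any(addr in net for net in BLOCKED)

assert is_ip_blocked("127.0.0.1")          # loopback -> rejected
assert is_ip_blocked("192.168.1.10")       # private range -> rejected
assert not is_ip_blocked("93.184.216.34")  # a public address -> allowed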
@@ -30,7 +30,6 @@ from typing import (
import Pyro5.api
from pydantic import BaseModel
from Pyro5 import api as pyro
from Pyro5 import config as pyro_config

from backend.data import db, redis
from backend.util.process import AppProcess
@@ -41,10 +40,7 @@ logger = logging.getLogger(__name__)
T = TypeVar("T")
C = TypeVar("C", bound=Callable)

config = Config()
pyro_host = config.pyro_host
pyro_config.MAX_RETRIES = config.pyro_client_comm_retry  # type: ignore
pyro_config.COMMTIMEOUT = config.pyro_client_comm_timeout  # type: ignore
pyro_host = Config().pyro_host


def expose(func: C) -> C:
@@ -170,14 +166,8 @@ class AppService(AppProcess, ABC):

@conn_retry("Pyro", "Starting Pyro Service")
def __start_pyro(self):
conf = Config()
maximum_connection_thread_count = max(
Pyro5.config.THREADPOOL_SIZE,
conf.num_node_workers * conf.num_graph_workers,
)

Pyro5.config.THREADPOOL_SIZE = maximum_connection_thread_count  # type: ignore
daemon = Pyro5.api.Daemon(host=conf.pyro_host, port=self.get_port())
host = Config().pyro_host
daemon = Pyro5.api.Daemon(host=host, port=self.get_port())
self.uri = daemon.register(self, objectId=self.service_name)
logger.info(f"[{self.service_name}] Connected to Pyro; URI = {self.uri}")
daemon.requestLoop()
@@ -192,21 +182,10 @@ class AppService(AppProcess, ABC):
AS = TypeVar("AS", bound=AppService)


class PyroClient:
proxy: Pyro5.api.Proxy


def close_service_client(client: AppService) -> None:
if isinstance(client, PyroClient):
client.proxy._pyroRelease()
else:
raise RuntimeError(f"Client {client.__class__} is not a Pyro client.")


def get_service_client(service_type: Type[AS]) -> AS:
service_name = service_type.service_name

class DynamicClient(PyroClient):
class DynamicClient:
@conn_retry("Pyro", f"Connecting to [{service_name}]")
def __init__(self):
host = os.environ.get(f"{service_name.upper()}_HOST", "localhost")

@@ -69,14 +69,6 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
default="localhost",
description="The default hostname of the Pyro server.",
)
pyro_client_comm_timeout: float = Field(
default=15,
description="The default timeout in seconds, for Pyro client connections.",
)
pyro_client_comm_retry: int = Field(
default=3,
description="The default number of retries for Pyro client connections.",
)
enable_auth: bool = Field(
default=True,
description="If authentication is enabled or not",
@@ -161,11 +153,6 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
description="Name of the event bus",
)

trust_endpoints_for_requests: List[str] = Field(
default_factory=list,
description="A whitelist of trusted internal endpoints for the backend to make requests to.",
)

backend_cors_allow_origins: List[str] = Field(default_factory=list)

@field_validator("backend_cors_allow_origins")

@@ -1,11 +0,0 @@
/*
Warnings:

- You are about to drop the column `agentGraphParentId` on the `AgentGraph` table. All the data in the column will be lost.

*/
-- DropForeignKey
ALTER TABLE "AgentGraph" DROP CONSTRAINT "AgentGraph_agentGraphParentId_version_fkey";

-- AlterTable
ALTER TABLE "AgentGraph" DROP COLUMN "agentGraphParentId";
@@ -1,4 +0,0 @@
-- This migration converts the stats column from a list to an object.
UPDATE "AgentGraphExecution"
SET "stats" = (stats::jsonb -> 0)::text
WHERE stats IS NOT NULL AND jsonb_typeof(stats::jsonb) = 'array';
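The dropped migration above existed because execution stats had been persisted as a one-element JSON array; `(stats::jsonb -> 0)::text` unwraps that first element into a plain object. The same transformation in Python, for illustration (the sample payload keys follow the stats fields used earlier in this changeset):

import json

stored = '[{"walltime": 1.8, "cputime": 0.6, "nodecount": 5, "error": null}]'
unwrapped = json.dumps(json.loads(stored)[0])  # jsonb -> 0, then back to text
assert json.loads(unwrapped)["walltime"] == 1.8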
323
autogpt_platform/backend/poetry.lock
generated
@@ -180,22 +180,22 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "anthropic"
|
||||
version = "0.39.0"
|
||||
version = "0.25.9"
|
||||
description = "The official Python library for the anthropic API"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "anthropic-0.39.0-py3-none-any.whl", hash = "sha256:ea17093ae0ce0e1768b0c46501d6086b5bcd74ff39d68cd2d6396374e9de7c09"},
|
||||
{file = "anthropic-0.39.0.tar.gz", hash = "sha256:94671cc80765f9ce693f76d63a97ee9bef4c2d6063c044e983d21a2e262f63ba"},
|
||||
{file = "anthropic-0.25.9-py3-none-any.whl", hash = "sha256:d0b17d442160356a531593b237de55d3125cc6fa708f1268c214107e61c81c57"},
|
||||
{file = "anthropic-0.25.9.tar.gz", hash = "sha256:a4ec810b1cfbf3340af99b6f5bf599a83d66986e0f572a5f3bc4ebcab284f629"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
anyio = ">=3.5.0,<5"
|
||||
distro = ">=1.7.0,<2"
|
||||
httpx = ">=0.23.0,<1"
|
||||
jiter = ">=0.4.0,<1"
|
||||
pydantic = ">=1.9.0,<3"
|
||||
sniffio = "*"
|
||||
tokenizers = ">=0.13.0"
|
||||
typing-extensions = ">=4.7,<5"
|
||||
|
||||
[package.extras]
|
||||
@@ -296,7 +296,7 @@ colorama = "^0.4.6"
|
||||
expiringdict = "^1.2.2"
|
||||
google-cloud-logging = "^3.11.3"
|
||||
pydantic = "^2.9.2"
|
||||
pydantic-settings = "^2.6.1"
|
||||
pydantic-settings = "^2.6.0"
|
||||
pyjwt = "^2.8.0"
|
||||
python-dotenv = "^1.0.1"
|
||||
supabase = "^2.9.1"
|
||||
@@ -732,23 +732,22 @@ tests = ["coverage", "coveralls", "dill", "mock", "nose"]
|
||||
|
||||
[[package]]
|
||||
name = "fastapi"
|
||||
version = "0.115.4"
|
||||
version = "0.109.2"
|
||||
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "fastapi-0.115.4-py3-none-any.whl", hash = "sha256:0b504a063ffb3cf96a5e27dc1bc32c80ca743a2528574f9cdc77daa2d31b4742"},
|
||||
{file = "fastapi-0.115.4.tar.gz", hash = "sha256:db653475586b091cb8b2fec2ac54a680ac6a158e07406e1abae31679e8826349"},
|
||||
{file = "fastapi-0.109.2-py3-none-any.whl", hash = "sha256:2c9bab24667293b501cad8dd388c05240c850b58ec5876ee3283c47d6e1e3a4d"},
|
||||
{file = "fastapi-0.109.2.tar.gz", hash = "sha256:f3817eac96fe4f65a2ebb4baa000f394e55f5fccdaf7f75250804bc58f354f73"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0"
|
||||
starlette = ">=0.40.0,<0.42.0"
|
||||
starlette = ">=0.36.3,<0.37.0"
|
||||
typing-extensions = ">=4.8.0"
|
||||
|
||||
[package.extras]
|
||||
all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
|
||||
standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "jinja2 (>=2.11.2)", "python-multipart (>=0.0.7)", "uvicorn[standard] (>=0.12.0)"]
|
||||
all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "feedparser"
|
||||
@@ -764,6 +763,22 @@ files = [
|
||||
[package.dependencies]
|
||||
sgmllib3k = "*"
|
||||
|
||||
[[package]]
|
||||
name = "filelock"
|
||||
version = "3.16.1"
|
||||
description = "A platform independent file lock."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"},
|
||||
{file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"]
|
||||
testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"]
|
||||
typing = ["typing-extensions (>=4.12.2)"]
|
||||
|
||||
[[package]]
|
||||
name = "flake8"
|
||||
version = "7.1.1"
|
||||
@@ -866,6 +881,45 @@ files = [
|
||||
{file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fsspec"
|
||||
version = "2024.9.0"
|
||||
description = "File-system specification"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "fsspec-2024.9.0-py3-none-any.whl", hash = "sha256:a0947d552d8a6efa72cc2c730b12c41d043509156966cca4fb157b0f2a0c574b"},
|
||||
{file = "fsspec-2024.9.0.tar.gz", hash = "sha256:4b0afb90c2f21832df142f292649035d80b421f60a9e1c027802e5a0da2b04e8"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
abfs = ["adlfs"]
|
||||
adl = ["adlfs"]
|
||||
arrow = ["pyarrow (>=1)"]
|
||||
dask = ["dask", "distributed"]
|
||||
dev = ["pre-commit", "ruff"]
|
||||
doc = ["numpydoc", "sphinx", "sphinx-design", "sphinx-rtd-theme", "yarl"]
|
||||
dropbox = ["dropbox", "dropboxdrivefs", "requests"]
|
||||
full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"]
|
||||
fuse = ["fusepy"]
|
||||
gcs = ["gcsfs"]
|
||||
git = ["pygit2"]
|
||||
github = ["requests"]
|
||||
gs = ["gcsfs"]
|
||||
gui = ["panel"]
|
||||
hdfs = ["pyarrow (>=1)"]
|
||||
http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"]
|
||||
libarchive = ["libarchive-c"]
|
||||
oci = ["ocifs"]
|
||||
s3 = ["s3fs"]
|
||||
sftp = ["paramiko"]
|
||||
smb = ["smbprotocol"]
|
||||
ssh = ["paramiko"]
|
||||
test = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "numpy", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "requests"]
|
||||
test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask-expr", "dask[dataframe,test]", "moto[server] (>4,<5)", "pytest-timeout", "xarray"]
|
||||
test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard"]
|
||||
tqdm = ["tqdm"]
|
||||
|
||||
[[package]]
|
||||
name = "google-api-core"
|
||||
version = "2.20.0"
|
||||
@@ -899,13 +953,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"]
|
||||
|
||||
[[package]]
|
||||
name = "google-api-python-client"
|
||||
version = "2.151.0"
|
||||
version = "2.147.0"
|
||||
description = "Google API Client Library for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "google_api_python_client-2.151.0-py2.py3-none-any.whl", hash = "sha256:4427b2f47cd88b0355d540c2c52215f68c337f3bc9d6aae1ceeae4525977504c"},
|
||||
{file = "google_api_python_client-2.151.0.tar.gz", hash = "sha256:a9d26d630810ed4631aea21d1de3e42072f98240aaf184a8a1a874a371115034"},
|
||||
{file = "google_api_python_client-2.147.0-py2.py3-none-any.whl", hash = "sha256:c6ecfa193c695baa41e84562d8f8f244fcd164419eca3fc9fd7565646668f9b2"},
|
||||
{file = "google_api_python_client-2.147.0.tar.gz", hash = "sha256:e864c2cf61d34c00f05278b8bdb72b93b6fa34f0de9ead51d20435f3b65f91be"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1094,13 +1148,13 @@ pydantic = ">=1.10,<3"
|
||||
|
||||
[[package]]
|
||||
name = "groq"
|
||||
version = "0.11.0"
|
||||
version = "0.8.0"
|
||||
description = "The official Python library for the groq API"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "groq-0.11.0-py3-none-any.whl", hash = "sha256:e328531c979542e563668c62260aec13b43a6ee0ca9e2fb22dff1d26f8c8ce54"},
|
||||
{file = "groq-0.11.0.tar.gz", hash = "sha256:dbb9aefedf388ddd4801ec7bf3eba7f5edb67948fec0cd2829d97244059f42a7"},
|
||||
{file = "groq-0.8.0-py3-none-any.whl", hash = "sha256:f5e4e892d45001241a930db451e633ca1f0007e3f749deaa5d7360062fcd61e3"},
|
||||
{file = "groq-0.8.0.tar.gz", hash = "sha256:37ceb2f706bd516d0bfcac8e89048a24b375172987a0d6bd9efb521c54f6deff"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1356,6 +1410,40 @@ http2 = ["h2 (>=3,<5)"]
|
||||
socks = ["socksio (==1.*)"]
|
||||
zstd = ["zstandard (>=0.18.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "huggingface-hub"
|
||||
version = "0.25.1"
|
||||
description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub"
|
||||
optional = false
|
||||
python-versions = ">=3.8.0"
|
||||
files = [
|
||||
{file = "huggingface_hub-0.25.1-py3-none-any.whl", hash = "sha256:a5158ded931b3188f54ea9028097312cb0acd50bffaaa2612014c3c526b44972"},
|
||||
{file = "huggingface_hub-0.25.1.tar.gz", hash = "sha256:9ff7cb327343211fbd06e2b149b8f362fd1e389454f3f14c6db75a4999ee20ff"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
filelock = "*"
|
||||
fsspec = ">=2023.5.0"
|
||||
packaging = ">=20.9"
|
||||
pyyaml = ">=5.1"
|
||||
requests = "*"
|
||||
tqdm = ">=4.42.1"
|
||||
typing-extensions = ">=3.7.4.3"
|
||||
|
||||
[package.extras]
|
||||
all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"]
|
||||
cli = ["InquirerPy (==0.3.4)"]
|
||||
dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"]
|
||||
fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"]
|
||||
hf-transfer = ["hf-transfer (>=0.1.4)"]
|
||||
inference = ["aiohttp", "minijinja (>=1.0)"]
|
||||
quality = ["mypy (==1.5.1)", "ruff (>=0.5.0)"]
|
||||
tensorflow = ["graphviz", "pydot", "tensorflow"]
|
||||
tensorflow-testing = ["keras (<3.0)", "tensorflow"]
|
||||
testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"]
|
||||
torch = ["safetensors[torch]", "torch"]
|
||||
typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "hyperframe"
|
||||
version = "6.0.1"
|
||||
@@ -1796,13 +1884,13 @@ httpx = ">=0.27.0,<0.28.0"
|
||||
|
||||
[[package]]
|
||||
name = "openai"
|
||||
version = "1.54.1"
|
||||
version = "1.50.2"
|
||||
description = "The official Python library for the openai API"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
python-versions = ">=3.7.1"
|
||||
files = [
|
||||
{file = "openai-1.54.1-py3-none-any.whl", hash = "sha256:3cb49ccb6bfdc724ad01cc397d323ef8314fc7d45e19e9de2afdd6484a533324"},
|
||||
{file = "openai-1.54.1.tar.gz", hash = "sha256:5b832bf82002ba8c4f6e5e25c1c0f5d468c22f043711544c716eaffdb30dd6f1"},
|
||||
{file = "openai-1.50.2-py3-none-any.whl", hash = "sha256:822dd2051baa3393d0d5406990611975dd6f533020dc9375a34d4fe67e8b75f7"},
|
||||
{file = "openai-1.50.2.tar.gz", hash = "sha256:3987ae027152fc8bea745d60b02c8f4c4a76e1b5c70e73565fa556db6f78c9e6"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -2001,24 +2089,24 @@ strenum = {version = ">=0.4.9,<0.5.0", markers = "python_version < \"3.11\""}
|
||||
|
||||
[[package]]
|
||||
name = "praw"
|
||||
version = "7.8.1"
|
||||
description = "Python Reddit API Wrapper."
|
||||
version = "7.7.1"
|
||||
description = "PRAW, an acronym for \"Python Reddit API Wrapper\", is a Python package that allows for simple access to Reddit's API."
|
||||
optional = false
|
||||
python-versions = "~=3.8"
|
||||
python-versions = "~=3.7"
|
||||
files = [
|
||||
{file = "praw-7.8.1-py3-none-any.whl", hash = "sha256:15917a81a06e20ff0aaaf1358481f4588449fa2421233040cb25e5c8202a3e2f"},
|
||||
{file = "praw-7.8.1.tar.gz", hash = "sha256:3c5767909f71e48853eb6335fef7b50a43cbe3da728cdfb16d3be92904b0a4d8"},
|
||||
{file = "praw-7.7.1-py3-none-any.whl", hash = "sha256:9ec5dc943db00c175bc6a53f4e089ce625f3fdfb27305564b616747b767d38ef"},
|
||||
{file = "praw-7.7.1.tar.gz", hash = "sha256:f1d7eef414cafe28080dda12ed09253a095a69933d5c8132eca11d4dc8a070bf"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
prawcore = ">=2.4,<3"
|
||||
update_checker = ">=0.18"
|
||||
prawcore = ">=2.1,<3"
|
||||
update-checker = ">=0.18"
|
||||
websocket-client = ">=0.54.0"
|
||||
|
||||
[package.extras]
|
||||
ci = ["coveralls"]
|
||||
dev = ["packaging", "praw[lint]", "praw[test]"]
|
||||
lint = ["praw[readthedocs]", "pre-commit", "ruff (>=0.0.291)"]
|
||||
dev = ["betamax (>=0.8,<0.9)", "betamax-matchers (>=0.3.0,<0.5)", "furo", "packaging", "pre-commit", "pytest (>=2.7.3)", "requests (>=2.20.1,<3)", "sphinx", "urllib3 (==1.26.*)"]
|
||||
lint = ["furo", "pre-commit", "sphinx"]
|
||||
readthedocs = ["furo", "sphinx"]
|
||||
test = ["betamax (>=0.8,<0.9)", "betamax-matchers (>=0.3.0,<0.5)", "pytest (>=2.7.3)", "requests (>=2.20.1,<3)", "urllib3 (==1.26.*)"]
|
||||
|
||||
@@ -2044,13 +2132,13 @@ test = ["betamax (>=0.8,<0.9)", "pytest (>=2.7.3)", "urllib3 (==1.26.*)"]
|
||||
|
||||
[[package]]
|
||||
name = "prisma"
|
||||
version = "0.15.0"
|
||||
version = "0.13.1"
|
||||
description = "Prisma Client Python is an auto-generated and fully type-safe database client"
|
||||
optional = false
|
||||
python-versions = ">=3.8.0"
|
||||
python-versions = ">=3.7.0"
|
||||
files = [
|
||||
{file = "prisma-0.15.0-py3-none-any.whl", hash = "sha256:de949cc94d3d91243615f22ff64490aa6e2d7cb81aabffce53d92bd3977c09a4"},
|
||||
{file = "prisma-0.15.0.tar.gz", hash = "sha256:5cd6402aa8322625db3fc1152040404e7fc471fe7f8fa3a314fa8a99529ca107"},
|
||||
{file = "prisma-0.13.1-py3-none-any.whl", hash = "sha256:b79ad69bdf09b217431904c1250c36421233ea394a230f1665f5699fd842ea20"},
|
||||
{file = "prisma-0.13.1.tar.gz", hash = "sha256:f0f86a67c38e6f08b53cce9272dd9c736f69f4fcbb94dbdfa87bf44f983e925d"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -2308,13 +2396,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
|
||||
|
||||
[[package]]
|
||||
name = "pydantic-settings"
|
||||
version = "2.6.1"
|
||||
version = "2.6.0"
|
||||
description = "Settings management using Pydantic"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "pydantic_settings-2.6.1-py3-none-any.whl", hash = "sha256:7fb0637c786a558d3103436278a7c4f1cfd29ba8973238a50c5bb9a55387da87"},
|
||||
{file = "pydantic_settings-2.6.1.tar.gz", hash = "sha256:e0f92546d8a9923cb8941689abf85d6601a8c19a23e97a34b2964a2e3f813ca0"},
|
||||
{file = "pydantic_settings-2.6.0-py3-none-any.whl", hash = "sha256:4a819166f119b74d7f8c765196b165f95cc7487ce58ea27dec8a5a26be0970e0"},
|
||||
{file = "pydantic_settings-2.6.0.tar.gz", hash = "sha256:44a1804abffac9e6a30372bb45f6cafab945ef5af25e66b1c634c01dd39e0188"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -2426,17 +2514,17 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments
|
||||
|
||||
[[package]]
|
||||
name = "pytest-asyncio"
|
||||
version = "0.24.0"
|
||||
version = "0.23.8"
|
||||
description = "Pytest support for asyncio"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b"},
|
||||
{file = "pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276"},
|
||||
{file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"},
|
||||
{file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
pytest = ">=8.2,<9"
|
||||
pytest = ">=7.0.0,<9"
|
||||
|
||||
[package.extras]
|
||||
docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
|
||||
@@ -2577,13 +2665,13 @@ websockets = ">=11,<14"
|
||||
|
||||
[[package]]
|
||||
name = "redis"
|
||||
version = "5.2.0"
|
||||
version = "5.1.0"
|
||||
description = "Python client for Redis database and key-value store"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "redis-5.2.0-py3-none-any.whl", hash = "sha256:ae174f2bb3b1bf2b09d54bf3e51fbc1469cf6c10aa03e21141f51969801a7897"},
|
||||
{file = "redis-5.2.0.tar.gz", hash = "sha256:0b1087665a771b1ff2e003aa5bdd354f15a70c9e25d5a7dbf9c722c16528a7b0"},
|
||||
{file = "redis-5.1.0-py3-none-any.whl", hash = "sha256:fd4fccba0d7f6aa48c58a78d76ddb4afc698f5da4a2c1d03d916e4fd7ab88cdd"},
|
||||
{file = "redis-5.1.0.tar.gz", hash = "sha256:b756df1e4a3858fcc0ef861f3fc53623a96c41e2b1f5304e09e0fe758d333d40"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -2610,13 +2698,13 @@ rpds-py = ">=0.7.0"
|
||||
|
||||
[[package]]
|
||||
name = "replicate"
|
||||
version = "1.0.3"
|
||||
version = "0.34.1"
|
||||
description = "Python client for Replicate"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "replicate-1.0.3-py3-none-any.whl", hash = "sha256:8c49d63444b7ea9ac1d6af99eb23a01efb5b7f079cc8a020d6f52b38843db1da"},
|
||||
{file = "replicate-1.0.3.tar.gz", hash = "sha256:0fd9ca5230fe67c42e4508dd96a5b1414b3fefa5342f8921dbb63c74266cb130"},
|
||||
{file = "replicate-0.34.1-py3-none-any.whl", hash = "sha256:beeebbdd83dca46eee960c383dfd8dcc48d7922d9fe9e613f242cc69ed522f2f"},
|
||||
{file = "replicate-0.34.1.tar.gz", hash = "sha256:57cf80c7f4d7f6ae503b1bef400f57c26d494724002d7e9a8750d01394dcfc76"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -2819,13 +2907,13 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "sentry-sdk"
|
||||
version = "2.18.0"
|
||||
version = "2.17.0"
|
||||
description = "Python client for Sentry (https://sentry.io)"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "sentry_sdk-2.18.0-py2.py3-none-any.whl", hash = "sha256:ee70e27d1bbe4cd52a38e1bd28a5fadb9b17bc29d91b5f2b97ae29c0a7610442"},
|
||||
{file = "sentry_sdk-2.18.0.tar.gz", hash = "sha256:0dc21febd1ab35c648391c664df96f5f79fb0d92d7d4225cd9832e53a617cafd"},
|
||||
{file = "sentry_sdk-2.17.0-py2.py3-none-any.whl", hash = "sha256:625955884b862cc58748920f9e21efdfb8e0d4f98cca4ab0d3918576d5b606ad"},
|
||||
{file = "sentry_sdk-2.17.0.tar.gz", hash = "sha256:dd0a05352b78ffeacced73a94e86f38b32e2eae15fff5f30ca5abb568a72eacf"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -2853,11 +2941,9 @@ httpx = ["httpx (>=0.16.0)"]
|
||||
huey = ["huey (>=2)"]
|
||||
huggingface-hub = ["huggingface-hub (>=0.22)"]
|
||||
langchain = ["langchain (>=0.0.210)"]
|
||||
launchdarkly = ["launchdarkly-server-sdk (>=9.8.0)"]
|
||||
litestar = ["litestar (>=2.0.0)"]
|
||||
loguru = ["loguru (>=0.5)"]
|
||||
openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"]
|
||||
openfeature = ["openfeature-sdk (>=0.7.1)"]
|
||||
opentelemetry = ["opentelemetry-distro (>=0.35b0)"]
|
||||
opentelemetry-experimental = ["opentelemetry-distro"]
|
||||
pure-eval = ["asttokens", "executing", "pure-eval"]
|
||||
@@ -2916,13 +3002,13 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "starlette"
|
||||
version = "0.41.2"
|
||||
version = "0.36.3"
|
||||
description = "The little ASGI library that shines."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "starlette-0.41.2-py3-none-any.whl", hash = "sha256:fbc189474b4731cf30fcef52f18a8d070e3f3b46c6a04c97579e85e6ffca942d"},
|
||||
{file = "starlette-0.41.2.tar.gz", hash = "sha256:9834fd799d1a87fd346deb76158668cfa0b0d56f85caefe8268e2d97c3468b62"},
|
||||
{file = "starlette-0.36.3-py3-none-any.whl", hash = "sha256:13d429aa93a61dc40bf503e8c801db1f1bca3dc706b10ef2434a36123568f044"},
|
||||
{file = "starlette-0.36.3.tar.gz", hash = "sha256:90a671733cfb35771d8cc605e0b679d23b992f8dcfad48cc60b38cb29aeb7080"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -3011,6 +3097,123 @@ files = [
|
||||
doc = ["reno", "sphinx"]
|
||||
test = ["pytest", "tornado (>=4.5)", "typeguard"]
|
||||
|
||||
[[package]]
name = "tokenizers"
version = "0.20.0"
description = ""
optional = false
python-versions = ">=3.7"
files = [
    {file = "tokenizers-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6cff5c5e37c41bc5faa519d6f3df0679e4b37da54ea1f42121719c5e2b4905c0"},
    {file = "tokenizers-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:62a56bf75c27443432456f4ca5ca055befa95e25be8a28141cc495cac8ae4d6d"},
    {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68cc7de6a63f09c4a86909c2597b995aa66e19df852a23aea894929c74369929"},
    {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:053c37ecee482cc958fdee53af3c6534286a86f5d35aac476f7c246830e53ae5"},
    {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d7074aaabc151a6363fa03db5493fc95b423b2a1874456783989e96d541c7b6"},
    {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a11435780f2acd89e8fefe5e81cecf01776f6edb9b3ac95bcb76baee76b30b90"},
    {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9a81cd2712973b007d84268d45fc3f6f90a79c31dfe7f1925e6732f8d2959987"},
    {file = "tokenizers-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7dfd796ab9d909f76fb93080e1c7c8309f196ecb316eb130718cd5e34231c69"},
    {file = "tokenizers-0.20.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8029ad2aa8cb00605c9374566034c1cc1b15130713e0eb5afcef6cface8255c9"},
    {file = "tokenizers-0.20.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ca4d54260ebe97d59dfa9a30baa20d0c4dd9137d99a8801700055c561145c24e"},
    {file = "tokenizers-0.20.0-cp310-none-win32.whl", hash = "sha256:95ee16b57cec11b86a7940174ec5197d506439b0f415ab3859f254b1dffe9df0"},
    {file = "tokenizers-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:0a61a11e93eeadbf02aea082ffc75241c4198e0608bbbac4f65a9026851dcf37"},
    {file = "tokenizers-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6636b798b3c4d6c9b1af1a918bd07c867808e5a21c64324e95318a237e6366c3"},
    {file = "tokenizers-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ec603e42eaf499ffd58b9258162add948717cf21372458132f14e13a6bc7172"},
    {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cce124264903a8ea6f8f48e1cc7669e5ef638c18bd4ab0a88769d5f92debdf7f"},
    {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07bbeba0231cf8de07aa6b9e33e9779ff103d47042eeeb859a8c432e3292fb98"},
    {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:06c0ca8397b35d38b83a44a9c6929790c1692957d88541df061cb34d82ebbf08"},
    {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ca6557ac3b83d912dfbb1f70ab56bd4b0594043916688e906ede09f42e192401"},
    {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a5ad94c9e80ac6098328bee2e3264dbced4c6faa34429994d473f795ec58ef4"},
    {file = "tokenizers-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b5c7f906ee6bec30a9dc20268a8b80f3b9584de1c9f051671cb057dc6ce28f6"},
    {file = "tokenizers-0.20.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:31e087e9ee1b8f075b002bfee257e858dc695f955b43903e1bb4aa9f170e37fe"},
    {file = "tokenizers-0.20.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c3124fb6f3346cb3d8d775375d3b429bf4dcfc24f739822702009d20a4297990"},
    {file = "tokenizers-0.20.0-cp311-none-win32.whl", hash = "sha256:a4bb8b40ba9eefa621fdcabf04a74aa6038ae3be0c614c6458bd91a4697a452f"},
    {file = "tokenizers-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:2b709d371f1fe60a28ef0c5c67815952d455ca7f34dbe7197eaaed3cc54b658e"},
    {file = "tokenizers-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:15c81a17d0d66f4987c6ca16f4bea7ec253b8c7ed1bb00fdc5d038b1bb56e714"},
    {file = "tokenizers-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6a531cdf1fb6dc41c984c785a3b299cb0586de0b35683842a3afbb1e5207f910"},
    {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06caabeb4587f8404e0cd9d40f458e9cba3e815c8155a38e579a74ff3e2a4301"},
    {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8768f964f23f5b9f50546c0369c75ab3262de926983888bbe8b98be05392a79c"},
    {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:626403860152c816f97b649fd279bd622c3d417678c93b4b1a8909b6380b69a8"},
    {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c1b88fa9e5ff062326f4bf82681da5a96fca7104d921a6bd7b1e6fcf224af26"},
    {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d7e559436a07dc547f22ce1101f26d8b2fad387e28ec8e7e1e3b11695d681d8"},
    {file = "tokenizers-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e48afb75e50449848964e4a67b0da01261dd3aa8df8daecf10db8fd7f5b076eb"},
    {file = "tokenizers-0.20.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:baf5d0e1ff44710a95eefc196dd87666ffc609fd447c5e5b68272a7c3d342a1d"},
    {file = "tokenizers-0.20.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e5e56df0e8ed23ba60ae3848c3f069a0710c4b197218fe4f89e27eba38510768"},
    {file = "tokenizers-0.20.0-cp312-none-win32.whl", hash = "sha256:ec53e5ecc142a82432f9c6c677dbbe5a2bfee92b8abf409a9ecb0d425ee0ce75"},
    {file = "tokenizers-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:f18661ece72e39c0dfaa174d6223248a15b457dbd4b0fc07809b8e6d3ca1a234"},
    {file = "tokenizers-0.20.0-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:f7065b1084d8d1a03dc89d9aad69bcbc8415d4bc123c367063eb32958cd85054"},
    {file = "tokenizers-0.20.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:e5d4069e4714e3f7ba0a4d3d44f9d84a432cd4e4aa85c3d7dd1f51440f12e4a1"},
    {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:799b808529e54b7e1a36350bda2aeb470e8390e484d3e98c10395cee61d4e3c6"},
    {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f9baa027cc8a281ad5f7725a93c204d7a46986f88edbe8ef7357f40a23fb9c7"},
    {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:010ec7f3f7a96adc4c2a34a3ada41fa14b4b936b5628b4ff7b33791258646c6b"},
    {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98d88f06155335b14fd78e32ee28ca5b2eb30fced4614e06eb14ae5f7fba24ed"},
    {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e13eb000ef540c2280758d1b9cfa5fe424b0424ae4458f440e6340a4f18b2638"},
    {file = "tokenizers-0.20.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fab3cf066ff426f7e6d70435dc28a9ff01b2747be83810e397cba106f39430b0"},
    {file = "tokenizers-0.20.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:39fa3761b30a89368f322e5daf4130dce8495b79ad831f370449cdacfb0c0d37"},
    {file = "tokenizers-0.20.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c8da0fba4d179ddf2607821575998df3c294aa59aa8df5a6646dc64bc7352bce"},
    {file = "tokenizers-0.20.0-cp37-none-win32.whl", hash = "sha256:fada996d6da8cf213f6e3c91c12297ad4f6cdf7a85c2fadcd05ec32fa6846fcd"},
    {file = "tokenizers-0.20.0-cp37-none-win_amd64.whl", hash = "sha256:7d29aad702279e0760c265fcae832e89349078e3418dd329732d4503259fd6bd"},
    {file = "tokenizers-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:099c68207f3ef0227ecb6f80ab98ea74de559f7b124adc7b17778af0250ee90a"},
    {file = "tokenizers-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:68012d8a8cddb2eab3880870d7e2086cb359c7f7a2b03f5795044f5abff4e850"},
    {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9253bdd209c6aee168deca7d0e780581bf303e0058f268f9bb06859379de19b6"},
    {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8f868600ddbcb0545905ed075eb7218a0756bf6c09dae7528ea2f8436ebd2c93"},
    {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a9643d9c8c5f99b6aba43fd10034f77cc6c22c31f496d2f0ee183047d948fa0"},
    {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c375c6a889aeab44734028bc65cc070acf93ccb0f9368be42b67a98e1063d3f6"},
    {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e359f852328e254f070bbd09a19a568421d23388f04aad9f2fb7da7704c7228d"},
    {file = "tokenizers-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d98b01a309d4387f3b1c1dd68a8b8136af50376cf146c1b7e8d8ead217a5be4b"},
    {file = "tokenizers-0.20.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:459f7537119554c2899067dec1ac74a00d02beef6558f4ee2e99513bf6d568af"},
    {file = "tokenizers-0.20.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:392b87ec89452628c045c9f2a88bc2a827f4c79e7d84bc3b72752b74c2581f70"},
    {file = "tokenizers-0.20.0-cp38-none-win32.whl", hash = "sha256:55a393f893d2ed4dd95a1553c2e42d4d4086878266f437b03590d3f81984c4fe"},
    {file = "tokenizers-0.20.0-cp38-none-win_amd64.whl", hash = "sha256:30ffe33c5c2f2aab8e9a3340d0110dd9f7ace7eec7362e20a697802306bd8068"},
    {file = "tokenizers-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:aa2d4a6fed2a7e3f860c7fc9d48764bb30f2649d83915d66150d6340e06742b8"},
    {file = "tokenizers-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b5ef0f814084a897e9071fc4a868595f018c5c92889197bdc4bf19018769b148"},
    {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc1e1b791e8c3bf4c4f265f180dadaff1c957bf27129e16fdd5e5d43c2d3762c"},
    {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b69e55e481459c07885263743a0d3c18d52db19bae8226a19bcca4aaa213fff"},
    {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4806b4d82e27a2512bc23057b2986bc8b85824914286975b84d8105ff40d03d9"},
    {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9859e9ef13adf5a473ccab39d31bff9c550606ae3c784bf772b40f615742a24f"},
    {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef703efedf4c20488a8eb17637b55973745b27997ff87bad88ed499b397d1144"},
    {file = "tokenizers-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6eec0061bab94b1841ab87d10831fdf1b48ebaed60e6d66d66dbe1d873f92bf5"},
    {file = "tokenizers-0.20.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:980f3d0d7e73f845b69087f29a63c11c7eb924c4ad6b358da60f3db4cf24bdb4"},
    {file = "tokenizers-0.20.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7c157550a2f3851b29d7fdc9dc059fcf81ff0c0fc49a1e5173a89d533ed043fa"},
    {file = "tokenizers-0.20.0-cp39-none-win32.whl", hash = "sha256:8a3d2f4d08608ec4f9895ec25b4b36a97f05812543190a5f2c3cd19e8f041e5a"},
    {file = "tokenizers-0.20.0-cp39-none-win_amd64.whl", hash = "sha256:d90188d12afd0c75e537f9a1d92f9c7375650188ee4f48fdc76f9e38afbd2251"},
    {file = "tokenizers-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d68e15f1815357b059ec266062340c343ea7f98f7f330602df81ffa3474b6122"},
    {file = "tokenizers-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:23f9ecec637b9bc80da5f703808d29ed5329e56b5aa8d791d1088014f48afadc"},
    {file = "tokenizers-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f830b318ee599e3d0665b3e325f85bc75ee2d2ca6285f52e439dc22b64691580"},
    {file = "tokenizers-0.20.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3dc750def789cb1de1b5a37657919545e1d9ffa667658b3fa9cb7862407a1b8"},
    {file = "tokenizers-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e26e6c755ae884c2ea6135cd215bdd0fccafe4ee62405014b8c3cd19954e3ab9"},
    {file = "tokenizers-0.20.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:a1158c7174f427182e08baa2a8ded2940f2b4a3e94969a85cc9cfd16004cbcea"},
    {file = "tokenizers-0.20.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:6324826287a3fc198898d3dcf758fe4a8479e42d6039f4c59e2cedd3cf92f64e"},
    {file = "tokenizers-0.20.0-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7d8653149405bb0c16feaf9cfee327fdb6aaef9dc2998349fec686f35e81c4e2"},
    {file = "tokenizers-0.20.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8a2dc1e402a155e97309287ca085c80eb1b7fab8ae91527d3b729181639fa51"},
    {file = "tokenizers-0.20.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07bef67b20aa6e5f7868c42c7c5eae4d24f856274a464ae62e47a0f2cccec3da"},
    {file = "tokenizers-0.20.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da06e397182ff53789c506c7833220c192952c57e1581a53f503d8d953e2d67e"},
    {file = "tokenizers-0.20.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:302f7e11a14814028b7fc88c45a41f1bbe9b5b35fd76d6869558d1d1809baa43"},
    {file = "tokenizers-0.20.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:055ec46e807b875589dfbe3d9259f9a6ee43394fb553b03b3d1e9541662dbf25"},
    {file = "tokenizers-0.20.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e3144b8acebfa6ae062e8f45f7ed52e4b50fb6c62f93afc8871b525ab9fdcab3"},
    {file = "tokenizers-0.20.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b52aa3fd14b2a07588c00a19f66511cff5cca8f7266ca3edcdd17f3512ad159f"},
    {file = "tokenizers-0.20.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b8cf52779ffc5d4d63a0170fbeb512372bad0dd014ce92bbb9149756c831124"},
    {file = "tokenizers-0.20.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:983a45dd11a876124378dae71d6d9761822199b68a4c73f32873d8cdaf326a5b"},
    {file = "tokenizers-0.20.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df6b819c9a19831ebec581e71a7686a54ab45d90faf3842269a10c11d746de0c"},
    {file = "tokenizers-0.20.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e738cfd80795fcafcef89c5731c84b05638a4ab3f412f97d5ed7765466576eb1"},
    {file = "tokenizers-0.20.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c8842c7be2fadb9c9edcee233b1b7fe7ade406c99b0973f07439985c1c1d0683"},
    {file = "tokenizers-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e47a82355511c373a4a430c4909dc1e518e00031207b1fec536c49127388886b"},
    {file = "tokenizers-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9afbf359004551179a5db19424180c81276682773cff2c5d002f6eaaffe17230"},
    {file = "tokenizers-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a07eaa8799a92e6af6f472c21a75bf71575de2af3c0284120b7a09297c0de2f3"},
    {file = "tokenizers-0.20.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0994b2e5fc53a301071806bc4303e4bc3bdc3f490e92a21338146a36746b0872"},
    {file = "tokenizers-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b6466e0355b603d10e3cc3d282d350b646341b601e50969464a54939f9848d0"},
    {file = "tokenizers-0.20.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:1e86594c2a433cb1ea09cfbe596454448c566e57ee8905bd557e489d93e89986"},
    {file = "tokenizers-0.20.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3e14cdef1efa96ecead6ea64a891828432c3ebba128bdc0596e3059fea104ef3"},
    {file = "tokenizers-0.20.0.tar.gz", hash = "sha256:39d7acc43f564c274085cafcd1dae9d36f332456de1a31970296a6b8da4eac8d"},
]

[package.dependencies]
huggingface-hub = ">=0.16.4,<1.0"

[package.extras]
dev = ["tokenizers[testing]"]
docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"]
testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests", "ruff"]

[[package]]
name = "tomli"
version = "2.0.1"
@@ -3141,13 +3344,13 @@ zstd = ["zstandard (>=0.18.0)"]

[[package]]
name = "uvicorn"
version = "0.32.0"
version = "0.30.6"
description = "The lightning-fast ASGI server."
optional = false
python-versions = ">=3.8"
files = [
    {file = "uvicorn-0.32.0-py3-none-any.whl", hash = "sha256:60b8f3a5ac027dcd31448f411ced12b5ef452c646f76f02f8cc3f25d8d26fd82"},
    {file = "uvicorn-0.32.0.tar.gz", hash = "sha256:f78b36b143c16f54ccdb8190d0a26b5f1901fe5a3c777e1ab29f26391af8551e"},
    {file = "uvicorn-0.30.6-py3-none-any.whl", hash = "sha256:65fd46fe3fda5bdc1b03b94eb634923ff18cd35b2f084813ea79d1f103f711b5"},
    {file = "uvicorn-0.30.6.tar.gz", hash = "sha256:4b15decdda1e72be08209e860a1e10e92439ad5b97cf44cc945fcbee66fc5788"},
]

[package.dependencies]
@@ -3677,4 +3880,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "b761f200e8ad7560321fca7bbefbe79377740952ace5dcbecf0371bb8aa16df1"
content-hash = "bbad5245c6bd3cd1d93d9f047b65e40beda081d83c2eed59eed508c41a9b5ff1"

@@ -10,41 +10,41 @@ packages = [{ include = "backend" }]
[tool.poetry.dependencies]
python = "^3.10"
aio-pika = "^9.4.3"
anthropic = "^0.39.0"
anthropic = "^0.25.1"
apscheduler = "^3.10.4"
autogpt-libs = { path = "../autogpt_libs", develop = true }
click = "^8.1.7"
croniter = "^5.0.1"
discord-py = "^2.4.0"
fastapi = "^0.115.4"
fastapi = "^0.109.0"
feedparser = "^6.0.11"
flake8 = "^7.0.0"
google-api-python-client = "^2.151.0"
google-api-python-client = "^2.142.0"
google-auth-oauthlib = "^1.2.1"
groq = "^0.11.0"
groq = "^0.8.0"
jinja2 = "^3.1.4"
jsonref = "^1.1.0"
jsonschema = "^4.22.0"
ollama = "^0.3.0"
openai = "^1.54.1"
praw = "~7.8.1"
prisma = "^0.15.0"
openai = "^1.35.7"
praw = "~7.7.1"
prisma = "^0.13.1"
psutil = "^6.1.0"
pydantic = "^2.7.2"
pydantic-settings = "^2.3.4"
pyro5 = "^5.15"
pytest = "^8.2.1"
pytest-asyncio = "^0.24.0"
pytest-asyncio = "^0.23.7"
python-dotenv = "^1.0.1"
redis = "^5.2.0"
sentry-sdk = "2.18.0"
redis = "^5.0.8"
sentry-sdk = "2.17.0"
supabase = "^2.7.2"
tenacity = "^9.0.0"
uvicorn = { extras = ["standard"], version = "^0.32.0" }
uvicorn = { extras = ["standard"], version = "^0.30.1" }
websockets = "^13.1"
youtube-transcript-api = "^0.6.2"
googlemaps = "^4.10.0"
replicate = "^1.0.3"
replicate = "^0.34.1"
pinecone = "^5.3.1"
cryptography = "^43.0.3"

[tool.poetry.group.dev.dependencies]

@@ -52,6 +52,11 @@ model AgentGraph {
  AgentGraphExecution         AgentGraphExecution[]
  AgentGraphExecutionSchedule AgentGraphExecutionSchedule[]

  // All sub-graphs are defined within this 1-level depth list (even if it's a nested graph).
  AgentSubGraphs     AgentGraph[] @relation("AgentSubGraph")
  agentGraphParentId String?
  AgentGraphParent   AgentGraph?  @relation("AgentSubGraph", fields: [agentGraphParentId, version], references: [id, version], onDelete: Cascade)

  @@id(name: "graphVersionId", [id, version])
}

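The new self-relation keeps every sub-graph in a flat, one-level list under its parent, and the test below shows that links may not cross sub-graph boundaries. A minimal sketch of that boundary check, written as a hypothetical helper inferred from the test's assertions rather than from the platform's actual validation code:

# Hypothetical sketch of the subgraph-boundary rule exercised by the test below;
# the real check lives in the backend graph model and may be structured differently.
def validate_link(source_id: str, sink_id: str, subgraph_map: dict[str, str]) -> None:
    """Reject links whose endpoints belong to different sub-graphs.

    `subgraph_map` maps node_id -> subgraph_id; top-level nodes share one id.
    """
    if subgraph_map.get(source_id) != subgraph_map.get(sink_id):
        raise ValueError(
            f"Link {source_id} -> {sink_id} connects nodes in a different subgraph"
        )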
@@ -1,12 +1,9 @@
from typing import Any
from uuid import UUID

import pytest

from backend.blocks.basic import AgentInputBlock, AgentOutputBlock, StoreValueBlock
from backend.data.block import BlockSchema
from backend.blocks.basic import AgentInputBlock, StoreValueBlock
from backend.data.graph import Graph, Link, Node
from backend.data.model import SchemaField
from backend.data.user import DEFAULT_USER_ID
from backend.server.model import CreateGraph
from backend.util.test import SpinTestServer
@@ -18,8 +15,9 @@ async def test_graph_creation(server: SpinTestServer):
    Test the creation of a graph with nodes and links.

    This test ensures that:
    1. A graph can be successfully created with valid connections.
    2. The created graph has the correct structure and properties.
    1. Nodes from different subgraphs cannot be directly connected.
    2. A graph can be successfully created with valid connections.
    3. The created graph has the correct structure and properties.

    Args:
        server (SpinTestServer): The test server instance.
@@ -39,13 +37,23 @@ async def test_graph_creation(server: SpinTestServer):
        links=[
            Link(
                source_id="node_1",
                sink_id="node_2",
                sink_id="node_3",
                source_name="output",
                sink_name="name",
                sink_name="input",
            ),
        ],
        subgraphs={"subgraph_1": ["node_2", "node_3"]},
    )
    create_graph = CreateGraph(graph=graph)

    try:
        await server.agent_server.test_create_graph(create_graph, DEFAULT_USER_ID)
        assert False, "Should not be able to connect nodes from different subgraphs"
    except ValueError as e:
        assert "different subgraph" in str(e)

    # Change node_1 <-> node_3 link to node_1 <-> node_2 (input for subgraph_1)
    graph.links[0].sink_id = "node_2"
    created_graph = await server.agent_server.test_create_graph(
        create_graph, DEFAULT_USER_ID
    )
@@ -65,6 +73,9 @@ async def test_graph_creation(server: SpinTestServer):
    assert links[0].source_id in {nodes[0].id, nodes[1].id, nodes[2].id}
    assert links[0].sink_id in {nodes[0].id, nodes[1].id, nodes[2].id}

    assert len(created_graph.subgraphs) == 1
    assert len(created_graph.subgraph_map) == len(created_graph.nodes) == 3


@pytest.mark.asyncio(scope="session")
async def test_get_input_schema(server: SpinTestServer):
@@ -80,54 +91,90 @@ async def test_get_input_schema(server: SpinTestServer):
        server (SpinTestServer): The test server instance.
    """
    value_block = StoreValueBlock().id
    input_block = AgentInputBlock().id
    output_block = AgentOutputBlock().id

    graph = Graph(
        name="TestInputSchema",
        description="Test input schema",
        nodes=[
            Node(
                id="node_0_a",
                block_id=input_block,
                input_default={"name": "in_key_a", "title": "Key A", "value": "A"},
                metadata={"id": "node_0_a"},
            ),
            Node(
                id="node_0_b",
                block_id=input_block,
                input_default={"name": "in_key_b", "advanced": True},
                metadata={"id": "node_0_b"},
            ),
            Node(id="node_1", block_id=value_block, metadata={"id": "node_1"}),
            Node(
                id="node_2",
                block_id=output_block,
                input_default={
                    "name": "out_key",
                    "description": "This is an output key",
                },
                metadata={"id": "node_2"},
            ),
            Node(id="node_1", block_id=value_block),
        ],
        links=[],
    )

    create_graph = CreateGraph(graph=graph)
    created_graph = await server.agent_server.test_create_graph(
        create_graph, DEFAULT_USER_ID
    )

    input_schema = created_graph.get_input_schema()

    assert len(input_schema) == 1

    assert input_schema[0].title == "Input"
    assert input_schema[0].node_id == created_graph.nodes[0].id


@pytest.mark.asyncio(scope="session")
async def test_get_input_schema_none_required(server: SpinTestServer):
    """
    Test the get_input_schema method when no inputs are required.

    This test ensures that:
    1. A graph can be created with a node that has a default input value.
    2. The input schema of the created graph is empty when all inputs have default values.

    Args:
        server (SpinTestServer): The test server instance.
    """
    value_block = StoreValueBlock().id

    graph = Graph(
        name="TestInputSchema",
        description="Test input schema",
        nodes=[
            Node(id="node_1", block_id=value_block, input_default={"input": "value"}),
        ],
        links=[],
    )

    create_graph = CreateGraph(graph=graph)
    created_graph = await server.agent_server.test_create_graph(
        create_graph, DEFAULT_USER_ID
    )

    input_schema = created_graph.get_input_schema()

    assert input_schema == []


@pytest.mark.asyncio(scope="session")
async def test_get_input_schema_with_linked_blocks(server: SpinTestServer):
    """
    Test the get_input_schema method with linked blocks.

    This test ensures that:
    1. A graph can be created with multiple nodes and links between them.
    2. The input schema correctly identifies required inputs for linked blocks.
    3. Inputs that are satisfied by links are not included in the input schema.

    Args:
        server (SpinTestServer): The test server instance.
    """
    value_block = StoreValueBlock().id

    graph = Graph(
        name="TestInputSchemaLinkedBlocks",
        description="Test input schema with linked blocks",
        nodes=[
            Node(id="node_1", block_id=value_block),
            Node(id="node_2", block_id=value_block),
        ],
        links=[
            Link(
                source_id="node_0_a",
                sink_id="node_1",
                source_name="result",
                sink_name="input",
            ),
            Link(
                source_id="node_0_b",
                sink_id="node_1",
                source_name="result",
                sink_name="input",
            ),
            Link(
                source_id="node_1",
                sink_id="node_2",
                source_name="output",
                sink_name="value",
                sink_name="data",
            ),
        ],
    )
@@ -137,21 +184,25 @@ async def test_get_input_schema(server: SpinTestServer):
        create_graph, DEFAULT_USER_ID
    )

    class ExpectedInputSchema(BlockSchema):
        in_key_a: Any = SchemaField(title="Key A", default="A", advanced=False)
        in_key_b: Any = SchemaField(title="in_key_b", advanced=True)
    input_schema = created_graph.get_input_schema()

    class ExpectedOutputSchema(BlockSchema):
        out_key: Any = SchemaField(
            description="This is an output key",
            title="out_key",
            advanced=False,
        )
    assert len(input_schema) == 2

    input_schema = created_graph.input_schema
    input_schema["title"] = "ExpectedInputSchema"
    assert input_schema == ExpectedInputSchema.jsonschema()
    node_1_input = next(
        (item for item in input_schema if item.node_id == created_graph.nodes[0].id),
        None,
    )
    node_2_input = next(
        (item for item in input_schema if item.node_id == created_graph.nodes[1].id),
        None,
    )

    output_schema = created_graph.output_schema
    output_schema["title"] = "ExpectedOutputSchema"
    assert output_schema == ExpectedOutputSchema.jsonschema()
    assert node_1_input is not None
    assert node_2_input is not None
    assert node_1_input.title == "Input"
    assert node_2_input.title == "Input"

    assert not any(
        item.title == "data" and item.node_id == created_graph.nodes[1].id
        for item in input_schema
    )

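Taken together, these tests pin down the contract of get_input_schema: it returns one entry per exposed input, skips inputs that already carry a default, and skips inputs that are fed by an incoming link. A rough sketch of that contract, reconstructed from the assertions above rather than from the actual implementation, with illustrative names throughout:

# Hypothetical reconstruction of the behavior the tests assert; the field and
# attribute names here are illustrative, not the platform's real API.
from typing import NamedTuple


class InputSchemaItem(NamedTuple):
    node_id: str
    title: str


def get_input_schema(nodes, links) -> list[InputSchemaItem]:
    """One entry per input that is neither defaulted nor fed by a link."""
    linked_sinks = {(link.sink_id, link.sink_name) for link in links}
    schema = []
    for node in nodes:
        for name, has_default in node.inputs:  # (input name, default present?)
            if has_default:
                continue  # covered by input_default -> not exposed
            if (node.id, name) in linked_sinks:
                continue  # satisfied by an upstream output -> not exposed
            schema.append(InputSchemaItem(node_id=node.id, title=name))
    return schema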
@@ -1,21 +0,0 @@
import pytest

from backend.util.request import validate_url


def test_validate_url():
    with pytest.raises(ValueError):
        validate_url("localhost", [])

    with pytest.raises(ValueError):
        validate_url("192.168.1.1", [])

    with pytest.raises(ValueError):
        validate_url("127.0.0.1", [])

    with pytest.raises(ValueError):
        validate_url("0.0.0.0", [])

    validate_url("google.com", [])
    validate_url("github.com", [])
    validate_url("http://github.com", [])
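This removed test documents the SSRF guard in backend.util.request: validate_url rejects localhost, loopback, unspecified, and private-range hosts unless they appear in the trusted-origins list, and accepts public hostnames with or without a scheme. A minimal sketch of such a check, approximated from the test's expectations rather than copied from the real implementation:

import ipaddress
from urllib.parse import urlparse


def validate_url(url: str, trusted_origins: list[str]) -> None:
    # Approximation of the guard exercised by the removed test; the actual
    # backend.util.request.validate_url may normalize and check more.
    hostname = urlparse(url if "://" in url else f"http://{url}").hostname or ""
    if hostname in trusted_origins:
        return
    if hostname == "localhost":
        raise ValueError(f"Access to {hostname} is not allowed")
    try:
        ip = ipaddress.ip_address(hostname)
    except ValueError:
        return  # not an IP literal; public hostnames pass
    if ip.is_private or ip.is_loopback or ip.is_unspecified:
        raise ValueError(f"Access to {hostname} is not allowed")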
165
autogpt_platform/docker-compose.full.yml
Normal file
@@ -0,0 +1,165 @@
networks:
  app-network:
    name: app-network
  shared-network:
    name: shared-network

volumes:
  db-config:

x-agpt-services:
  &agpt-services
  networks:
    - app-network
    - shared-network

x-supabase-services:
  &supabase-services
  networks:
    - app-network
    - shared-network

services:
  # AGPT services
  migrate:
    <<: *agpt-services
    extends:
      file: ./docker-compose.platform.yml
      service: migrate

  redis:
    <<: *agpt-services
    extends:
      file: ./docker-compose.platform.yml
      service: redis

  rest_server:
    <<: *agpt-services
    extends:
      file: ./docker-compose.platform.yml
      service: rest_server

  executor:
    <<: *agpt-services
    extends:
      file: ./docker-compose.platform.yml
      service: executor

  websocket_server:
    <<: *agpt-services
    extends:
      file: ./docker-compose.platform.yml
      service: websocket_server

  market:
    <<: *agpt-services
    extends:
      file: ./docker-compose.platform.yml
      service: market

  market-migrations:
    <<: *agpt-services
    extends:
      file: ./docker-compose.platform.yml
      service: market-migrations

  # frontend:
  #   <<: *agpt-services
  #   extends:
  #     file: ./docker-compose.platform.yml
  #     service: frontend

  # Supabase services
  studio:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: studio

  kong:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: kong

  auth:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: auth
    environment:
      GOTRUE_MAILER_AUTOCONFIRM: true

  rest:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: rest

  realtime:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: realtime

  storage:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: storage

  imgproxy:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: imgproxy

  meta:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: meta

  functions:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: functions

  analytics:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: analytics

  db:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: db
  vector:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: vector

  deps:
    <<: *supabase-services
    profiles:
      - local
    image: busybox
    command: /bin/true
    depends_on:
      - studio
      - kong
      - auth
      - rest
      - realtime
      - storage
      - imgproxy
      - meta
      - functions
      - analytics
      - db
      - vector
      - redis
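The x-*: blocks above are Compose extension fields used only to hold YAML anchors; the <<: merge key splices the shared networks list into each service, while extends: is resolved by Compose itself at runtime. A quick way to see the anchor merge (a sketch that assumes PyYAML is installed and the file is in the working directory):

import yaml

# Loading the file directly shows how the anchor merge expands; the
# `extends:` references are Compose features and stay untouched here.
with open("docker-compose.full.yml") as f:
    doc = yaml.safe_load(f)

print(doc["services"]["redis"]["networks"])
# -> ['app-network', 'shared-network']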
@@ -69,7 +69,7 @@ services:
      - DATABASEMANAGER_HOST=executor
      - FRONTEND_BASE_URL=http://localhost:3000
      - BACKEND_CORS_ALLOW_ORIGINS=["http://localhost:3000"]
      - ENCRYPTION_KEY=dvziYgz0KSK8FENhju0ZYi8-fRTfAdlz6YLhdB_jhNw= # DO NOT USE IN PRODUCTION!!
      - ENCRYPTION_KEY=some-long-random-string-encryption-key
    ports:
      - "8006:8006"
      - "8003:8003" # execution scheduler

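One side of this hunk ships a throwaway Fernet-style key (flagged DO NOT USE IN PRODUCTION), the other a plain placeholder string. Assuming the platform's ENCRYPTION_KEY is consumed by the cryptography package's Fernet (the backend pins cryptography = "^43.0.3", and the key's urlsafe-base64 format matches), a real key can be generated like this; the Fernet assumption is inferred from the key format, not confirmed by this diff:

from cryptography.fernet import Fernet

# Prints a fresh 32-byte urlsafe-base64 key in the same format as the
# dvziYgz0...= example above; paste the output into ENCRYPTION_KEY.
print(Fernet.generate_key().decode())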
@@ -51,6 +51,7 @@ services:
      file: ./docker-compose.platform.yml
      service: websocket_server

  # Agent Store v2 will be integrated with the rest service
  market:
    <<: *agpt-services
    extends:
@@ -70,12 +71,14 @@ services:
  #     service: frontend

  # Supabase services
  studio:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: studio
  # Uncomment this if you want to use the UI
  # studio:
  #   <<: *supabase-services
  #   extends:
  #     file: ./supabase/docker/docker-compose.yml
  #     service: studio

  # Required for auth API routing
  kong:
    <<: *supabase-services
    extends:
@@ -90,42 +93,43 @@ services:
    environment:
      GOTRUE_MAILER_AUTOCONFIRM: true

  rest:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: rest
  # rest:
  #   <<: *supabase-services
  #   extends:
  #     file: ./supabase/docker/docker-compose.yml
  #     service: rest

  realtime:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: realtime
  # realtime:
  #   <<: *supabase-services
  #   extends:
  #     file: ./supabase/docker/docker-compose.yml
  #     service: realtime

  storage:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: storage
  # storage:
  #   <<: *supabase-services
  #   extends:
  #     file: ./supabase/docker/docker-compose.yml
  #     service: storage

  imgproxy:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: imgproxy
  # imgproxy:
  #   <<: *supabase-services
  #   extends:
  #     file: ./supabase/docker/docker-compose.yml
  #     service: imgproxy

  meta:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: meta
  # meta:
  #   <<: *supabase-services
  #   extends:
  #     file: ./supabase/docker/docker-compose.yml
  #     service: meta

  functions:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: functions
  # functions:
  #   <<: *supabase-services
  #   extends:
  #     file: ./supabase/docker/docker-compose.yml
  #     service: functions

  # Required for Auth
  analytics:
    <<: *supabase-services
    extends:
@@ -137,24 +141,31 @@ services:
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: db

  # Required for db
  vector:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: vector

  deps:
    <<: *supabase-services
    profiles:
      - local
    image: busybox
    command: /bin/true
    depends_on:
      - studio
      - kong
      - auth
      - meta
      - analytics
      - db
      - vector
      - redis
  # deps:
  #   <<: *supabase-services
  #   profiles:
  #     - local
  #   image: busybox
  #   command: /bin/true
  #   depends_on:
  #     - studio
  #     - kong
  #     - auth
  #     - rest
  #     - realtime
  #     - storage
  #     - imgproxy
  #     - meta
  #     - functions
  #     - analytics
  #     - db
  #     - vector
  #     - redis

@@ -49,10 +49,8 @@
    "class-variance-authority": "^0.7.0",
    "clsx": "^2.1.1",
    "cmdk": "1.0.4",
    "cookie": "0.7.0",
    "date-fns": "^4.1.0",
    "dotenv": "^16.4.5",
    "elliptic": "6.6.0",
    "lucide-react": "^0.454.0",
    "moment": "^2.30.1",
    "next": "^14.2.13",

@@ -626,13 +626,10 @@ export function CustomNode({

      <div className="flex w-full flex-col">
        <div className="flex flex-row items-center justify-between">
          <div className="font-roboto flex items-center px-3 text-lg font-semibold">
          <div className="font-roboto px-3 text-lg font-semibold">
            {beautifyString(
              data.blockType?.replace(/Block$/, "") || data.title,
            )}
            <div className="px-2 text-xs text-gray-500">
              #{id.split("-")[0]}
            </div>
          </div>
        </div>
        {blockCost && (

@@ -18,7 +18,7 @@ export async function NavBar() {
  const { user } = await getServerUser();

  return (
    <header className="sticky top-0 z-50 mx-4 flex h-16 select-none items-center gap-4 border border-gray-300 bg-background p-3 md:rounded-b-2xl md:px-6 md:shadow">
    <header className="sticky top-0 z-50 mx-4 flex h-16 items-center gap-4 border border-gray-300 bg-background p-3 md:rounded-b-2xl md:px-6 md:shadow">
      <div className="flex flex-1 items-center gap-4">
        <Sheet>
          <SheetTrigger asChild>

@@ -32,7 +32,7 @@ const PrimaryActionBar: React.FC<PrimaryActionBarProps> = ({
  const runButtonOnClick = !isRunning ? onClickRunAgent : requestStopRun;

  return (
    <div className="absolute bottom-0 left-1/2 z-50 flex w-fit -translate-x-1/2 transform select-none items-center justify-center p-4">
    <div className="absolute bottom-0 left-1/2 z-50 flex w-fit -translate-x-1/2 transform items-center justify-center p-4">
      <div className={`flex gap-4`}>
        <Tooltip key="ViewOutputs" delayDuration={500}>
          <TooltipTrigger asChild>

@@ -48,7 +48,7 @@ const TallyPopupSimple = () => {
  };

  return (
    <div className="fixed bottom-1 right-6 z-50 hidden select-none items-center gap-4 p-3 transition-all duration-300 ease-in-out md:flex">
    <div className="fixed bottom-1 right-6 z-50 hidden items-center gap-4 p-3 transition-all duration-300 ease-in-out md:flex">
      <Button
        variant="default"
        onClick={resetTutorial}

@@ -352,14 +352,9 @@ const NodeKeyValueInput: FC<{
    const defaultEntries = new Map(
      Object.entries(entries ?? schema.default ?? {}),
    );
    const prefix = `${selfKey}_#_`;
    connections
      .filter((c) => c.targetHandle.startsWith(prefix) && c.target === nodeId)
      .map((c) => c.targetHandle.slice(prefix.length))
      .forEach((k) => !defaultEntries.has(k) && defaultEntries.set(k, ""));

    return Array.from(defaultEntries, ([key, value]) => ({ key, value }));
  }, [entries, schema.default, connections, nodeId, selfKey]);
  }, [entries, schema.default]);

  const [keyValuePairs, setKeyValuePairs] = useState<
    { key: string; value: string | number | null }[]
@@ -502,18 +497,6 @@ const NodeArrayInput: FC<{
  displayName,
}) => {
  entries ??= schema.default ?? [];

  const prefix = `${selfKey}_$_`;
  connections
    .filter((c) => c.targetHandle.startsWith(prefix) && c.target === nodeId)
    .map((c) => parseInt(c.targetHandle.slice(prefix.length)))
    .filter((c) => !isNaN(c))
    .forEach(
      (c) =>
        entries.length <= c &&
        entries.push(...Array(c - entries.length + 1).fill("")),
    );

  const isItemObject = "items" in schema && "properties" in schema.items!;
  const error =
    typeof errors[selfKey] === "string" ? errors[selfKey] : undefined;
@@ -768,7 +751,7 @@ const NodeBooleanInput: FC<{
    <div className={className}>
      <div className="nodrag flex items-center">
        <Switch
          defaultChecked={value}
          checked={value}
          onCheckedChange={(v) => handleInputChange(selfKey, v)}
        />
        <span className="ml-3">{displayName}</span>

@@ -487,16 +487,7 @@ export default function useAgentGraph(
        },
      );
    })
      .catch((error) => {
        const errorMessage =
          error instanceof Error ? error.message : String(error);
        toast({
          variant: "destructive",
          title: "Error saving agent",
          description: errorMessage,
        });
        setSaveRunRequest({ request: "run", state: "error" });
      });
      .catch(() => setSaveRunRequest({ request: "run", state: "error" }));

      processedUpdates.current = [];
    }
@@ -519,7 +510,6 @@ export default function useAgentGraph(
  }
  }, [
    api,
    toast,
    saveRunRequest,
    savedAgent,
    nodesSyncedWithSavedAgent,
@@ -600,7 +590,8 @@ export default function useAgentGraph(
    [availableNodes],
  );

  const _saveAgent = useCallback(
  const _saveAgent = (
    () =>
      async (asTemplate: boolean = false) => {
        // FIXME: frontend ids should be resolved better (e.g. returned from the server)
        // currently this relies on block_id and position
@@ -754,20 +745,8 @@ export default function useAgentGraph(
        },
      }));
    });
    },
    [
      api,
      nodes,
      edges,
      pathname,
      router,
      searchParams,
      savedAgent,
      agentName,
      agentDescription,
      prepareNodeInputData,
    ],
  );
  }
  )();

  const saveAgent = useCallback(
    async (asTemplate: boolean = false) => {
@@ -784,7 +763,18 @@ export default function useAgentGraph(
      });
    }
    },
    [_saveAgent, toast],
    [
      api,
      nodes,
      edges,
      pathname,
      router,
      searchParams,
      savedAgent,
      agentName,
      agentDescription,
      prepareNodeInputData,
    ],
  );

  const requestSave = useCallback(

@@ -5091,11 +5091,6 @@ convert-source-map@^2.0.0:
  resolved "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz"
  integrity sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==

cookie@0.7.0:
  version "0.7.0"
  resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.7.0.tgz#2148f68a77245d5c2c0005d264bc3e08cfa0655d"
  integrity sha512-qCf+V4dtlNhSRXGAZatc1TasyFO6GjohcOul807YOb5ik3+kQSnb4d7iajeCL8QHaJ4uZEjCgiCJerKXwdRVlQ==

cookie@^0.6.0:
  version "0.6.0"
  resolved "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz"
@@ -5721,19 +5716,6 @@ electron-to-chromium@^1.5.28:
  resolved "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.36.tgz"
  integrity sha512-HYTX8tKge/VNp6FGO+f/uVDmUkq+cEfcxYhKf15Akc4M5yxt5YmorwlAitKWjWhWQnKcDRBAQKXkhqqXMqcrjw==

elliptic@6.6.0:
  version "6.6.0"
  resolved "https://registry.yarnpkg.com/elliptic/-/elliptic-6.6.0.tgz#5919ec723286c1edf28685aa89261d4761afa210"
  integrity sha512-dpwoQcLc/2WLQvJvLRHKZ+f9FgOdjnq11rurqwekGQygGPsYSK29OMMD2WalatiqQ+XGFDglTNixpPfI+lpaAA==
  dependencies:
    bn.js "^4.11.9"
    brorand "^1.1.0"
    hash.js "^1.0.0"
    hmac-drbg "^1.0.1"
    inherits "^2.0.4"
    minimalistic-assert "^1.0.1"
    minimalistic-crypto-utils "^1.0.1"

elliptic@^6.5.3, elliptic@^6.5.5:
  version "6.5.7"
  resolved "https://registry.npmjs.org/elliptic/-/elliptic-6.5.7.tgz"

@@ -111,8 +111,6 @@ env:
  AGENTSERVER_HOST: "autogpt-server.prod-agpt.svc.cluster.local"
  EXECUTIONMANAGER_HOST: "autogpt-server-executor.prod-agpt.svc.cluster.local"
  DATABASEMANAGER_HOST: "autogpt-server-executor.prod-agpt.svc.cluster.local"
  PYRO_CLIENT_COMM_TIMEOUT: 15
  PYRO_CLIENT_COMM_RETRY: 3

secrets:
  REPLICATE_API_KEY: "AgCPCgcYb+tE8/k45Y7/my4G2jWPCuEMTXJIn1fG1q4x4ZJPFzb43m7Uqtwn23NkmUZ5Qvh8BXedrtHwxapuYzw/P6c7xK66xfLKRbTWtYk4twS3sxPb+pt1FXY4USEjj5yeIFduybkqhE2QfnGoyrbDZ4Bz3AIgnrRD0Ee5m9u5yNZTPmJqZZqg4MRdUBCxCWIJBkW6DCE9nCPAQeNPD6e+lZ1j+/LocT2HX/ZlcsPXCxbn6wkxoyLqA0vUKSG9azS6oLvn0/3Cb01ozG8S2OEAqWIImFqhKGMfGqL6jSZWln43cmQdMTzSzM+HiprA9JHjZqGK7wOV9HZvSR+58IXoJGPBEIM7jIg5KqPjpZY4KFZBp5OiiRRYu+nCbuD+KsY/7ogjPHjbi1rpR8TrtXdzWNmwsTTmjytB/KEqeUpLWOEPgArFPyrNTS5/nmREH7r9jNEhfIRdTlS3IVGGXp/VN8napbNND1GDyzowvF771neq7/zTmfCRCJ4J0gwPNKM5rzOuRW+caEf2qOFBKIldVa/J0PFg5bAgpGL6jhpXHj0Q/+j1s3FA/D2ZebZTPIpKe40It3sWsS/0Qjhbj1GMbL4yUWvGpBSUTk7kZazkaVND1LbhjC+4AolTQdIU4MgW0bkmDn5ZI4a9/dHyLS3lFeYNSQ6vnbz+Id7zB3O0D6/FH8nfAUGL8V+J3eFKMp+G67z+XYH6WGABaNicz41zFBDF5hRax+k/ZziPPlFY0kDc3cAB6pLc"

@@ -1,4 +1,4 @@
FROM python:3.11.10-slim-bookworm AS builder
FROM python:3.11-slim-buster AS builder

# Set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
@@ -35,7 +35,7 @@ COPY autogpt_platform/market /app/autogpt_platform/market
RUN poetry config virtualenvs.create false \
    && poetry run prisma generate

FROM python:3.11.10-slim-bookworm AS server_dependencies
FROM python:3.11-slim-buster AS server_dependencies

WORKDIR /app
