Mirror of https://github.com/Significant-Gravitas/AutoGPT.git, synced 2026-02-09 22:35:54 -05:00

Merge branch 'master' into bently/secrt-881-find-local-businesses-using-google-maps-list-building
5 .github/labeler.yml vendored
@@ -25,3 +25,8 @@ platform/frontend:
 platform/backend:
 - changed-files:
   - any-glob-to-any-file: autogpt_platform/backend/**
+  - all-globs-to-all-files: '!autogpt_platform/backend/backend/blocks/**'
+
+platform/blocks:
+- changed-files:
+  - any-glob-to-any-file: autogpt_platform/backend/backend/blocks/**

16 .github/workflows/platform-backend-ci.yml vendored
@@ -32,6 +32,14 @@ jobs:
         python-version: ["3.10"]
     runs-on: ubuntu-latest

+    services:
+      redis:
+        image: bitnami/redis:6.2
+        env:
+          REDIS_PASSWORD: testpassword
+        ports:
+          - 6379:6379
+
     steps:
       - name: Checkout repository
         uses: actions/checkout@v4

@@ -96,9 +104,9 @@ jobs:
       - name: Run pytest with coverage
         run: |
           if [[ "${{ runner.debug }}" == "1" ]]; then
-            poetry run pytest -vv -o log_cli=true -o log_cli_level=DEBUG test
+            poetry run pytest -s -vv -o log_cli=true -o log_cli_level=DEBUG test
           else
-            poetry run pytest -vv test
+            poetry run pytest -s -vv test
           fi
         if: success() || (failure() && steps.lint.outcome == 'failure')
         env:

@@ -107,6 +115,10 @@ jobs:
           SUPABASE_URL: ${{ steps.supabase.outputs.API_URL }}
           SUPABASE_SERVICE_ROLE_KEY: ${{ steps.supabase.outputs.SERVICE_ROLE_KEY }}
           SUPABASE_JWT_SECRET: ${{ steps.supabase.outputs.JWT_SECRET }}
+          REDIS_HOST: 'localhost'
+          REDIS_PORT: '6379'
+          REDIS_PASSWORD: 'testpassword'

     env:
       CI: true
       PLAIN_OUTPUT: True

@@ -81,12 +81,17 @@ class SupabaseIntegrationCredentialsStore:
         ]
         self._set_user_integration_creds(user_id, filtered_credentials)

-    async def store_state_token(self, user_id: str, provider: str) -> str:
+    async def store_state_token(
+        self, user_id: str, provider: str, scopes: list[str]
+    ) -> str:
         token = secrets.token_urlsafe(32)
         expires_at = datetime.now(timezone.utc) + timedelta(minutes=10)

         state = OAuthState(
-            token=token, provider=provider, expires_at=int(expires_at.timestamp())
+            token=token,
+            provider=provider,
+            expires_at=int(expires_at.timestamp()),
+            scopes=scopes,
         )

         user_metadata = self._get_user_metadata(user_id)

@@ -100,6 +105,36 @@ class SupabaseIntegrationCredentialsStore:

         return token

+    async def get_any_valid_scopes_from_state_token(
+        self, user_id: str, token: str, provider: str
+    ) -> list[str]:
+        """
+        Get the valid scopes from the OAuth state token. This will return any valid scopes
+        from any OAuth state token for the given provider. If no valid scopes are found,
+        an empty list is returned. DO NOT RELY ON THIS TOKEN TO AUTHENTICATE A USER, AS IT
+        IS TO CHECK IF THE USER HAS GIVEN PERMISSIONS TO THE APPLICATION BEFORE EXCHANGING
+        THE CODE FOR TOKENS.
+        """
+        user_metadata = self._get_user_metadata(user_id)
+        oauth_states = user_metadata.get("integration_oauth_states", [])
+
+        now = datetime.now(timezone.utc)
+        valid_state = next(
+            (
+                state
+                for state in oauth_states
+                if state["token"] == token
+                and state["provider"] == provider
+                and state["expires_at"] > now.timestamp()
+            ),
+            None,
+        )
+
+        if valid_state:
+            return valid_state.get("scopes", [])
+
+        return []
+
     async def verify_state_token(self, user_id: str, token: str, provider: str) -> bool:
         user_metadata = self._get_user_metadata(user_id)
         oauth_states = user_metadata.get("integration_oauth_states", [])

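To make the intended call order concrete, here is a hedged sketch of an OAuth callback using these methods. The handler shape and the exchange_code() helper are assumptions for illustration, not part of this commit; the scope check runs before verify_state_token, since verification may consume the one-time state entry.

    async def handle_oauth_callback(store, user_id: str, code: str, state: str):
        # Check consented scopes first; per the docstring, this is a
        # permission pre-check, NOT authentication.
        granted = await store.get_any_valid_scopes_from_state_token(
            user_id, state, "google"
        )
        if "https://www.googleapis.com/auth/gmail.readonly" not in granted:
            raise PermissionError("User did not grant the required scope")

        # Now verify the state token itself (CSRF protection) before
        # spending the one-time authorization code.
        if not await store.verify_state_token(user_id, state, "google"):
            raise PermissionError("Invalid or expired OAuth state")

        return await exchange_code("google", code)  # hypothetical token exchange
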
@@ -36,6 +36,15 @@ SUPABASE_JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
 GITHUB_CLIENT_ID=
 GITHUB_CLIENT_SECRET=

+# Google OAuth App server credentials - https://console.cloud.google.com/apis/credentials, and enable gmail api and set scopes
+# https://console.cloud.google.com/apis/credentials/consent?project=<your_project_id>
+
+# You'll need to add/enable the following scopes (minimum):
+# https://console.developers.google.com/apis/api/gmail.googleapis.com/overview?project=<your_project_id>
+# https://console.cloud.google.com/apis/library/sheets.googleapis.com/?project=<your_project_id>
+GOOGLE_CLIENT_ID=
+GOOGLE_CLIENT_SECRET=
+
 ## ===== OPTIONAL API KEYS ===== ##

 # LLM

@@ -80,6 +89,9 @@ GOOGLE_MAPS_API_KEY=
 # Replicate
 REPLICATE_API_KEY=

+# Ideogram
+IDEOGRAM_API_KEY=
+
 # Logging Configuration
 LOG_LEVEL=INFO
 ENABLE_CLOUD_LOGGING=false

@@ -88,7 +88,7 @@ class FindInDictionaryBlock(Block):

     def __init__(self):
         super().__init__(
-            id="b2g2c3d4-5e6f-7g8h-9i0j-k1l2m3n4o5p6",
+            id="0e50422c-6dee-4145-83d6-3a5a392f65de",
             description="Lookup the given key in the input dictionary/object/list and return the value.",
             input_schema=FindInDictionaryBlock.Input,
             output_schema=FindInDictionaryBlock.Output,
@@ -429,7 +429,7 @@ class NoteBlock(Block):

     def __init__(self):
         super().__init__(
-            id="31d1064e-7446-4693-o7d4-65e5ca9110d1",
+            id="cc10ff7b-7753-4ff2-9af6-9399b1a7eddc",
             description="This block is used to display a sticky note with the given text.",
             categories={BlockCategory.BASIC},
             input_schema=NoteBlock.Input,

@@ -24,7 +24,7 @@ class ReadCsvBlock(Block):
             output_schema=ReadCsvBlock.Output,
             description="Reads a CSV file and outputs the data as a list of dictionaries and individual rows via rows.",
             contributors=[ContributorDetails(name="Nicholas Tindle")],
-            categories={BlockCategory.TEXT},
+            categories={BlockCategory.TEXT, BlockCategory.DATA},
             test_input={
                 "contents": "a, b, c\n1,2,3\n4,5,6",
             },

42 autogpt_platform/backend/backend/blocks/decoder_block.py Normal file
@@ -0,0 +1,42 @@
import codecs

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField


class TextDecoderBlock(Block):
    class Input(BlockSchema):
        text: str = SchemaField(
            description="A string containing escaped characters to be decoded",
            placeholder='Your entire text block with \\n and \\" escaped characters',
        )

    class Output(BlockSchema):
        decoded_text: str = SchemaField(
            description="The decoded text with escape sequences processed"
        )

    def __init__(self):
        super().__init__(
            id="2570e8fe-8447-43ed-84c7-70d657923231",
            description="Decodes a string containing escape sequences into actual text",
            categories={BlockCategory.TEXT},
            input_schema=TextDecoderBlock.Input,
            output_schema=TextDecoderBlock.Output,
            test_input={"text": """Hello\nWorld!\nThis is a \"quoted\" string."""},
            test_output=[
                (
                    "decoded_text",
                    """Hello
World!
This is a "quoted" string.""",
                )
            ],
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        try:
            decoded_text = codecs.decode(input_data.text, "unicode_escape")
            yield "decoded_text", decoded_text
        except Exception as e:
            yield "error", f"Error decoding text: {str(e)}"

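For reference, a quick check of the codecs call this block relies on: "unicode_escape" turns literal backslash sequences into the characters they name. (One caveat, not exercised here: the codec assumes Latin-1 for non-ASCII input.)

    import codecs

    raw = 'Line one\\nLine two with a \\"quote\\"'  # literal backslash sequences
    print(codecs.decode(raw, "unicode_escape"))
    # Line one
    # Line two with a "quote"
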
@@ -28,7 +28,7 @@ class ReadDiscordMessagesBlock(Block):

     def __init__(self):
         super().__init__(
-            id="d3f4g5h6-1i2j-3k4l-5m6n-7o8p9q0r1s2t",  # Unique ID for the node
+            id="df06086a-d5ac-4abb-9996-2ad0acb2eff7",
             input_schema=ReadDiscordMessagesBlock.Input,  # Assign input schema
             output_schema=ReadDiscordMessagesBlock.Output,  # Assign output schema
             description="Reads messages from a Discord channel using a bot token.",

@@ -146,7 +146,7 @@ class SendDiscordMessageBlock(Block):

     def __init__(self):
         super().__init__(
-            id="h1i2j3k4-5l6m-7n8o-9p0q-r1s2t3u4v5w6",  # Unique ID for the node
+            id="d0822ab5-9f8a-44a3-8971-531dd0178b6b",
             input_schema=SendDiscordMessageBlock.Input,  # Assign input schema
             output_schema=SendDiscordMessageBlock.Output,  # Assign output schema
             description="Sends a message to a Discord channel using a bot token.",

@@ -43,7 +43,7 @@ class SendEmailBlock(Block):

     def __init__(self):
         super().__init__(
-            id="a1234567-89ab-cdef-0123-456789abcdef",
+            id="4335878a-394e-4e67-adf2-919877ff49ae",
             description="This block sends an email using the provided SMTP credentials.",
             categories={BlockCategory.OUTPUT},
             input_schema=SendEmailBlock.Input,

53 autogpt_platform/backend/backend/blocks/google/_auth.py Normal file
@@ -0,0 +1,53 @@
from typing import Literal

from autogpt_libs.supabase_integration_credentials_store.types import OAuth2Credentials
from pydantic import SecretStr

from backend.data.model import CredentialsField, CredentialsMetaInput
from backend.util.settings import Secrets

secrets = Secrets()
GOOGLE_OAUTH_IS_CONFIGURED = bool(
    secrets.google_client_id and secrets.google_client_secret
)

GoogleCredentials = OAuth2Credentials
GoogleCredentialsInput = CredentialsMetaInput[Literal["google"], Literal["oauth2"]]


def GoogleCredentialsField(scopes: list[str]) -> GoogleCredentialsInput:
    """
    Creates a Google credentials input on a block.

    Params:
        scopes: The authorization scopes needed for the block to work.
    """
    return CredentialsField(
        provider="google",
        supported_credential_types={"oauth2"},
        required_scopes=set(scopes),
        description="The Google integration requires OAuth2 authentication.",
    )


TEST_CREDENTIALS = OAuth2Credentials(
    id="01234567-89ab-cdef-0123-456789abcdef",
    provider="google",
    access_token=SecretStr("mock-google-access-token"),
    refresh_token=SecretStr("mock-google-refresh-token"),
    access_token_expires_at=1234567890,
    scopes=[
        "https://www.googleapis.com/auth/gmail.readonly",
        "https://www.googleapis.com/auth/gmail.send",
    ],
    title="Mock Google OAuth2 Credentials",
    username="mock-google-username",
    refresh_token_expires_at=1234567890,
)

TEST_CREDENTIALS_INPUT = {
    "provider": TEST_CREDENTIALS.provider,
    "id": TEST_CREDENTIALS.id,
    "type": TEST_CREDENTIALS.type,
    "title": TEST_CREDENTIALS.title,
}

522 autogpt_platform/backend/backend/blocks/google/gmail.py Normal file
@@ -0,0 +1,522 @@
import base64
from email.utils import parseaddr
from typing import List

from google.oauth2.credentials import Credentials
from googleapiclient.discovery import build
from pydantic import BaseModel

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField

from ._auth import (
    GOOGLE_OAUTH_IS_CONFIGURED,
    TEST_CREDENTIALS,
    TEST_CREDENTIALS_INPUT,
    GoogleCredentials,
    GoogleCredentialsField,
    GoogleCredentialsInput,
)


class Attachment(BaseModel):
    filename: str
    content_type: str
    size: int
    attachment_id: str


class Email(BaseModel):
    id: str
    subject: str
    snippet: str
    from_: str
    to: str
    date: str
    body: str = ""  # Default to an empty string
    sizeEstimate: int
    attachments: List[Attachment]


class GmailReadBlock(Block):
    class Input(BlockSchema):
        credentials: GoogleCredentialsInput = GoogleCredentialsField(
            ["https://www.googleapis.com/auth/gmail.readonly"]
        )
        query: str = SchemaField(
            description="Search query for reading emails",
            default="is:unread",
        )
        max_results: int = SchemaField(
            description="Maximum number of emails to retrieve",
            default=10,
        )

    class Output(BlockSchema):
        email: Email = SchemaField(
            description="Email data",
        )
        emails: list[Email] = SchemaField(
            description="List of email data",
        )
        error: str = SchemaField(
            description="Error message if any",
        )

    def __init__(self):
        super().__init__(
            id="25310c70-b89b-43ba-b25c-4dfa7e2a481c",
            description="This block reads emails from Gmail.",
            categories={BlockCategory.COMMUNICATION},
            disabled=not GOOGLE_OAUTH_IS_CONFIGURED,
            input_schema=GmailReadBlock.Input,
            output_schema=GmailReadBlock.Output,
            test_input={
                "query": "is:unread",
                "max_results": 5,
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                (
                    "result",
                    [
                        {
                            "id": "1",
                            "subject": "Test Email",
                            "snippet": "This is a test email",
                        }
                    ],
                ),
            ],
            test_mock={
                "_read_emails": lambda *args, **kwargs: [
                    {
                        "id": "1",
                        "subject": "Test Email",
                        "snippet": "This is a test email",
                    }
                ],
                "_send_email": lambda *args, **kwargs: {"id": "1", "status": "sent"},
            },
        )

    def run(
        self, input_data: Input, *, credentials: GoogleCredentials, **kwargs
    ) -> BlockOutput:
        try:
            service = self._build_service(credentials, **kwargs)
            messages = self._read_emails(
                service, input_data.query, input_data.max_results
            )
            for email in messages:
                yield "email", email
            yield "emails", messages
        except Exception as e:
            yield "error", str(e)

    @staticmethod
    def _build_service(credentials: GoogleCredentials, **kwargs):
        creds = Credentials(
            token=(
                credentials.access_token.get_secret_value()
                if credentials.access_token
                else None
            ),
            refresh_token=(
                credentials.refresh_token.get_secret_value()
                if credentials.refresh_token
                else None
            ),
            token_uri="https://oauth2.googleapis.com/token",
            client_id=kwargs.get("client_id"),
            client_secret=kwargs.get("client_secret"),
            scopes=credentials.scopes,
        )
        return build("gmail", "v1", credentials=creds)

    def _read_emails(
        self, service, query: str | None, max_results: int | None
    ) -> list[Email]:
        results = (
            service.users()
            .messages()
            .list(userId="me", q=query or "", maxResults=max_results or 10)
            .execute()
        )
        messages = results.get("messages", [])

        email_data = []
        for message in messages:
            msg = (
                service.users()
                .messages()
                .get(userId="me", id=message["id"], format="full")
                .execute()
            )

            headers = {
                header["name"].lower(): header["value"]
                for header in msg["payload"]["headers"]
            }

            attachments = self._get_attachments(service, msg)

            email = Email(
                id=msg["id"],
                subject=headers.get("subject", "No Subject"),
                snippet=msg["snippet"],
                from_=parseaddr(headers.get("from", ""))[1],
                to=parseaddr(headers.get("to", ""))[1],
                date=headers.get("date", ""),
                body=self._get_email_body(msg),
                sizeEstimate=msg["sizeEstimate"],
                attachments=attachments,
            )
            email_data.append(email)

        return email_data

    def _get_email_body(self, msg):
        if "parts" in msg["payload"]:
            for part in msg["payload"]["parts"]:
                if part["mimeType"] == "text/plain":
                    return base64.urlsafe_b64decode(part["body"]["data"]).decode(
                        "utf-8"
                    )
        elif msg["payload"]["mimeType"] == "text/plain":
            return base64.urlsafe_b64decode(msg["payload"]["body"]["data"]).decode(
                "utf-8"
            )

        return "This email does not contain a text body."

    def _get_attachments(self, service, message):
        attachments = []
        if "parts" in message["payload"]:
            for part in message["payload"]["parts"]:
                if part["filename"]:
                    attachment = Attachment(
                        filename=part["filename"],
                        content_type=part["mimeType"],
                        size=int(part["body"].get("size", 0)),
                        attachment_id=part["body"]["attachmentId"],
                    )
                    attachments.append(attachment)
        return attachments

    # Add a new method to download attachment content
    def download_attachment(self, service, message_id: str, attachment_id: str):
        attachment = (
            service.users()
            .messages()
            .attachments()
            .get(userId="me", messageId=message_id, id=attachment_id)
            .execute()
        )
        file_data = base64.urlsafe_b64decode(attachment["data"].encode("UTF-8"))
        return file_data


class GmailSendBlock(Block):
    class Input(BlockSchema):
        credentials: GoogleCredentialsInput = GoogleCredentialsField(
            ["https://www.googleapis.com/auth/gmail.send"]
        )
        to: str = SchemaField(
            description="Recipient email address",
        )
        subject: str = SchemaField(
            description="Email subject",
        )
        body: str = SchemaField(
            description="Email body",
        )

    class Output(BlockSchema):
        result: dict = SchemaField(
            description="Send confirmation",
        )
        error: str = SchemaField(
            description="Error message if any",
        )

    def __init__(self):
        super().__init__(
            id="6c27abc2-e51d-499e-a85f-5a0041ba94f0",
            description="This block sends an email using Gmail.",
            categories={BlockCategory.COMMUNICATION},
            input_schema=GmailSendBlock.Input,
            output_schema=GmailSendBlock.Output,
            disabled=not GOOGLE_OAUTH_IS_CONFIGURED,
            test_input={
                "to": "recipient@example.com",
                "subject": "Test Email",
                "body": "This is a test email sent from GmailSendBlock.",
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("result", {"id": "1", "status": "sent"}),
            ],
            test_mock={
                "_send_email": lambda *args, **kwargs: {"id": "1", "status": "sent"},
            },
        )

    def run(
        self, input_data: Input, *, credentials: GoogleCredentials, **kwargs
    ) -> BlockOutput:
        try:
            service = GmailReadBlock._build_service(credentials, **kwargs)
            send_result = self._send_email(
                service, input_data.to, input_data.subject, input_data.body
            )
            yield "result", send_result
        except Exception as e:
            yield "error", str(e)

    def _send_email(self, service, to: str, subject: str, body: str) -> dict:
        if not to or not subject or not body:
            raise ValueError("To, subject, and body are required for sending an email")
        message = self._create_message(to, subject, body)
        sent_message = (
            service.users().messages().send(userId="me", body=message).execute()
        )
        return {"id": sent_message["id"], "status": "sent"}

    def _create_message(self, to: str, subject: str, body: str) -> dict:
        import base64
        from email.mime.text import MIMEText

        message = MIMEText(body)
        message["to"] = to
        message["subject"] = subject
        raw_message = base64.urlsafe_b64encode(message.as_bytes()).decode("utf-8")
        return {"raw": raw_message}


class GmailListLabelsBlock(Block):
    class Input(BlockSchema):
        credentials: GoogleCredentialsInput = GoogleCredentialsField(
            ["https://www.googleapis.com/auth/gmail.labels"]
        )

    class Output(BlockSchema):
        result: list[dict] = SchemaField(
            description="List of labels",
        )
        error: str = SchemaField(
            description="Error message if any",
        )

    def __init__(self):
        super().__init__(
            id="3e1c2c1c-c689-4520-b956-1f3bf4e02bb7",
            description="This block lists all labels in Gmail.",
            categories={BlockCategory.COMMUNICATION},
            input_schema=GmailListLabelsBlock.Input,
            output_schema=GmailListLabelsBlock.Output,
            disabled=not GOOGLE_OAUTH_IS_CONFIGURED,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                (
                    "result",
                    [
                        {"id": "Label_1", "name": "Important"},
                        {"id": "Label_2", "name": "Work"},
                    ],
                ),
            ],
            test_mock={
                "_list_labels": lambda *args, **kwargs: [
                    {"id": "Label_1", "name": "Important"},
                    {"id": "Label_2", "name": "Work"},
                ],
            },
        )

    def run(
        self, input_data: Input, *, credentials: GoogleCredentials, **kwargs
    ) -> BlockOutput:
        try:
            service = GmailReadBlock._build_service(credentials, **kwargs)
            labels = self._list_labels(service)
            yield "result", labels
        except Exception as e:
            yield "error", str(e)

    def _list_labels(self, service) -> list[dict]:
        results = service.users().labels().list(userId="me").execute()
        labels = results.get("labels", [])
        return [{"id": label["id"], "name": label["name"]} for label in labels]


class GmailAddLabelBlock(Block):
    class Input(BlockSchema):
        credentials: GoogleCredentialsInput = GoogleCredentialsField(
            ["https://www.googleapis.com/auth/gmail.modify"]
        )
        message_id: str = SchemaField(
            description="Message ID to add label to",
        )
        label_name: str = SchemaField(
            description="Label name to add",
        )

    class Output(BlockSchema):
        result: dict = SchemaField(
            description="Label addition result",
        )
        error: str = SchemaField(
            description="Error message if any",
        )

    def __init__(self):
        super().__init__(
            id="f884b2fb-04f4-4265-9658-14f433926ac9",
            description="This block adds a label to a Gmail message.",
            categories={BlockCategory.COMMUNICATION},
            input_schema=GmailAddLabelBlock.Input,
            output_schema=GmailAddLabelBlock.Output,
            disabled=not GOOGLE_OAUTH_IS_CONFIGURED,
            test_input={
                "message_id": "12345",
                "label_name": "Important",
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                (
                    "result",
                    {"status": "Label added successfully", "label_id": "Label_1"},
                ),
            ],
            test_mock={
                "_add_label": lambda *args, **kwargs: {
                    "status": "Label added successfully",
                    "label_id": "Label_1",
                },
            },
        )

    def run(
        self, input_data: Input, *, credentials: GoogleCredentials, **kwargs
    ) -> BlockOutput:
        try:
            service = GmailReadBlock._build_service(credentials, **kwargs)
            result = self._add_label(
                service, input_data.message_id, input_data.label_name
            )
            yield "result", result
        except Exception as e:
            yield "error", str(e)

    def _add_label(self, service, message_id: str, label_name: str) -> dict:
        label_id = self._get_or_create_label(service, label_name)
        service.users().messages().modify(
            userId="me", id=message_id, body={"addLabelIds": [label_id]}
        ).execute()
        return {"status": "Label added successfully", "label_id": label_id}

    def _get_or_create_label(self, service, label_name: str) -> str:
        label_id = self._get_label_id(service, label_name)
        if not label_id:
            label = (
                service.users()
                .labels()
                .create(userId="me", body={"name": label_name})
                .execute()
            )
            label_id = label["id"]
        return label_id

    def _get_label_id(self, service, label_name: str) -> str | None:
        results = service.users().labels().list(userId="me").execute()
        labels = results.get("labels", [])
        for label in labels:
            if label["name"] == label_name:
                return label["id"]
        return None


class GmailRemoveLabelBlock(Block):
    class Input(BlockSchema):
        credentials: GoogleCredentialsInput = GoogleCredentialsField(
            ["https://www.googleapis.com/auth/gmail.modify"]
        )
        message_id: str = SchemaField(
            description="Message ID to remove label from",
        )
        label_name: str = SchemaField(
            description="Label name to remove",
        )

    class Output(BlockSchema):
        result: dict = SchemaField(
            description="Label removal result",
        )
        error: str = SchemaField(
            description="Error message if any",
        )

    def __init__(self):
        super().__init__(
            id="0afc0526-aba1-4b2b-888e-a22b7c3f359d",
            description="This block removes a label from a Gmail message.",
            categories={BlockCategory.COMMUNICATION},
            input_schema=GmailRemoveLabelBlock.Input,
            output_schema=GmailRemoveLabelBlock.Output,
            disabled=not GOOGLE_OAUTH_IS_CONFIGURED,
            test_input={
                "message_id": "12345",
                "label_name": "Important",
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                (
                    "result",
                    {"status": "Label removed successfully", "label_id": "Label_1"},
                ),
            ],
            test_mock={
                "_remove_label": lambda *args, **kwargs: {
                    "status": "Label removed successfully",
                    "label_id": "Label_1",
                },
            },
        )

    def run(
        self, input_data: Input, *, credentials: GoogleCredentials, **kwargs
    ) -> BlockOutput:
        try:
            service = GmailReadBlock._build_service(credentials, **kwargs)
            result = self._remove_label(
                service, input_data.message_id, input_data.label_name
            )
            yield "result", result
        except Exception as e:
            yield "error", str(e)

    def _remove_label(self, service, message_id: str, label_name: str) -> dict:
        label_id = self._get_label_id(service, label_name)
        if label_id:
            service.users().messages().modify(
                userId="me", id=message_id, body={"removeLabelIds": [label_id]}
            ).execute()
            return {"status": "Label removed successfully", "label_id": label_id}
        else:
            return {"status": "Label not found", "label_name": label_name}

    def _get_label_id(self, service, label_name: str) -> str | None:
        results = service.users().labels().list(userId="me").execute()
        labels = results.get("labels", [])
        for label in labels:
            if label["name"] == label_name:
                return label["id"]
        return None

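As a reference point, the same googleapiclient calls the block wraps can be exercised directly; a minimal sketch, assuming you already hold a valid google.oauth2.credentials.Credentials object:

    from googleapiclient.discovery import build


    def count_unread(creds) -> int:
        # Same users().messages().list() call GmailReadBlock._read_emails makes.
        service = build("gmail", "v1", credentials=creds)
        resp = service.users().messages().list(userId="me", q="is:unread").execute()
        return resp.get("resultSizeEstimate", 0)
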
192 autogpt_platform/backend/backend/blocks/google/sheets.py Normal file
@@ -0,0 +1,192 @@
from google.oauth2.credentials import Credentials
from googleapiclient.discovery import build

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField

from ._auth import (
    GOOGLE_OAUTH_IS_CONFIGURED,
    TEST_CREDENTIALS,
    TEST_CREDENTIALS_INPUT,
    GoogleCredentials,
    GoogleCredentialsField,
    GoogleCredentialsInput,
)


class GoogleSheetsReadBlock(Block):
    class Input(BlockSchema):
        credentials: GoogleCredentialsInput = GoogleCredentialsField(
            ["https://www.googleapis.com/auth/spreadsheets.readonly"]
        )
        spreadsheet_id: str = SchemaField(
            description="The ID of the spreadsheet to read from",
        )
        range: str = SchemaField(
            description="The A1 notation of the range to read",
        )

    class Output(BlockSchema):
        result: list[list[str]] = SchemaField(
            description="The data read from the spreadsheet",
        )
        error: str = SchemaField(
            description="Error message if any",
        )

    def __init__(self):
        super().__init__(
            id="5724e902-3635-47e9-a108-aaa0263a4988",
            description="This block reads data from a Google Sheets spreadsheet.",
            categories={BlockCategory.DATA},
            input_schema=GoogleSheetsReadBlock.Input,
            output_schema=GoogleSheetsReadBlock.Output,
            disabled=not GOOGLE_OAUTH_IS_CONFIGURED,
            test_input={
                "spreadsheet_id": "1BxiMVs0XRA5nFMdKvBdBZjgmUUqptlbs74OgvE2upms",
                "range": "Sheet1!A1:B2",
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                (
                    "result",
                    [
                        ["Name", "Score"],
                        ["Alice", "85"],
                    ],
                ),
            ],
            test_mock={
                "_read_sheet": lambda *args, **kwargs: [
                    ["Name", "Score"],
                    ["Alice", "85"],
                ],
            },
        )

    def run(
        self, input_data: Input, *, credentials: GoogleCredentials, **kwargs
    ) -> BlockOutput:
        try:
            service = self._build_service(credentials, **kwargs)
            data = self._read_sheet(
                service, input_data.spreadsheet_id, input_data.range
            )
            yield "result", data
        except Exception as e:
            yield "error", str(e)

    @staticmethod
    def _build_service(credentials: GoogleCredentials, **kwargs):
        creds = Credentials(
            token=(
                credentials.access_token.get_secret_value()
                if credentials.access_token
                else None
            ),
            refresh_token=(
                credentials.refresh_token.get_secret_value()
                if credentials.refresh_token
                else None
            ),
            token_uri="https://oauth2.googleapis.com/token",
            client_id=kwargs.get("client_id"),
            client_secret=kwargs.get("client_secret"),
            scopes=credentials.scopes,
        )
        return build("sheets", "v4", credentials=creds)

    def _read_sheet(self, service, spreadsheet_id: str, range: str) -> list[list[str]]:
        sheet = service.spreadsheets()
        result = sheet.values().get(spreadsheetId=spreadsheet_id, range=range).execute()
        return result.get("values", [])


class GoogleSheetsWriteBlock(Block):
    class Input(BlockSchema):
        credentials: GoogleCredentialsInput = GoogleCredentialsField(
            ["https://www.googleapis.com/auth/spreadsheets"]
        )
        spreadsheet_id: str = SchemaField(
            description="The ID of the spreadsheet to write to",
        )
        range: str = SchemaField(
            description="The A1 notation of the range to write",
        )
        values: list[list[str]] = SchemaField(
            description="The data to write to the spreadsheet",
        )

    class Output(BlockSchema):
        result: dict = SchemaField(
            description="The result of the write operation",
        )
        error: str = SchemaField(
            description="Error message if any",
        )

    def __init__(self):
        super().__init__(
            id="d9291e87-301d-47a8-91fe-907fb55460e5",
            description="This block writes data to a Google Sheets spreadsheet.",
            categories={BlockCategory.DATA},
            input_schema=GoogleSheetsWriteBlock.Input,
            output_schema=GoogleSheetsWriteBlock.Output,
            disabled=not GOOGLE_OAUTH_IS_CONFIGURED,
            test_input={
                "spreadsheet_id": "1BxiMVs0XRA5nFMdKvBdBZjgmUUqptlbs74OgvE2upms",
                "range": "Sheet1!A1:B2",
                "values": [
                    ["Name", "Score"],
                    ["Bob", "90"],
                ],
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                (
                    "result",
                    {"updatedCells": 4, "updatedColumns": 2, "updatedRows": 2},
                ),
            ],
            test_mock={
                "_write_sheet": lambda *args, **kwargs: {
                    "updatedCells": 4,
                    "updatedColumns": 2,
                    "updatedRows": 2,
                },
            },
        )

    def run(
        self, input_data: Input, *, credentials: GoogleCredentials, **kwargs
    ) -> BlockOutput:
        try:
            service = GoogleSheetsReadBlock._build_service(credentials, **kwargs)
            result = self._write_sheet(
                service,
                input_data.spreadsheet_id,
                input_data.range,
                input_data.values,
            )
            yield "result", result
        except Exception as e:
            yield "error", str(e)

    def _write_sheet(
        self, service, spreadsheet_id: str, range: str, values: list[list[str]]
    ) -> dict:
        body = {"values": values}
        result = (
            service.spreadsheets()
            .values()
            .update(
                spreadsheetId=spreadsheet_id,
                range=range,
                valueInputOption="USER_ENTERED",
                body=body,
            )
            .execute()
        )
        return result

264 autogpt_platform/backend/backend/blocks/ideogram.py Normal file
@@ -0,0 +1,264 @@
from enum import Enum
from typing import Any, Dict, Optional

import requests

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import BlockSecret, SchemaField, SecretField


class IdeogramModelName(str, Enum):
    V2 = "V_2"
    V1 = "V_1"
    V1_TURBO = "V_1_TURBO"
    V2_TURBO = "V_2_TURBO"


class MagicPromptOption(str, Enum):
    AUTO = "AUTO"
    ON = "ON"
    OFF = "OFF"


class StyleType(str, Enum):
    AUTO = "AUTO"
    GENERAL = "GENERAL"
    REALISTIC = "REALISTIC"
    DESIGN = "DESIGN"
    RENDER_3D = "RENDER_3D"
    ANIME = "ANIME"


class ColorPalettePreset(str, Enum):
    NONE = "NONE"
    EMBER = "EMBER"
    FRESH = "FRESH"
    JUNGLE = "JUNGLE"
    MAGIC = "MAGIC"
    MELON = "MELON"
    MOSAIC = "MOSAIC"
    PASTEL = "PASTEL"
    ULTRAMARINE = "ULTRAMARINE"


class AspectRatio(str, Enum):
    ASPECT_10_16 = "ASPECT_10_16"
    ASPECT_16_10 = "ASPECT_16_10"
    ASPECT_9_16 = "ASPECT_9_16"
    ASPECT_16_9 = "ASPECT_16_9"
    ASPECT_3_2 = "ASPECT_3_2"
    ASPECT_2_3 = "ASPECT_2_3"
    ASPECT_4_3 = "ASPECT_4_3"
    ASPECT_3_4 = "ASPECT_3_4"
    ASPECT_1_1 = "ASPECT_1_1"
    ASPECT_1_3 = "ASPECT_1_3"
    ASPECT_3_1 = "ASPECT_3_1"


class UpscaleOption(str, Enum):
    AI_UPSCALE = "AI Upscale"
    NO_UPSCALE = "No Upscale"


class IdeogramModelBlock(Block):
    class Input(BlockSchema):
        api_key: BlockSecret = SecretField(
            key="ideogram_api_key",
            description="Ideogram API Key",
        )
        prompt: str = SchemaField(
            description="Text prompt for image generation",
            placeholder="e.g., 'A futuristic cityscape at sunset'",
            title="Prompt",
        )
        ideogram_model_name: IdeogramModelName = SchemaField(
            description="The name of the Image Generation Model, e.g., V_2",
            default=IdeogramModelName.V2,
            title="Image Generation Model",
            enum=IdeogramModelName,
            advanced=False,
        )
        aspect_ratio: AspectRatio = SchemaField(
            description="Aspect ratio for the generated image",
            default=AspectRatio.ASPECT_1_1,
            title="Aspect Ratio",
            enum=AspectRatio,
            advanced=False,
        )
        upscale: UpscaleOption = SchemaField(
            description="Upscale the generated image",
            default=UpscaleOption.NO_UPSCALE,
            title="Upscale Image",
            enum=UpscaleOption,
            advanced=False,
        )
        magic_prompt_option: MagicPromptOption = SchemaField(
            description="Whether to use MagicPrompt for enhancing the request",
            default=MagicPromptOption.AUTO,
            title="Magic Prompt Option",
            enum=MagicPromptOption,
            advanced=True,
        )
        seed: Optional[int] = SchemaField(
            description="Random seed. Set for reproducible generation",
            default=None,
            title="Seed",
            advanced=True,
        )
        style_type: StyleType = SchemaField(
            description="Style type to apply, applicable for V_2 and above",
            default=StyleType.AUTO,
            title="Style Type",
            enum=StyleType,
            advanced=True,
        )
        negative_prompt: Optional[str] = SchemaField(
            description="Description of what to exclude from the image",
            default=None,
            title="Negative Prompt",
            advanced=True,
        )
        color_palette_name: ColorPalettePreset = SchemaField(
            description="Color palette preset name, choose 'None' to skip",
            default=ColorPalettePreset.NONE,
            title="Color Palette Preset",
            enum=ColorPalettePreset,
            advanced=True,
        )

    class Output(BlockSchema):
        result: str = SchemaField(description="Generated image URL")
        error: Optional[str] = SchemaField(
            description="Error message if the model run failed"
        )

    def __init__(self):
        super().__init__(
            id="6ab085e2-20b3-4055-bc3e-08036e01eca6",
            description="This block runs Ideogram models with both simple and advanced settings.",
            categories={BlockCategory.AI},
            input_schema=IdeogramModelBlock.Input,
            output_schema=IdeogramModelBlock.Output,
            test_input={
                "api_key": "test_api_key",
                "ideogram_model_name": IdeogramModelName.V2,
                "prompt": "A futuristic cityscape at sunset",
                "aspect_ratio": AspectRatio.ASPECT_1_1,
                "upscale": UpscaleOption.NO_UPSCALE,
                "magic_prompt_option": MagicPromptOption.AUTO,
                "seed": None,
                "style_type": StyleType.AUTO,
                "negative_prompt": None,
                "color_palette_name": ColorPalettePreset.NONE,
            },
            test_output=[
                (
                    "result",
                    "https://ideogram.ai/api/images/test-generated-image-url.png",
                ),
            ],
            test_mock={
                "run_model": lambda api_key, model_name, prompt, seed, aspect_ratio, magic_prompt_option, style_type, negative_prompt, color_palette_name: "https://ideogram.ai/api/images/test-generated-image-url.png",
                "upscale_image": lambda api_key, image_url: "https://ideogram.ai/api/images/test-upscaled-image-url.png",
            },
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        seed = input_data.seed

        try:
            # Step 1: Generate the image
            result = self.run_model(
                api_key=input_data.api_key.get_secret_value(),
                model_name=input_data.ideogram_model_name.value,
                prompt=input_data.prompt,
                seed=seed,
                aspect_ratio=input_data.aspect_ratio.value,
                magic_prompt_option=input_data.magic_prompt_option.value,
                style_type=input_data.style_type.value,
                negative_prompt=input_data.negative_prompt,
                color_palette_name=input_data.color_palette_name.value,
            )

            # Step 2: Upscale the image if requested
            if input_data.upscale == UpscaleOption.AI_UPSCALE:
                result = self.upscale_image(
                    api_key=input_data.api_key.get_secret_value(),
                    image_url=result,
                )

            yield "result", result
        except Exception as e:
            yield "error", str(e)

    def run_model(
        self,
        api_key: str,
        model_name: str,
        prompt: str,
        seed: Optional[int],
        aspect_ratio: str,
        magic_prompt_option: str,
        style_type: str,
        negative_prompt: Optional[str],
        color_palette_name: str,
    ):
        url = "https://api.ideogram.ai/generate"
        headers = {"Api-Key": api_key, "Content-Type": "application/json"}

        data: Dict[str, Any] = {
            "image_request": {
                "prompt": prompt,
                "model": model_name,
                "aspect_ratio": aspect_ratio,
                "magic_prompt_option": magic_prompt_option,
                "style_type": style_type,
            }
        }

        if seed is not None:
            data["image_request"]["seed"] = seed

        if negative_prompt:
            data["image_request"]["negative_prompt"] = negative_prompt

        if color_palette_name != "NONE":
            data["image_request"]["color_palette"] = {"name": color_palette_name}

        try:
            response = requests.post(url, json=data, headers=headers)
            response.raise_for_status()
            return response.json()["data"][0]["url"]
        except requests.exceptions.RequestException as e:
            raise Exception(f"Failed to fetch image: {str(e)}")

    def upscale_image(self, api_key: str, image_url: str):
        url = "https://api.ideogram.ai/upscale"
        headers = {
            "Api-Key": api_key,
        }

        try:
            # Step 1: Download the image from the provided URL
            image_response = requests.get(image_url)
            image_response.raise_for_status()

            # Step 2: Send the downloaded image to the upscale API
            files = {
                "image_file": ("image.png", image_response.content, "image/png"),
            }

            response = requests.post(
                url,
                headers=headers,
                data={
                    "image_request": "{}",  # Empty JSON object
                },
                files=files,
            )

            response.raise_for_status()
            return response.json()["data"][0]["url"]

        except requests.exceptions.RequestException as e:
            raise Exception(f"Failed to upscale image: {str(e)}")

@@ -19,7 +19,7 @@ class StepThroughItemsBlock(Block):

     def __init__(self):
         super().__init__(
-            id="f8e7d6c5-b4a3-2c1d-0e9f-8g7h6i5j4k3l",
+            id="f66a3543-28d3-4ab5-8945-9b336371e2ce",
             input_schema=StepThroughItemsBlock.Input,
             output_schema=StepThroughItemsBlock.Output,
             categories={BlockCategory.LOGIC},

@@ -392,7 +392,7 @@ class AITextSummarizerBlock(Block):

     def __init__(self):
         super().__init__(
-            id="c3d4e5f6-7g8h-9i0j-1k2l-m3n4o5p6q7r8",
+            id="a0a69be1-4528-491c-a85a-a4ab6873e3f0",
             description="Utilize a Large Language Model (LLM) to summarize a long text.",
             categories={BlockCategory.AI, BlockCategory.TEXT},
             input_schema=AITextSummarizerBlock.Input,

@@ -535,7 +535,7 @@ class AIConversationBlock(Block):

     def __init__(self):
         super().__init__(
-            id="c3d4e5f6-g7h8-i9j0-k1l2-m3n4o5p6q7r8",
+            id="32a87eab-381e-4dd4-bdb8-4c47151be35a",
             description="Advanced LLM call that takes a list of messages and sends them to the language model.",
             categories={BlockCategory.AI},
             input_schema=AIConversationBlock.Input,

@@ -11,12 +11,14 @@ from backend.data.model import BlockSecret, SchemaField, SecretField
 class ReplicateFluxModelName(str, Enum):
     FLUX_SCHNELL = ("Flux Schnell",)
     FLUX_PRO = ("Flux Pro",)
+    FLUX_PRO1_1 = ("Flux Pro 1.1",)

     @property
     def api_name(self):
         api_names = {
             ReplicateFluxModelName.FLUX_SCHNELL: "black-forest-labs/flux-schnell",
             ReplicateFluxModelName.FLUX_PRO: "black-forest-labs/flux-pro",
+            ReplicateFluxModelName.FLUX_PRO1_1: "black-forest-labs/flux-1.1-pro",
         }
         return api_names[self]

@@ -43,7 +43,7 @@ class ReadRSSFeedBlock(Block):

     def __init__(self):
         super().__init__(
-            id="c6731acb-4105-4zp1-bc9b-03d0036h370g",
+            id="5ebe6768-8e5d-41e3-9134-1c7bd89a8d52",
             input_schema=ReadRSSFeedBlock.Input,
             output_schema=ReadRSSFeedBlock.Output,
             description="Reads RSS feed entries from a given URL.",

@@ -25,7 +25,7 @@ class GetWikipediaSummaryBlock(Block, GetRequest):

     def __init__(self):
         super().__init__(
-            id="h5e7f8g9-1b2c-3d4e-5f6g-7h8i9j0k1l2m",
+            id="f5b0f5d0-1862-4d61-94be-3ad0fa772760",
             description="This block fetches the summary of a given topic from Wikipedia.",
             categories={BlockCategory.SEARCH},
             input_schema=GetWikipediaSummaryBlock.Input,

@@ -62,7 +62,7 @@ class SearchTheWebBlock(Block, GetRequest):

     def __init__(self):
         super().__init__(
-            id="b2c3d4e5-6f7g-8h9i-0j1k-l2m3n4o5p6q7",
+            id="87840993-2053-44b7-8da4-187ad4ee518c",
             description="This block searches the internet for the given search query.",
             categories={BlockCategory.SEARCH},
             input_schema=SearchTheWebBlock.Input,

@@ -109,7 +109,7 @@ class ExtractWebsiteContentBlock(Block, GetRequest):

     def __init__(self):
         super().__init__(
-            id="a1b2c3d4-5e6f-7g8h-9i0j-k1l2m3n4o5p6",  # Unique ID for the block
+            id="436c3984-57fd-4b85-8e9a-459b356883bd",
             description="This block scrapes the content from the given web URL.",
             categories={BlockCategory.SEARCH},
             input_schema=ExtractWebsiteContentBlock.Input,

@@ -0,0 +1,79 @@
from typing import Any

import requests

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import BlockSecret, SchemaField, SecretField


class UnrealTextToSpeechBlock(Block):
    class Input(BlockSchema):
        text: str = SchemaField(
            description="The text to be converted to speech",
            placeholder="Enter the text you want to convert to speech",
        )
        voice_id: str = SchemaField(
            description="The voice ID to use for text-to-speech conversion",
            placeholder="Scarlett",
            default="Scarlett",
        )
        api_key: BlockSecret = SecretField(
            key="unreal_speech_api_key", description="Your Unreal Speech API key"
        )

    class Output(BlockSchema):
        mp3_url: str = SchemaField(description="The URL of the generated MP3 file")
        error: str = SchemaField(description="Error message if the API call failed")

    def __init__(self):
        super().__init__(
            id="4ff1ff6d-cc40-4caa-ae69-011daa20c378",
            description="Converts text to speech using the Unreal Speech API",
            categories={BlockCategory.AI, BlockCategory.TEXT},
            input_schema=UnrealTextToSpeechBlock.Input,
            output_schema=UnrealTextToSpeechBlock.Output,
            test_input={
                "text": "This is a test of the text to speech API.",
                "voice_id": "Scarlett",
                "api_key": "test_api_key",
            },
            test_output=[("mp3_url", "https://example.com/test.mp3")],
            test_mock={
                "call_unreal_speech_api": lambda *args, **kwargs: {
                    "OutputUri": "https://example.com/test.mp3"
                }
            },
        )

    @staticmethod
    def call_unreal_speech_api(
        api_key: str, text: str, voice_id: str
    ) -> dict[str, Any]:
        url = "https://api.v7.unrealspeech.com/speech"
        headers = {
            "Authorization": f"Bearer {api_key}",
            "Content-Type": "application/json",
        }
        data = {
            "Text": text,
            "VoiceId": voice_id,
            "Bitrate": "192k",
            "Speed": "0",
            "Pitch": "1",
            "TimestampType": "sentence",
        }

        response = requests.post(url, headers=headers, json=data)
        response.raise_for_status()
        return response.json()

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        try:
            api_response = self.call_unreal_speech_api(
                input_data.api_key.get_secret_value(),
                input_data.text,
                input_data.voice_id,
            )
            yield "mp3_url", api_response["OutputUri"]
        except Exception as e:
            yield "error", str(e)

@@ -77,7 +77,7 @@ class GetCurrentDateAndTimeBlock(Block):

     def __init__(self):
         super().__init__(
-            id="b29c1b50-5d0e-4d9f-8f9d-1b0e6fcbf0h2",
+            id="716a67b3-6760-42e7-86dc-18645c6e00fc",
             description="This block outputs the current date and time.",
             categories={BlockCategory.TEXT},
             input_schema=GetCurrentDateAndTimeBlock.Input,

@@ -45,7 +45,9 @@ class BlockCategory(Enum):
     INPUT = "Block that interacts with input of the graph."
     OUTPUT = "Block that interacts with output of the graph."
     LOGIC = "Programming logic to control the flow of your agent"
+    COMMUNICATION = "Block that interacts with communication platforms."
+    DEVELOPER_TOOLS = "Developer tools such as GitHub blocks."
     DATA = "Block that interacts with structured data."

     def dict(self) -> dict[str, str]:
         return {"category": self.name, "description": self.value}

@@ -286,7 +288,9 @@ def get_blocks() -> dict[str, Block]:

 async def initialize_blocks() -> None:
     for block in get_blocks().values():
-        existing_block = await AgentBlock.prisma().find_unique(where={"id": block.id})
+        existing_block = await AgentBlock.prisma().find_first(
+            where={"OR": [{"id": block.id}, {"name": block.name}]}
+        )
         if not existing_block:
             await AgentBlock.prisma().create(
                 data={

@@ -301,13 +305,15 @@ async def initialize_blocks() -> None:
         input_schema = json.dumps(block.input_schema.jsonschema())
         output_schema = json.dumps(block.output_schema.jsonschema())
         if (
-            block.name != existing_block.name
+            block.id != existing_block.id
+            or block.name != existing_block.name
             or input_schema != existing_block.inputSchema
             or output_schema != existing_block.outputSchema
         ):
             await AgentBlock.prisma().update(
-                where={"id": block.id},
+                where={"id": existing_block.id},
                 data={
+                    "id": block.id,
                     "name": block.name,
                     "inputSchema": input_schema,
                     "outputSchema": output_schema,

@@ -1,4 +1,3 @@
-import asyncio
 import logging
 import os
 from contextlib import asynccontextmanager

@@ -8,40 +7,30 @@ from dotenv import load_dotenv
 from prisma import Prisma
 from pydantic import BaseModel, Field, field_validator

+from backend.util.retry import conn_retry
+
 load_dotenv()

 PRISMA_SCHEMA = os.getenv("PRISMA_SCHEMA", "schema.prisma")
 os.environ["PRISMA_SCHEMA_PATH"] = PRISMA_SCHEMA

-prisma, conn_id = Prisma(auto_register=True), ""
+prisma = Prisma(auto_register=True)

 logger = logging.getLogger(__name__)


-async def connect(call_count=0):
-    global conn_id
-    if not conn_id:
-        conn_id = str(uuid4())
-
-    try:
-        logger.info(f"[Prisma-{conn_id}] Acquiring connection..")
-        if not prisma.is_connected():
-            await prisma.connect()
-        logger.info(f"[Prisma-{conn_id}] Connection acquired!")
-    except Exception as e:
-        if call_count <= 5:
-            logger.info(f"[Prisma-{conn_id}] Connection failed: {e}. Retrying now..")
-            await asyncio.sleep(2**call_count)
-            await connect(call_count + 1)
-        else:
-            raise e
-
-
-async def disconnect():
-    if prisma.is_connected():
-        logger.info(f"[Prisma-{conn_id}] Releasing connection.")
-        await prisma.disconnect()
-        logger.info(f"[Prisma-{conn_id}] Connection released.")
+@conn_retry("Prisma", "Acquiring connection")
+async def connect():
+    if prisma.is_connected():
+        return
+    await prisma.connect()
+
+
+@conn_retry("Prisma", "Releasing connection")
+async def disconnect():
+    if not prisma.is_connected():
+        return
+    await prisma.disconnect()


 @asynccontextmanager

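The new code leans on backend.util.retry.conn_retry, which this diff imports but does not show. A minimal sketch of what such a decorator might look like, assuming exponential backoff like the deleted connect(call_count) loop; note the real helper evidently also wraps the synchronous Redis functions in redis.py below, which this async-only sketch omits:

    import asyncio
    import functools
    import logging

    logger = logging.getLogger(__name__)


    def conn_retry(service: str, action: str, max_attempts: int = 5):
        """Retry an async connection helper with exponential backoff (sketch)."""

        def decorator(func):
            @functools.wraps(func)
            async def wrapper(*args, **kwargs):
                for attempt in range(max_attempts):
                    try:
                        logger.info(f"[{service}] {action}..")
                        return await func(*args, **kwargs)
                    except Exception as e:
                        if attempt == max_attempts - 1:
                            raise
                        logger.info(f"[{service}] {action} failed: {e}. Retrying..")
                        await asyncio.sleep(2**attempt)  # exponential backoff

            return wrapper

        return decorator
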
@@ -268,10 +268,29 @@ async def update_graph_execution_start_time(graph_exec_id: str):
     )


-async def update_graph_execution_stats(graph_exec_id: str, stats: dict[str, Any]):
+async def update_graph_execution_stats(
+    graph_exec_id: str,
+    error: Exception | None,
+    wall_time: float,
+    cpu_time: float,
+    node_count: int,
+):
+    status = ExecutionStatus.FAILED if error else ExecutionStatus.COMPLETED
+    stats = (
+        {
+            "walltime": wall_time,
+            "cputime": cpu_time,
+            "nodecount": node_count,
+            "error": str(error) if error else None,
+        },
+    )
+
     await AgentGraphExecution.prisma().update(
         where={"id": graph_exec_id},
-        data={"executionStatus": ExecutionStatus.COMPLETED, "stats": json.dumps(stats)},
+        data={
+            "executionStatus": status,
+            "stats": json.dumps(stats),
+        },
     )

@@ -1,11 +1,9 @@
import json
import logging
import os
from abc import ABC, abstractmethod
from datetime import datetime

from redis.asyncio import Redis

from backend.data import redis
from backend.data.execution import ExecutionResult

logger = logging.getLogger(__name__)
@@ -18,60 +16,46 @@ class DateTimeEncoder(json.JSONEncoder):
return super().default(o)


class AsyncEventQueue(ABC):
class AbstractEventQueue(ABC):
@abstractmethod
async def connect(self):
def connect(self):
pass

@abstractmethod
async def close(self):
def close(self):
pass

@abstractmethod
async def put(self, execution_result: ExecutionResult):
def put(self, execution_result: ExecutionResult):
pass

@abstractmethod
async def get(self) -> ExecutionResult | None:
def get(self) -> ExecutionResult | None:
pass


class AsyncRedisEventQueue(AsyncEventQueue):
class RedisEventQueue(AbstractEventQueue):
def __init__(self):
self.host = os.getenv("REDIS_HOST", "localhost")
self.port = int(os.getenv("REDIS_PORT", "6379"))
self.password = os.getenv("REDIS_PASSWORD", "password")
self.queue_name = os.getenv("REDIS_QUEUE", "execution_events")
self.connection = None
self.queue_name = redis.QUEUE_NAME

async def connect(self):
if not self.connection:
self.connection = Redis(
host=self.host,
port=self.port,
password=self.password,
decode_responses=True,
)
await self.connection.ping()
logger.info(f"Connected to Redis on {self.host}:{self.port}")
def connect(self):
self.connection = redis.connect()

async def put(self, execution_result: ExecutionResult):
def put(self, execution_result: ExecutionResult):
if self.connection:
message = json.dumps(execution_result.model_dump(), cls=DateTimeEncoder)
logger.info(f"Putting execution result to Redis {message}")
await self.connection.lpush(self.queue_name, message) # type: ignore
self.connection.lpush(self.queue_name, message)

async def get(self) -> ExecutionResult | None:
def get(self) -> ExecutionResult | None:
if self.connection:
message = await self.connection.rpop(self.queue_name) # type: ignore
message = self.connection.rpop(self.queue_name)
if message is not None and isinstance(message, (str, bytes, bytearray)):
data = json.loads(message)
logger.info(f"Getting execution result from Redis {data}")
return ExecutionResult(**data)
return None

async def close(self):
if self.connection:
await self.connection.close()
self.connection = None
logger.info("Closed connection to Redis")
def close(self):
redis.disconnect()

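The event queue is now a thin synchronous wrapper over a shared Redis connection. A minimal sketch of the LPUSH/RPOP round trip it performs, assuming a local Redis and a plain dict standing in for the ExecutionResult model:

import json
from redis import Redis

r = Redis(host="localhost", port=6379, password="testpassword", decode_responses=True)
r.lpush("execution_events", json.dumps({"status": "COMPLETED"}))  # producer side: put()
raw = r.rpop("execution_events")  # consumer side: get(); returns None when the queue is empty
event = json.loads(raw) if raw else None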
48
autogpt_platform/backend/backend/data/redis.py
Normal file
@@ -0,0 +1,48 @@
import logging
import os

from dotenv import load_dotenv
from redis import Redis

from backend.util.retry import conn_retry

load_dotenv()

HOST = os.getenv("REDIS_HOST", "localhost")
PORT = int(os.getenv("REDIS_PORT", "6379"))
PASSWORD = os.getenv("REDIS_PASSWORD", "password")
QUEUE_NAME = os.getenv("REDIS_QUEUE", "execution_events")

logger = logging.getLogger(__name__)
connection: Redis | None = None


@conn_retry("Redis", "Acquiring connection")
def connect() -> Redis:
global connection
if connection:
return connection

c = Redis(
host=HOST,
port=PORT,
password=PASSWORD,
decode_responses=True,
)
c.ping()
connection = c
return connection


@conn_retry("Redis", "Releasing connection")
def disconnect():
global connection
if connection:
connection.close()
connection = None


def get_redis() -> Redis:
if not connection:
raise RuntimeError("Redis connection is not established")
return connection
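Usage of the new connection module is deliberately simple: connect() is retried and memoized, while get_redis() fails fast when nothing is connected. A hedged sketch, assuming the module is importable as below:

from backend.data import redis

redis.connect()  # retried via @conn_retry; returns the cached connection on repeat calls
client = redis.get_redis()  # raises RuntimeError if connect() was never called
client.ping()
redis.disconnect()  # closes and clears the module-level connection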
@@ -17,7 +17,7 @@ from pydantic import BaseModel
if TYPE_CHECKING:
from backend.server.rest_api import AgentServer

from backend.data import db
from backend.data import db, redis
from backend.data.block import Block, BlockData, BlockInput, BlockType, get_block
from backend.data.credit import get_user_credit_model
from backend.data.execution import (
@@ -216,12 +216,13 @@ def execute_node(


@contextmanager
def synchronized(api_client: "AgentServer", key: Any):
api_client.acquire_lock(key)
def synchronized(key: str, timeout: int = 60):
lock = redis.get_redis().lock(f"lock:{key}", timeout=timeout)
try:
lock.acquire()
yield
finally:
api_client.release_lock(key)
lock.release()


def _enqueue_next_nodes(
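This swaps the process-local KeyedMutex (removed further below) for a Redis-backed lock, so the critical section holds across executor processes, and the timeout releases locks left behind by a crashed holder. A minimal standalone sketch of the same pattern, assuming a reachable Redis:

from contextlib import contextmanager
from redis import Redis

r = Redis(decode_responses=True)

@contextmanager
def synchronized(key: str, timeout: int = 60):
    # the timeout bounds how long a dead process can keep others waiting
    lock = r.lock(f"lock:{key}", timeout=timeout)
    try:
        lock.acquire()
        yield
    finally:
        lock.release()

with synchronized("upsert_input-node1-exec1"):
    pass  # at most one process runs this block per key at a time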
@@ -268,7 +269,7 @@ def _enqueue_next_nodes(
# Multiple nodes can register the same next node; this needs to be atomic
# to avoid the same execution being enqueued multiple times,
# or the same input being consumed multiple times.
with synchronized(api_client, ("upsert_input", next_node_id, graph_exec_id)):
with synchronized(f"upsert_input-{next_node_id}-{graph_exec_id}"):
# Add output data to the earliest incomplete execution, or create a new one.
next_node_exec_id, next_node_input = wait(
upsert_execution_input(
@@ -437,6 +438,7 @@ class Executor:
cls.loop = asyncio.new_event_loop()
cls.pid = os.getpid()

redis.connect()
cls.loop.run_until_complete(db.connect())
cls.agent_server_client = get_agent_server_client()

@@ -454,6 +456,8 @@ class Executor:

logger.info(f"[on_node_executor_stop {cls.pid}] ⏳ Disconnecting DB...")
cls.loop.run_until_complete(db.disconnect())
logger.info(f"[on_node_executor_stop {cls.pid}] ⏳ Disconnecting Redis...")
redis.disconnect()
logger.info(f"[on_node_executor_stop {cls.pid}] ✅ Finished cleanup")

@classmethod
@@ -561,18 +565,17 @@ class Executor:
node_eid="*",
block_name="-",
)
timing_info, node_count = cls._on_graph_execution(
timing_info, (node_count, error) = cls._on_graph_execution(
graph_exec, cancel, log_metadata
)

cls.loop.run_until_complete(
update_graph_execution_stats(
graph_exec.graph_exec_id,
{
"walltime": timing_info.wall_time,
"cputime": timing_info.cpu_time,
"nodecount": node_count,
},
graph_exec_id=graph_exec.graph_exec_id,
error=error,
wall_time=timing_info.wall_time,
cpu_time=timing_info.cpu_time,
node_count=node_count,
)
)

@@ -583,9 +586,15 @@ class Executor:
graph_exec: GraphExecution,
cancel: threading.Event,
log_metadata: LogMetadata,
) -> int:
) -> tuple[int, Exception | None]:
"""
Returns:
The number of node executions completed.
The error that occurred during the execution.
"""
log_metadata.info(f"Start graph execution {graph_exec.graph_exec_id}")
n_node_executions = 0
error = None
finished = False

def cancel_handler():
@@ -619,7 +628,8 @@ class Executor:

while not queue.empty():
if cancel.is_set():
return n_node_executions
error = RuntimeError("Execution is cancelled")
return n_node_executions, error

exec_data = queue.get()

@@ -653,7 +663,8 @@ class Executor:
)
for node_id, execution in list(running_executions.items()):
if cancel.is_set():
return n_node_executions
error = RuntimeError("Execution is cancelled")
return n_node_executions, error

if not queue.empty():
break # yield to parent loop to execute new queue items
@@ -666,12 +677,13 @@ class Executor:
log_metadata.exception(
f"Failed graph execution {graph_exec.graph_exec_id}: {e}"
)
error = e
finally:
if not cancel.is_set():
finished = True
cancel.set()
cancel_thread.join()
return n_node_executions
return n_node_executions, error


class ExecutionManager(AppService):
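With the executor now returning (node_count, error), the stats update can mark a run FAILED and record the error message. A small sketch of that derivation, mirroring the new update_graph_execution_stats signature (build_stats is a hypothetical helper name used here only for illustration):

def build_stats(error: Exception | None, wall_time: float, cpu_time: float, node_count: int):
    # FAILED whenever an error (including cancellation) was captured, else COMPLETED
    status = "FAILED" if error else "COMPLETED"
    stats = {
        "walltime": wall_time,
        "cputime": cpu_time,
        "nodecount": node_count,
        "error": str(error) if error else None,
    }
    return status, stats

assert build_stats(None, 1.2, 0.4, 3)[0] == "COMPLETED"
assert build_stats(RuntimeError("boom"), 1.2, 0.4, 3)[1]["error"] == "boom"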
@@ -24,7 +24,6 @@ class ExecutionScheduler(AppService):
self.use_db = True
self.last_check = datetime.min
self.refresh_interval = refresh_interval
self.use_redis = False

@property
def execution_manager_client(self) -> ExecutionManager:

@@ -1,12 +1,16 @@
import logging
import time
from abc import ABC, abstractmethod
from typing import ClassVar

from autogpt_libs.supabase_integration_credentials_store import OAuth2Credentials

logger = logging.getLogger(__name__)


class BaseOAuthHandler(ABC):
PROVIDER_NAME: ClassVar[str]
DEFAULT_SCOPES: ClassVar[list[str]] = []

@abstractmethod
def __init__(self, client_id: str, client_secret: str, redirect_uri: str): ...
@@ -17,7 +21,9 @@ class BaseOAuthHandler(ABC):
...

@abstractmethod
def exchange_code_for_tokens(self, code: str) -> OAuth2Credentials:
def exchange_code_for_tokens(
self, code: str, scopes: list[str]
) -> OAuth2Credentials:
"""Exchanges the acquired authorization code from login for a set of tokens"""
...

@@ -46,3 +52,11 @@ class BaseOAuthHandler(ABC):
credentials.access_token_expires_at is not None
and credentials.access_token_expires_at < int(time.time()) + 300
)

def handle_default_scopes(self, scopes: list[str]) -> list[str]:
"""Handles the default scopes for the provider"""
# If scopes are empty, use the default scopes for the provider
if not scopes:
logger.debug(f"Using default scopes for provider {self.PROVIDER_NAME}")
scopes = self.DEFAULT_SCOPES
return scopes
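A sketch of how a concrete provider plugs into the revised interface; ExampleOAuthHandler and its scope values are hypothetical and only exercise handle_default_scopes:

from typing import ClassVar

class ExampleOAuthHandler:  # would subclass BaseOAuthHandler in the real code
    PROVIDER_NAME: ClassVar[str] = "example"
    DEFAULT_SCOPES: ClassVar[list[str]] = ["profile"]

    def handle_default_scopes(self, scopes: list[str]) -> list[str]:
        # an empty request falls back to the provider's defaults
        return scopes or self.DEFAULT_SCOPES

handler = ExampleOAuthHandler()
assert handler.handle_default_scopes([]) == ["profile"]
assert handler.handle_default_scopes(["email"]) == ["email"]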
@@ -41,7 +41,9 @@ class GitHubOAuthHandler(BaseOAuthHandler):
}
return f"{self.auth_base_url}?{urlencode(params)}"

def exchange_code_for_tokens(self, code: str) -> OAuth2Credentials:
def exchange_code_for_tokens(
self, code: str, scopes: list[str]
) -> OAuth2Credentials:
return self._request_tokens({"code": code, "redirect_uri": self.redirect_uri})

def _refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials:

@@ -1,3 +1,5 @@
import logging

from autogpt_libs.supabase_integration_credentials_store import OAuth2Credentials
from google.auth.external_account_authorized_user import (
Credentials as ExternalAccountCredentials,
@@ -9,6 +11,8 @@ from pydantic import SecretStr

from .base import BaseOAuthHandler

logger = logging.getLogger(__name__)


class GoogleOAuthHandler(BaseOAuthHandler):
"""
@@ -17,6 +21,11 @@ class GoogleOAuthHandler(BaseOAuthHandler):

PROVIDER_NAME = "google"
EMAIL_ENDPOINT = "https://www.googleapis.com/oauth2/v2/userinfo"
DEFAULT_SCOPES = [
"https://www.googleapis.com/auth/userinfo.email",
"https://www.googleapis.com/auth/userinfo.profile",
"openid",
]

def __init__(self, client_id: str, client_secret: str, redirect_uri: str):
self.client_id = client_id
@@ -25,7 +34,9 @@ class GoogleOAuthHandler(BaseOAuthHandler):
self.token_uri = "https://oauth2.googleapis.com/token"

def get_login_url(self, scopes: list[str], state: str) -> str:
flow = self._setup_oauth_flow(scopes)
all_scopes = list(set(scopes + self.DEFAULT_SCOPES))
logger.debug(f"Setting up OAuth flow with scopes: {all_scopes}")
flow = self._setup_oauth_flow(all_scopes)
flow.redirect_uri = self.redirect_uri
authorization_url, _ = flow.authorization_url(
access_type="offline",
@@ -35,29 +46,57 @@ class GoogleOAuthHandler(BaseOAuthHandler):
)
return authorization_url

def exchange_code_for_tokens(self, code: str) -> OAuth2Credentials:
flow = self._setup_oauth_flow(None)
def exchange_code_for_tokens(
self, code: str, scopes: list[str]
) -> OAuth2Credentials:
logger.debug(f"Exchanging code for tokens with scopes: {scopes}")

# Use the scopes from the initial request
flow = self._setup_oauth_flow(scopes)
flow.redirect_uri = self.redirect_uri
flow.fetch_token(code=code)

logger.debug("Fetching token from Google")

# Disable scope check in fetch_token
flow.oauth2session.scope = None
token = flow.fetch_token(code=code)
logger.debug("Token fetched successfully")

# Get the actual scopes granted by Google
granted_scopes: list[str] = token.get("scope", [])

logger.debug(f"Scopes granted by Google: {granted_scopes}")

google_creds = flow.credentials
username = self._request_email(google_creds)
logger.debug(f"Received credentials: {google_creds}")

logger.debug("Requesting user email")
username = self._request_email(google_creds)
logger.debug(f"User email retrieved: {username}")

# Google's OAuth library is poorly typed so we need some of these:
assert google_creds.token
assert google_creds.refresh_token
assert google_creds.expiry
assert google_creds.scopes
return OAuth2Credentials(
assert granted_scopes

# Create OAuth2Credentials with the granted scopes
credentials = OAuth2Credentials(
provider=self.PROVIDER_NAME,
title=None,
username=username,
access_token=SecretStr(google_creds.token),
refresh_token=SecretStr(google_creds.refresh_token),
access_token_expires_at=int(google_creds.expiry.timestamp()),
refresh_token=(SecretStr(google_creds.refresh_token)),
access_token_expires_at=(
int(google_creds.expiry.timestamp()) if google_creds.expiry else None
),
refresh_token_expires_at=None,
scopes=google_creds.scopes,
scopes=granted_scopes,
)
logger.debug(
f"OAuth2Credentials object created successfully with scopes: {credentials.scopes}"
)

return credentials

def _request_email(
self, creds: Credentials | ExternalAccountCredentials
@@ -65,6 +104,9 @@ class GoogleOAuthHandler(BaseOAuthHandler):
session = AuthorizedSession(creds)
response = session.get(self.EMAIL_ENDPOINT)
if not response.ok:
logger.error(
f"Failed to get user email. Status code: {response.status_code}"
)
return None
return response.json()["email"]

@@ -99,7 +141,7 @@ class GoogleOAuthHandler(BaseOAuthHandler):
scopes=google_creds.scopes,
)

def _setup_oauth_flow(self, scopes: list[str] | None) -> Flow:
def _setup_oauth_flow(self, scopes: list[str]) -> Flow:
return Flow.from_client_config(
{
"web": {

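Google may grant a different scope set than was requested, which is why the handler records granted_scopes from the token response rather than trusting the request. The sufficiency check used later in the callback reduces to plain set logic:

requested = {"openid", "https://www.googleapis.com/auth/userinfo.email"}
granted = {"openid"}

missing = requested - granted  # scopes the user declined or the provider dropped
if not requested.issubset(granted):
    print(f"Granted scopes do not cover the request; missing: {missing}")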
@@ -35,7 +35,9 @@ class NotionOAuthHandler(BaseOAuthHandler):
}
return f"{self.auth_base_url}?{urlencode(params)}"

def exchange_code_for_tokens(self, code: str) -> OAuth2Credentials:
def exchange_code_for_tokens(
self, code: str, scopes: list[str]
) -> OAuth2Credentials:
request_body = {
"grant_type": "authorization_code",
"code": code,

@@ -17,11 +17,10 @@ from backend.data import graph as graph_db
from backend.data import user as user_db
from backend.data.block import BlockInput, CompletedBlockOutput
from backend.data.credit import get_block_costs, get_user_credit_model
from backend.data.queue import AsyncEventQueue, AsyncRedisEventQueue
from backend.data.queue import RedisEventQueue
from backend.data.user import get_or_create_user
from backend.executor import ExecutionManager, ExecutionScheduler
from backend.server.model import CreateGraph, SetGraphActiveVersion
from backend.util.lock import KeyedMutex
from backend.util.service import AppService, expose, get_service_client
from backend.util.settings import Config, Settings

@@ -32,24 +31,23 @@ logger = logging.getLogger(__name__)


class AgentServer(AppService):
mutex = KeyedMutex()
use_redis = True
use_queue = True
_test_dependency_overrides = {}
_user_credit_model = get_user_credit_model()

def __init__(self, event_queue: AsyncEventQueue | None = None):
def __init__(self):
super().__init__(port=Config().agent_server_port)
self.event_queue = event_queue or AsyncRedisEventQueue()
self.event_queue = RedisEventQueue()

@asynccontextmanager
async def lifespan(self, _: FastAPI):
await db.connect()
self.run_and_wait(self.event_queue.connect())
self.event_queue.connect()
await block.initialize_blocks()
if await user_db.create_default_user(settings.config.enable_auth):
await graph_db.import_packaged_templates()
yield
await self.event_queue.close()
self.event_queue.close()
await db.disconnect()

def run_service(self):
@@ -616,15 +614,7 @@ class AgentServer(AppService):
@expose
def send_execution_update(self, execution_result_dict: dict[Any, Any]):
execution_result = execution_db.ExecutionResult(**execution_result_dict)
self.run_and_wait(self.event_queue.put(execution_result))

@expose
def acquire_lock(self, key: Any):
self.mutex.lock(key)

@expose
def release_lock(self, key: Any):
self.mutex.unlock(key)
self.event_queue.put(execution_result)

@classmethod
def update_configuration(

@@ -54,10 +54,11 @@ async def login(
) -> LoginResponse:
handler = _get_provider_oauth_handler(request, provider)

# Generate and store a secure random state token
state_token = await store.store_state_token(user_id, provider)

requested_scopes = scopes.split(",") if scopes else []

# Generate and store a secure random state token along with the scopes
state_token = await store.store_state_token(user_id, provider, requested_scopes)

login_url = handler.get_login_url(requested_scopes, state_token)

return LoginResponse(login_url=login_url, state_token=state_token)
@@ -80,20 +81,44 @@ async def callback(
user_id: Annotated[str, Depends(get_user_id)],
request: Request,
) -> CredentialsMetaResponse:
logger.debug(f"Received OAuth callback for provider: {provider}")
handler = _get_provider_oauth_handler(request, provider)

# Verify the state token
if not await store.verify_state_token(user_id, state_token, provider):
logger.warning(f"Invalid or expired state token for user {user_id}")
raise HTTPException(status_code=400, detail="Invalid or expired state token")

try:
credentials = handler.exchange_code_for_tokens(code)
scopes = await store.get_any_valid_scopes_from_state_token(
user_id, state_token, provider
)
logger.debug(f"Retrieved scopes from state token: {scopes}")

scopes = handler.handle_default_scopes(scopes)

credentials = handler.exchange_code_for_tokens(code, scopes)
logger.debug(f"Received credentials with final scopes: {credentials.scopes}")

# Check if the granted scopes are sufficient for the requested scopes
if not set(scopes).issubset(set(credentials.scopes)):
# For now, we'll just log the warning and continue
logger.warning(
f"Granted scopes {credentials.scopes} for {provider} do not include all requested scopes {scopes}"
)

except Exception as e:
logger.warning(f"Code->Token exchange failed for provider {provider}: {e}")
raise HTTPException(status_code=400, detail=str(e))
logger.error(f"Code->Token exchange failed for provider {provider}: {e}")
raise HTTPException(
status_code=400, detail=f"Failed to exchange code for tokens: {str(e)}"
)

# TODO: Allow specifying `title` to set on `credentials`
store.add_creds(user_id, credentials)

logger.debug(
f"Successfully processed OAuth callback for user {user_id} and provider {provider}"
)
return CredentialsMetaResponse(
id=credentials.id,
type=credentials.type,

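Taken together, the scopes now make a full round trip: stored with the state token at login, recovered and defaulted at callback, then passed into the code-for-tokens exchange. A condensed sketch of that flow, with store and handler as stand-ins for the real objects:

async def oauth_roundtrip(store, handler, user_id: str, provider: str, raw_scopes: str, code: str):
    requested = raw_scopes.split(",") if raw_scopes else []
    state = await store.store_state_token(user_id, provider, requested)  # login leg

    # ...the user authorizes in the popup; the provider redirects back with code and state...

    scopes = await store.get_any_valid_scopes_from_state_token(user_id, state, provider)
    scopes = handler.handle_default_scopes(scopes)  # fall back to provider defaults
    return handler.exchange_code_for_tokens(code, scopes)  # callback leg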
@@ -1,12 +1,13 @@
import asyncio
import logging
from contextlib import asynccontextmanager

import uvicorn
from autogpt_libs.auth import parse_jwt_token
from fastapi import Depends, FastAPI, WebSocket, WebSocketDisconnect
from fastapi.middleware.cors import CORSMiddleware

from backend.data.queue import AsyncRedisEventQueue
from backend.data.queue import RedisEventQueue
from backend.data.user import DEFAULT_USER_ID
from backend.server.conn_manager import ConnectionManager
from backend.server.model import ExecutionSubscription, Methods, WsMessage
@@ -16,8 +17,19 @@ from backend.util.settings import Config, Settings
logger = logging.getLogger(__name__)
settings = Settings()

app = FastAPI()
event_queue = AsyncRedisEventQueue()

@asynccontextmanager
async def lifespan(app: FastAPI):
event_queue.connect()
manager = get_connection_manager()
fut = asyncio.create_task(event_broadcaster(manager))
fut.add_done_callback(lambda _: logger.info("Event broadcaster stopped"))
yield
event_queue.close()


app = FastAPI(lifespan=lifespan)
event_queue = RedisEventQueue()
_connection_manager = None

logger.info(f"CORS allow origins: {settings.config.backend_cors_allow_origins}")
@@ -37,23 +49,13 @@ def get_connection_manager():
return _connection_manager


@app.on_event("startup")
async def startup_event():
await event_queue.connect()
manager = get_connection_manager()
asyncio.create_task(event_broadcaster(manager))


@app.on_event("shutdown")
async def shutdown_event():
await event_queue.close()


async def event_broadcaster(manager: ConnectionManager):
while True:
event = await event_queue.get()
event = event_queue.get()
if event is not None:
await manager.send_execution_result(event)
else:
await asyncio.sleep(0.1)


async def authenticate_websocket(websocket: WebSocket) -> str:

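The WebSocket server drops the deprecated @app.on_event hooks in favor of FastAPI's lifespan context manager, which pairs startup and shutdown logic in one place. The general shape, with placeholder setup and teardown:

from contextlib import asynccontextmanager
from fastapi import FastAPI

@asynccontextmanager
async def lifespan(app: FastAPI):
    print("startup: connect the queue, spawn the broadcaster")  # runs before serving
    yield
    print("shutdown: close the queue")  # runs once the server stops

app = FastAPI(lifespan=lifespan)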
@@ -1,31 +0,0 @@
from threading import Lock
from typing import Any

from expiringdict import ExpiringDict


class KeyedMutex:
"""
This class provides a mutex that can be locked and unlocked by a specific key.
It uses an ExpiringDict to automatically clear the mutex after a specified timeout,
in case the key is not unlocked for a specified duration, to prevent memory leaks.
"""

def __init__(self):
self.locks: dict[Any, tuple[Lock, int]] = ExpiringDict(
max_len=6000, max_age_seconds=60
)
self.locks_lock = Lock()

def lock(self, key: Any):
with self.locks_lock:
lock, request_count = self.locks.get(key, (Lock(), 0))
self.locks[key] = (lock, request_count + 1)
lock.acquire()

def unlock(self, key: Any):
with self.locks_lock:
lock, request_count = self.locks.pop(key)
if request_count > 1:
self.locks[key] = (lock, request_count - 1)
lock.release()
@@ -10,6 +10,11 @@ from backend.util.logging import configure_logging
from backend.util.metrics import sentry_init

logger = logging.getLogger(__name__)
_SERVICE_NAME = "MainProcess"


def get_service_name():
return _SERVICE_NAME


class AppProcess(ABC):
@@ -32,6 +37,11 @@ class AppProcess(ABC):
"""
pass

@classmethod
@property
def service_name(cls) -> str:
return cls.__name__

def cleanup(self):
"""
Implement this method on a subclass to do post-execution cleanup,
@@ -52,10 +62,14 @@ class AppProcess(ABC):
if silent:
sys.stdout = open(os.devnull, "w")
sys.stderr = open(os.devnull, "w")
logger.info(f"[{self.__class__.__name__}] Starting...")

global _SERVICE_NAME
_SERVICE_NAME = self.service_name

logger.info(f"[{self.service_name}] Starting...")
self.run()
except (KeyboardInterrupt, SystemExit) as e:
logger.warning(f"[{self.__class__.__name__}] Terminated: {e}; quitting...")
logger.warning(f"[{self.service_name}] Terminated: {e}; quitting...")

def _self_terminate(self, signum: int, frame):
self.cleanup()

@@ -1,7 +1,48 @@
import logging
import os
from uuid import uuid4

from tenacity import retry, stop_after_attempt, wait_exponential

conn_retry = retry(
stop=stop_after_attempt(30),
wait=wait_exponential(multiplier=1, min=1, max=30),
reraise=True,
)
from backend.util.process import get_service_name

logger = logging.getLogger(__name__)


def _log_prefix(resource_name: str, conn_id: str):
"""
Returns a prefix string for logging purposes.
This needs to be called on the fly to get the current process ID & service name,
not the parent process ID & service name.
"""
return f"[PID-{os.getpid()}|{get_service_name()}|{resource_name}-{conn_id}]"


def conn_retry(resource_name: str, action_name: str, max_retry: int = 5):
conn_id = str(uuid4())

def before_call(retry_state):
prefix = _log_prefix(resource_name, conn_id)
logger.info(f"{prefix} {action_name} started...")

def after_call(retry_state):
prefix = _log_prefix(resource_name, conn_id)
if retry_state.outcome.failed:
# Optionally, you can log something here if needed
pass
else:
logger.info(f"{prefix} {action_name} completed!")

def on_retry(retry_state):
prefix = _log_prefix(resource_name, conn_id)
exception = retry_state.outcome.exception()
logger.info(f"{prefix} {action_name} failed: {exception}. Retrying now...")

return retry(
stop=stop_after_attempt(max_retry + 1),
wait=wait_exponential(multiplier=1, min=1, max=30),
before=before_call,
after=after_call,
before_sleep=on_retry,
reraise=True,
)

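conn_retry changes from a bare tenacity decorator into a factory taking a resource name, an action name for log prefixes, and an optional retry budget. Usage, with a toy connect function:

from backend.util.retry import conn_retry

@conn_retry("Redis", "Acquiring connection", max_retry=5)
def connect() -> None:
    # any exception raised here is retried with exponential backoff (1s, capped at 30s),
    # with "[PID-...|<service>|Redis-<conn_id>]"-prefixed log lines around each attempt
    pass

connect()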
@@ -10,7 +10,7 @@ import Pyro5.api
from Pyro5 import api as pyro

from backend.data import db
from backend.data.queue import AsyncEventQueue, AsyncRedisEventQueue
from backend.data.queue import AbstractEventQueue, RedisEventQueue
from backend.util.process import AppProcess
from backend.util.retry import conn_retry
from backend.util.settings import Config, Secrets
@@ -45,20 +45,15 @@ def expose(func: C) -> C:

class AppService(AppProcess):
shared_event_loop: asyncio.AbstractEventLoop
event_queue: AsyncEventQueue = AsyncRedisEventQueue()
event_queue: AbstractEventQueue = RedisEventQueue()
use_db: bool = False
use_redis: bool = False
use_queue: bool = False
use_supabase: bool = False

def __init__(self, port):
self.port = port
self.uri = None

@classmethod
@property
def service_name(cls) -> str:
return cls.__name__

@abstractmethod
def run_service(self):
while True:
@@ -75,8 +70,8 @@ class AppService(AppProcess):
self.shared_event_loop = asyncio.get_event_loop()
if self.use_db:
self.shared_event_loop.run_until_complete(db.connect())
if self.use_redis:
self.shared_event_loop.run_until_complete(self.event_queue.connect())
if self.use_queue:
self.event_queue.connect()
if self.use_supabase:
from supabase import create_client

@@ -102,11 +97,11 @@ class AppService(AppProcess):
if self.use_db:
logger.info(f"[{self.__class__.__name__}] ⏳ Disconnecting DB...")
self.run_and_wait(db.disconnect())
if self.use_redis:
if self.use_queue:
logger.info(f"[{self.__class__.__name__}] ⏳ Disconnecting Redis...")
self.run_and_wait(self.event_queue.close())
self.event_queue.close()

@conn_retry
@conn_retry("Pyro", "Starting Pyro Service")
def __start_pyro(self):
host = Config().pyro_host
daemon = Pyro5.api.Daemon(host=host, port=self.port)
@@ -125,7 +120,7 @@ def get_service_client(service_type: Type[AS], port: int) -> AS:
service_name = service_type.service_name

class DynamicClient:
@conn_retry
@conn_retry("Pyro", f"Connecting to [{service_name}]")
def __init__(self):
host = os.environ.get(f"{service_name.upper()}_HOST", "localhost")
uri = f"PYRO:{service_type.service_name}@{host}:{port}"

@@ -207,7 +207,6 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
medium_author_id: str = Field(default="", description="Medium author ID")
did_api_key: str = Field(default="", description="D-ID API Key")
revid_api_key: str = Field(default="", description="revid.ai API key")

discord_bot_token: str = Field(default="", description="Discord bot token")

smtp_server: str = Field(default="", description="SMTP server IP")
@@ -220,6 +219,9 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
google_maps_api_key: str = Field(default="", description="Google Maps API Key")

replicate_api_key: str = Field(default="", description="Replicate API Key")
unreal_speech_api_key: str = Field(default="", description="Unreal Speech API Key")
ideogram_api_key: str = Field(default="", description="Ideogram API Key")

# Add more secret fields as needed

model_config = SettingsConfigDict(

@@ -1,11 +1,9 @@
import asyncio
import time

from backend.data import db
from backend.data.block import Block, initialize_blocks
from backend.data.execution import ExecutionResult, ExecutionStatus
from backend.data.execution import ExecutionStatus
from backend.data.model import CREDENTIALS_FIELD_NAME
from backend.data.queue import AsyncEventQueue
from backend.data.user import create_default_user
from backend.executor import ExecutionManager, ExecutionScheduler
from backend.server import AgentServer
@@ -14,44 +12,10 @@ from backend.server.rest_api import get_user_id
log = print


class InMemoryAsyncEventQueue(AsyncEventQueue):
def __init__(self):
self.queue = asyncio.Queue()
self.connected = False
self.closed = False

async def connect(self):
if not self.connected:
self.connected = True
return

async def close(self):
self.closed = True
self.connected = False
return

async def put(self, execution_result: ExecutionResult):
if not self.connected:
raise RuntimeError("Queue is not connected")
await self.queue.put(execution_result)

async def get(self):
if self.closed:
return None
if not self.connected:
raise RuntimeError("Queue is not connected")
try:
item = await asyncio.wait_for(self.queue.get(), timeout=0.1)
return item
except asyncio.TimeoutError:
return None


class SpinTestServer:
def __init__(self):
self.exec_manager = ExecutionManager()
self.in_memory_queue = InMemoryAsyncEventQueue()
self.agent_server = AgentServer(event_queue=self.in_memory_queue)
self.agent_server = AgentServer()
self.scheduler = ExecutionScheduler()

@staticmethod

@@ -1,5 +1,5 @@
import json
from typing import Any, Type, TypeVar, get_origin
from typing import Any, Type, TypeVar, get_args, get_origin


class ConversionError(Exception):
@@ -103,26 +103,75 @@ def __convert_bool(value: Any) -> bool:


def convert(value: Any, target_type: Type):
target_type = get_origin(target_type) or target_type
if target_type not in [list, dict, tuple, str, set, int, float, bool]:
origin = get_origin(target_type)
args = get_args(target_type)
if origin is None:
origin = target_type
if origin not in [list, dict, tuple, str, set, int, float, bool]:
return value
if isinstance(value, target_type):
return value
if target_type is list:
return __convert_list(value)
elif target_type is dict:
return __convert_dict(value)
elif target_type is tuple:
return __convert_tuple(value)
elif target_type is str:
return __convert_str(value)
elif target_type is set:
return __convert_set(value)
elif target_type is int:
return __convert_num(value, int)
elif target_type is float:
return __convert_num(value, float)
elif target_type is bool:
return __convert_bool(value)

# Handle the case when value is already of the target type
if isinstance(value, origin):
if not args:
return value
else:
# Need to convert elements
if origin is list:
return [convert(v, args[0]) for v in value]
elif origin is tuple:
# Tuples can have multiple types
if len(args) == 1:
return tuple(convert(v, args[0]) for v in value)
else:
return tuple(convert(v, t) for v, t in zip(value, args))
elif origin is dict:
key_type, val_type = args
return {
convert(k, key_type): convert(v, val_type) for k, v in value.items()
}
elif origin is set:
return {convert(v, args[0]) for v in value}
else:
return value
else:
return value
# Need to convert value to the origin type
if origin is list:
value = __convert_list(value)
if args:
return [convert(v, args[0]) for v in value]
else:
return value
elif origin is dict:
value = __convert_dict(value)
if args:
key_type, val_type = args
return {
convert(k, key_type): convert(v, val_type) for k, v in value.items()
}
else:
return value
elif origin is tuple:
value = __convert_tuple(value)
if args:
if len(args) == 1:
return tuple(convert(v, args[0]) for v in value)
else:
return tuple(convert(v, t) for v, t in zip(value, args))
else:
return value
elif origin is str:
return __convert_str(value)
elif origin is set:
value = __convert_set(value)
if args:
return {convert(v, args[0]) for v in value}
else:
return value
elif origin is int:
return __convert_num(value, int)
elif origin is float:
return __convert_num(value, float)
elif origin is bool:
return __convert_bool(value)
else:
return value

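With get_args in play, convert now coerces elements through parameterized generics instead of only the bare origin type. A few illustrative calls (the import path is assumed from this module's location), matching the re-enabled test further below:

from typing import Dict, List

from backend.util.type import convert

assert convert("5", List[int]) == [5]  # scalar wrapped, then element coerced
assert convert("[5,4,2]", List[int]) == [5, 4, 2]  # string parsed, elements converted
assert convert([5, 4, 2], List[str]) == ["5", "4", "2"]  # element-wise even when container matches
assert convert({"a": "1"}, Dict[str, int]) == {"a": 1}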
@@ -8,7 +8,7 @@
"nodes": [
{
"id": "b8138bca-7892-42c2-9594-a845d3483413",
"block_id": "d3f4g5h6-1i2j-3k4l-5m6n-7o8p9q0r1s2t",
"block_id": "df06086a-d5ac-4abb-9996-2ad0acb2eff7",
"input_default": {},
"metadata": {
"position": {
@@ -59,7 +59,7 @@
},
{
"id": "dda2d061-2ef9-4dc5-9433-918c8395a4ac",
"block_id": "h1i2j3k4-5l6m-7n8o-9p0q-r1s2t3u4v5w6",
"block_id": "d0822ab5-9f8a-44a3-8971-531dd0178b6b",
"input_default": {},
"metadata": {
"position": {

@@ -110,7 +110,7 @@
},
{
"id": "b45cfa51-5ead-4621-9f1c-f847dfea3e4c",
"block_id": "d3f4g5h6-1i2j-3k4l-5m6n-7o8p9q0r1s2t",
"block_id": "df06086a-d5ac-4abb-9996-2ad0acb2eff7",
"input_default": {},
"metadata": {
"position": {
@@ -146,7 +146,7 @@
},
{
"id": "8eedcf71-1146-4f54-b522-bf9b6e2d26b2",
"block_id": "h1i2j3k4-5l6m-7n8o-9p0q-r1s2t3u4v5w6",
"block_id": "d0822ab5-9f8a-44a3-8971-531dd0178b6b",
"input_default": {},
"metadata": {
"position": {
@@ -197,7 +197,7 @@
},
{
"id": "a568daee-45d2-4429-bf33-cbe9e1261f7b",
"block_id": "c3d4e5f6-g7h8-i9j0-k1l2-m3n4o5p6q7r8",
"block_id": "32a87eab-381e-4dd4-bdb8-4c47151be35a",
"input_default": {
"model": "llama-3.1-70b-versatile",
"max_tokens": 2000

@@ -8,7 +8,7 @@
"nodes": [
{
"id": "60ba4aac-1751-4be7-8745-1bd32191d4a2",
"block_id": "d3f4g5h6-1i2j-3k4l-5m6n-7o8p9q0r1s2t",
"block_id": "df06086a-d5ac-4abb-9996-2ad0acb2eff7",
"input_default": {},
"metadata": {
"position": {
@@ -45,7 +45,7 @@
},
{
"id": "5658c4f7-8e67-4d30-93f2-157bdbd3ef87",
"block_id": "b2c3d4e5-6f7g-8h9i-0j1k-l2m3n4o5p6q7",
"block_id": "87840993-2053-44b7-8da4-187ad4ee518c",
"input_default": {},
"metadata": {
"position": {
@@ -118,7 +118,7 @@
},
{
"id": "f3d62f22-d193-4f04-85d2-164200fca4c0",
"block_id": "h1i2j3k4-5l6m-7n8o-9p0q-r1s2t3u4v5w6",
"block_id": "d0822ab5-9f8a-44a3-8971-531dd0178b6b",
"input_default": {},
"metadata": {
"position": {

@@ -8,7 +8,7 @@
"nodes": [
{
"id": "382efac9-3def-4baf-b16a-d6d2512a5c8b",
"block_id": "b2c3d4e5-6f7g-8h9i-0j1k-l2m3n4o5p6q7",
"block_id": "87840993-2053-44b7-8da4-187ad4ee518c",
"input_default": {
"query": "19th July 2024 Microsoft Blackout"
},
@@ -44,7 +44,7 @@
},
{
"id": "0cd8f670-8956-4942-ba28-aee732ec783f",
"block_id": "b2g2c3d4-5e6f-7g8h-9i0j-k1l2m3n4o5p6",
"block_id": "0e50422c-6dee-4145-83d6-3a5a392f65de",
"input_default": {
"key": "TITLE"
},
@@ -57,7 +57,7 @@
},
{
"id": "4a15b6b9-036d-43d3-915a-7e931fbc6522",
"block_id": "b2g2c3d4-5e6f-7g8h-9i0j-k1l2m3n4o5p6",
"block_id": "0e50422c-6dee-4145-83d6-3a5a392f65de",
"input_default": {
"key": "CONTENT"
},

@@ -0,0 +1,18 @@
-- Update AgentBlock IDs: this should cascade to the AgentNode and UserBlockCredit tables
UPDATE "AgentBlock"
SET "id" = CASE
WHEN "id" = 'a1b2c3d4-5e6f-7g8h-9i0j-k1l2m3n4o5p6' THEN '436c3984-57fd-4b85-8e9a-459b356883bd'
WHEN "id" = 'b2g2c3d4-5e6f-7g8h-9i0j-k1l2m3n4o5p6' THEN '0e50422c-6dee-4145-83d6-3a5a392f65de'
WHEN "id" = 'c3d4e5f6-7g8h-9i0j-1k2l-m3n4o5p6q7r8' THEN 'a0a69be1-4528-491c-a85a-a4ab6873e3f0'
WHEN "id" = 'c3d4e5f6-g7h8-i9j0-k1l2-m3n4o5p6q7r8' THEN '32a87eab-381e-4dd4-bdb8-4c47151be35a'
WHEN "id" = 'b2c3d4e5-6f7g-8h9i-0j1k-l2m3n4o5p6q7' THEN '87840993-2053-44b7-8da4-187ad4ee518c'
WHEN "id" = 'h1i2j3k4-5l6m-7n8o-9p0q-r1s2t3u4v5w6' THEN 'd0822ab5-9f8a-44a3-8971-531dd0178b6b'
WHEN "id" = 'd3f4g5h6-1i2j-3k4l-5m6n-7o8p9q0r1s2t' THEN 'df06086a-d5ac-4abb-9996-2ad0acb2eff7'
WHEN "id" = 'h5e7f8g9-1b2c-3d4e-5f6g-7h8i9j0k1l2m' THEN 'f5b0f5d0-1862-4d61-94be-3ad0fa772760'
WHEN "id" = 'a1234567-89ab-cdef-0123-456789abcdef' THEN '4335878a-394e-4e67-adf2-919877ff49ae'
WHEN "id" = 'f8e7d6c5-b4a3-2c1d-0e9f-8g7h6i5j4k3l' THEN 'f66a3543-28d3-4ab5-8945-9b336371e2ce'
WHEN "id" = 'b29c1b50-5d0e-4d9f-8f9d-1b0e6fcbf0h2' THEN '716a67b3-6760-42e7-86dc-18645c6e00fc'
WHEN "id" = '31d1064e-7446-4693-o7d4-65e5ca9110d1' THEN 'cc10ff7b-7753-4ff2-9af6-9399b1a7eddc'
WHEN "id" = 'c6731acb-4105-4zp1-bc9b-03d0036h370g' THEN '5ebe6768-8e5d-41e3-9134-1c7bd89a8d52'
ELSE "id"
END;
@@ -6,7 +6,6 @@ from backend.util.service import AppService, expose, get_service_client
class TestService(AppService):
def __init__(self):
super().__init__(port=8005)
self.use_redis = False

def run_service(self):
super().run_service()

@@ -27,5 +27,6 @@ def test_type_conversion():

from typing import List

# assert convert("5", List[int]) == [5]
assert convert("5", List[int]) == [5]
assert convert("[5,4,2]", List[int]) == [5, 4, 2]
assert convert([5, 4, 2], List[str]) == ["5", "4", "2"]

@@ -207,6 +207,7 @@ services:
# - NEXT_PUBLIC_AGPT_SERVER_URL=http://localhost:8006/api
# - NEXT_PUBLIC_AGPT_WS_SERVER_URL=ws://localhost:8001/ws
# - NEXT_PUBLIC_AGPT_MARKETPLACE_URL=http://localhost:8015/api/v1/market
# - NEXT_PUBLIC_BEHAVE_AS=LOCAL
# ports:
# - "3000:3000"
# networks:

@@ -13,3 +13,6 @@ NEXT_PUBLIC_SUPABASE_ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAic
## Only used if you're using Supabase and OAuth
AUTH_CALLBACK_URL=http://localhost:3000/auth/callback
GA_MEASUREMENT_ID=G-FH2XK2W4GN

# When running locally, set NEXT_PUBLIC_BEHAVE_AS=CLOUD to use a locally hosted marketplace (as is typical in development and in the cloud deployment); otherwise set it to LOCAL to have the marketplace open in a new tab
NEXT_PUBLIC_BEHAVE_AS=LOCAL
@@ -66,7 +66,7 @@
},
"devDependencies": {
"@playwright/test": "^1.47.1",
"@types/node": "^20",
"@types/node": "^22.7.3",
"@types/react": "^18",
"@types/react-dom": "^18",
"@types/react-modal": "^3.16.3",

@@ -9,7 +9,8 @@ export async function GET(request: Request) {
const code = searchParams.get("code");
const state = searchParams.get("state");

// Send message from popup window to host window
console.debug("OAuth callback received:", { code, state });

const message: OAuthPopupResultMessage =
code && state
? { message_type: "oauth_popup_result", success: true, code, state }
@@ -19,13 +20,15 @@ export async function GET(request: Request) {
message: `Incomplete query: ${searchParams.toString()}`,
};

console.debug("Sending message to opener:", message);

// Return a response with the message as JSON and a script to close the window
return new NextResponse(
`
<html>
<body>
<script>
window.postMessage(${JSON.stringify(message)});
window.opener.postMessage(${JSON.stringify(message)});
window.close();
</script>
</body>

@@ -27,7 +27,7 @@ const loginFormSchema = z.object({
email: z.string().email().min(2).max(64),
password: z.string().min(6).max(64),
agreeToTerms: z.boolean().refine((value) => value === true, {
message: "You must agree to the Terms of Service and Privacy Policy",
message: "You must agree to the Terms of Use and Privacy Policy",
}),
});

@@ -191,8 +191,11 @@ export default function LoginPage() {
<div className="space-y-1 leading-none">
<FormLabel>
I agree to the{" "}
<Link href="/terms-of-service" className="underline">
Terms of Service
<Link
href="https://auto-gpt.notion.site/Terms-of-Use-11400ef5bece80d0b087d7831c5fd6bf"
className="underline"
>
Terms of Use
</Link>{" "}
and{" "}
<Link

@@ -38,7 +38,7 @@ import { IconUndo2, IconRedo2 } from "@/components/ui/icons";
import { startTutorial } from "./tutorial";
import useAgentGraph from "@/hooks/useAgentGraph";
import { v4 as uuidv4 } from "uuid";
import { useRouter, usePathname } from "next/navigation";
import { useRouter, usePathname, useSearchParams } from "next/navigation";
import RunnerUIWrapper, {
RunnerUIWrapperRef,
} from "@/components/RunnerUIWrapper";
@@ -91,41 +91,32 @@ const FlowEditor: React.FC<{

const router = useRouter();
const pathname = usePathname();
const params = useSearchParams();
const initialPositionRef = useRef<{
[key: string]: { x: number; y: number };
}>({});
const isDragging = useRef(false);

// State to control if tutorial has started
const [tutorialStarted, setTutorialStarted] = useState(false);
// State to control if blocks menu should be pinned open
const [pinBlocksPopover, setPinBlocksPopover] = useState(false);
// State to control if save popover should be pinned open
const [pinSavePopover, setPinSavePopover] = useState(false);

const runnerUIRef = useRef<RunnerUIWrapperRef>(null);

const { toast } = useToast();

useEffect(() => {
const params = new URLSearchParams(window.location.search);
const TUTORIAL_STORAGE_KEY = "shepherd-tour";

// If resetting tutorial
useEffect(() => {
if (params.get("resetTutorial") === "true") {
localStorage.removeItem("shepherd-tour"); // Clear tutorial flag
localStorage.removeItem(TUTORIAL_STORAGE_KEY);
router.push(pathname);
} else {
// Otherwise, start tutorial if conditions are met
const shouldStartTutorial = !localStorage.getItem("shepherd-tour");
if (
shouldStartTutorial &&
availableNodes.length > 0 &&
!tutorialStarted
) {
startTutorial(setPinBlocksPopover);
setTutorialStarted(true);
localStorage.setItem("shepherd-tour", "yes");
}
} else if (!localStorage.getItem(TUTORIAL_STORAGE_KEY)) {
startTutorial(setPinBlocksPopover, setPinSavePopover);
localStorage.setItem(TUTORIAL_STORAGE_KEY, "yes");
}
}, [availableNodes, tutorialStarted, router, pathname]);
}, [availableNodes, router, pathname, params]);

useEffect(() => {
const handleKeyDown = (event: KeyboardEvent) => {
@@ -597,6 +588,7 @@ const FlowEditor: React.FC<{
onDescriptionChange={setAgentDescription}
agentName={agentName}
onNameChange={setAgentName}
pinSavePopover={pinSavePopover}
/>
}
></ControlPanel>

@@ -6,8 +6,9 @@ import Image from "next/image";
import getServerUser from "@/hooks/getServerUser";
import ProfileDropdown from "./ProfileDropdown";
import { IconCircleUser, IconMenu } from "@/components/ui/icons";
import CreditButton from "@/components/CreditButton";
import { NavBarButtons } from "./NavBarButtons";
import CreditButton from "@/components/nav/CreditButton";

import { NavBarButtons } from "./nav/NavBarButtons";

export async function NavBar() {
const isAvailable = Boolean(

@@ -1,49 +0,0 @@
"use client";

import Link from "next/link";
import { BsBoxes } from "react-icons/bs";
import { LuLaptop } from "react-icons/lu";
import { LuShoppingCart } from "react-icons/lu";
import { cn } from "@/lib/utils";
import { usePathname } from "next/navigation";

export function NavBarButtons({ className }: { className?: string }) {
"use client";

const pathname = usePathname();
const buttons = [
{
href: "/marketplace",
text: "Marketplace",
icon: <LuShoppingCart />,
},
{
href: "/",
text: "Monitor",
icon: <LuLaptop />,
},
{
href: "/build",
text: "Build",
icon: <BsBoxes />,
},
];

const activeButton = buttons.find((button) => button.href === pathname);

console.log(">>>> ", activeButton);

return buttons.map((button) => (
<Link
key={button.href}
href={button.href}
className={cn(
className,
"rounded-xl p-3",
activeButton === button ? "button bg-gray-950 text-white" : "",
)}
>
{button.icon} {button.text}
</Link>
));
}
@@ -62,6 +62,7 @@ const PrimaryActionBar: React.FC<PrimaryActionBarProps> = ({
background: isRunning ? "#FFB3BA" : "#7544DF",
opacity: isDisabled ? 0.5 : 1,
}}
data-id="primary-action-run-agent"
>
{runButtonIcon}
<span className="text-sm font-medium md:text-lg">

@@ -2,9 +2,11 @@
import React, { useEffect, useState } from "react";
import { Button } from "./ui/button";
import { IconMegaphone } from "@/components/ui/icons";
import { useRouter } from "next/navigation";

const TallyPopupSimple = () => {
const [isFormVisible, setIsFormVisible] = useState(false);
const router = useRouter();

useEffect(() => {
// Load Tally script
@@ -42,8 +44,7 @@ const TallyPopupSimple = () => {
}

const resetTutorial = () => {
const url = `${window.location.origin}/build?resetTutorial=true`;
window.location.href = url;
router.push("/build?resetTutorial=true");
};

return (

@@ -33,6 +33,44 @@ const formSchema = z.object({
importAsTemplate: z.boolean(),
});

function updateBlockIDs(graph: Graph) {
// https://github.com/Significant-Gravitas/AutoGPT/issues/8223
const updatedBlockIDMap: Record<string, string> = {
"a1b2c3d4-5e6f-7g8h-9i0j-k1l2m3n4o5p6":
"436c3984-57fd-4b85-8e9a-459b356883bd",
"b2g2c3d4-5e6f-7g8h-9i0j-k1l2m3n4o5p6":
"0e50422c-6dee-4145-83d6-3a5a392f65de",
"c3d4e5f6-7g8h-9i0j-1k2l-m3n4o5p6q7r8":
"a0a69be1-4528-491c-a85a-a4ab6873e3f0",
"c3d4e5f6-g7h8-i9j0-k1l2-m3n4o5p6q7r8":
"32a87eab-381e-4dd4-bdb8-4c47151be35a",
"b2c3d4e5-6f7g-8h9i-0j1k-l2m3n4o5p6q7":
"87840993-2053-44b7-8da4-187ad4ee518c",
"h1i2j3k4-5l6m-7n8o-9p0q-r1s2t3u4v5w6":
"d0822ab5-9f8a-44a3-8971-531dd0178b6b",
"d3f4g5h6-1i2j-3k4l-5m6n-7o8p9q0r1s2t":
"df06086a-d5ac-4abb-9996-2ad0acb2eff7",
"h5e7f8g9-1b2c-3d4e-5f6g-7h8i9j0k1l2m":
"f5b0f5d0-1862-4d61-94be-3ad0fa772760",
"a1234567-89ab-cdef-0123-456789abcdef":
"4335878a-394e-4e67-adf2-919877ff49ae",
"f8e7d6c5-b4a3-2c1d-0e9f-8g7h6i5j4k3l":
"f66a3543-28d3-4ab5-8945-9b336371e2ce",
"b29c1b50-5d0e-4d9f-8f9d-1b0e6fcbf0h2":
"716a67b3-6760-42e7-86dc-18645c6e00fc",
"31d1064e-7446-4693-o7d4-65e5ca9110d1":
"cc10ff7b-7753-4ff2-9af6-9399b1a7eddc",
"c6731acb-4105-4zp1-bc9b-03d0036h370g":
"5ebe6768-8e5d-41e3-9134-1c7bd89a8d52",
};
graph.nodes
.filter((node) => node.block_id in updatedBlockIDMap)
.forEach((node) => {
node.block_id = updatedBlockIDMap[node.block_id];
});
return graph;
}

export const AgentImportForm: React.FC<
React.FormHTMLAttributes<HTMLFormElement>
> = ({ className, ...props }) => {
@@ -116,6 +154,7 @@ export const AgentImportForm: React.FC<
);
}
const agent = obj as Graph;
updateBlockIDs(agent);
setAgentObject(agent);
form.setValue("agentName", agent.name);
form.setValue("agentDescription", agent.description);

@@ -42,3 +42,7 @@
.react-flow__edges > svg:has(> g.selected) {
z-index: 10 !important;
}

.react-flow__edgelabel-renderer {
z-index: 11 !important;
}

@@ -23,6 +23,7 @@ interface SaveControlProps {
onSave: (isTemplate: boolean | undefined) => void;
onNameChange: (name: string) => void;
onDescriptionChange: (description: string) => void;
pinSavePopover: boolean;
}

/**
@@ -41,6 +42,7 @@ export const SaveControl = ({
onNameChange,
agentDescription,
onDescriptionChange,
pinSavePopover,
}: SaveControlProps) => {
/**
* Note for improvement:
@@ -59,11 +61,15 @@ export const SaveControl = ({
};

return (
<Popover>
<Popover open={pinSavePopover ? true : undefined}>
<Tooltip delayDuration={500}>
<TooltipTrigger asChild>
<PopoverTrigger asChild>
<Button variant="ghost" size="icon">
<Button
variant="ghost"
size="icon"
data-id="save-control-popover-trigger"
>
<IconSave />
</Button>
</PopoverTrigger>
@@ -81,6 +87,7 @@ export const SaveControl = ({
className="col-span-3"
value={agentName}
onChange={(e) => onNameChange(e.target.value)}
data-id="save-control-name-input"
/>
<Label htmlFor="description">Description</Label>
<Input
@@ -89,6 +96,7 @@ export const SaveControl = ({
className="col-span-3"
value={agentDescription}
onChange={(e) => onDescriptionChange(e.target.value)}
data-id="save-control-description-input"
/>
{agentMeta?.version && (
<>
@@ -105,13 +113,18 @@ export const SaveControl = ({
</div>
</CardContent>
<CardFooter className="flex flex-col items-stretch gap-2">
<Button className="w-full" onClick={handleSave}>
<Button
className="w-full"
onClick={handleSave}
data-id="save-control-save-agent"
>
Save {getType()}
</Button>
{!agentMeta && (
<Button
variant="secondary"
className="w-full"
data-id="save-control-template-button"
onClick={() => {
isTemplate = true;
handleSave();

@@ -74,6 +74,7 @@ export const CredentialsInput: FC<{
const [isOAuth2FlowInProgress, setOAuth2FlowInProgress] = useState(false);
const [oAuthPopupController, setOAuthPopupController] =
useState<AbortController | null>(null);
const [oAuthError, setOAuthError] = useState<string | null>(null);

if (!credentials) {
return null;
@@ -95,6 +96,7 @@ export const CredentialsInput: FC<{
} = credentials;

async function handleOAuthLogin() {
setOAuthError(null);
const { login_url, state_token } = await api.oAuthLogin(
provider,
schema.credentials_scopes,
@@ -102,46 +104,81 @@ export const CredentialsInput: FC<{
setOAuth2FlowInProgress(true);
const popup = window.open(login_url, "_blank", "popup=true");

if (!popup) {
throw new Error(
"Failed to open popup window. Please allow popups for this site.",
);
}

const controller = new AbortController();
setOAuthPopupController(controller);
controller.signal.onabort = () => {
console.debug("OAuth flow aborted");
setOAuth2FlowInProgress(false);
popup?.close();
popup.close();
};
popup?.addEventListener(
"message",
async (e: MessageEvent<OAuthPopupResultMessage>) => {
if (
typeof e.data != "object" ||
!(
"message_type" in e.data &&
e.data.message_type == "oauth_popup_result"
)
)
return;

if (!e.data.success) {
console.error("OAuth flow failed:", e.data.message);
return;
}
const handleMessage = async (e: MessageEvent<OAuthPopupResultMessage>) => {
console.debug("Message received:", e.data);
if (
typeof e.data != "object" ||
!("message_type" in e.data) ||
e.data.message_type !== "oauth_popup_result"
) {
console.debug("Ignoring irrelevant message");
return;
}

if (e.data.state !== state_token) return;
if (!e.data.success) {
console.error("OAuth flow failed:", e.data.message);
setOAuthError(`OAuth flow failed: ${e.data.message}`);
setOAuth2FlowInProgress(false);
return;
}

if (e.data.state !== state_token) {
console.error("Invalid state token received");
setOAuthError("Invalid state token received");
setOAuth2FlowInProgress(false);
return;
}

try {
console.debug("Processing OAuth callback");
const credentials = await oAuthCallback(e.data.code, e.data.state);
console.debug("OAuth callback processed successfully");
onSelectCredentials({
id: credentials.id,
type: "oauth2",
title: credentials.title,
provider,
});
} catch (error) {
console.error("Error in OAuth callback:", error);
setOAuthError(
// type of error is unknown, so we need to use String(error)
`Error in OAuth callback: ${
error instanceof Error ? error.message : String(error)
}`,
);
} finally {
console.debug("Finalizing OAuth flow");
setOAuth2FlowInProgress(false);
controller.abort("success");
},
{ signal: controller.signal },
);
}
};

console.debug("Adding message event listener");
window.addEventListener("message", handleMessage, {
signal: controller.signal,
});

setTimeout(
() => {
console.debug("OAuth flow timed out");
controller.abort("timeout");
setOAuth2FlowInProgress(false);
setOAuthError("OAuth flow timed out");
},
5 * 60 * 1000,
);
@@ -189,6 +226,9 @@ export const CredentialsInput: FC<{
)}
</div>
{modals}
{oAuthError && (
<div className="mt-2 text-red-500">Error: {oAuthError}</div>
)}
</>
);
}
@@ -251,6 +291,9 @@ export const CredentialsInput: FC<{
</SelectContent>
</Select>
{modals}
{oAuthError && (
<div className="mt-2 text-red-500">Error: {oAuthError}</div>
)}
</>
);
};

@@ -1,81 +1,44 @@
"use client";

import { useState } from "react";
import Link from "next/link";
import {
ArrowLeft,
Download,
Calendar,
Tag,
ChevronDown,
ChevronUp,
} from "lucide-react";
import { ArrowLeft, Download, Calendar, Tag } from "lucide-react";
import { Button } from "@/components/ui/button";
import {
AgentDetailResponse,
InstallationLocation,
} from "@/lib/marketplace-api";
import dynamic from "next/dynamic";
import { Node, Edge } from "@xyflow/react";
import MarketplaceAPI from "@/lib/marketplace-api";
import AutoGPTServerAPI, { GraphCreatable } from "@/lib/autogpt-server-api";

const ReactFlow = dynamic(
() => import("@xyflow/react").then((mod) => mod.ReactFlow),
{ ssr: false },
);
const Controls = dynamic(
() => import("@xyflow/react").then((mod) => mod.Controls),
{ ssr: false },
);
const Background = dynamic(
() => import("@xyflow/react").then((mod) => mod.Background),
{ ssr: false },
);

import "@xyflow/react/dist/style.css";
import { beautifyString } from "@/lib/utils";
import { makeAnalyticsEvent } from "./actions";

function convertGraphToReactFlow(graph: any): { nodes: Node[]; edges: Edge[] } {
const nodes: Node[] = graph.nodes.map((node: any) => {
let label = node.block_id || "Unknown";
try {
label = beautifyString(label);
} catch (error) {
console.error("Error beautifying node label:", error);
}
async function downloadAgent(id: string): Promise<void> {
const api = new MarketplaceAPI();
try {
const file = await api.downloadAgentFile(id);
console.debug(`Agent file downloaded:`, file);

return {
id: node.id,
position: node.metadata.position || { x: 0, y: 0 },
data: {
label,
blockId: node.block_id,
inputDefault: node.input_default || {},
...node, // Include all other node data
},
type: "custom",
};
});
// Create a Blob from the file content
const blob = new Blob([file], { type: "application/json" });

const edges: Edge[] = graph.links.map((link: any) => ({
id: `${link.source_id}-${link.sink_id}`,
source: link.source_id,
target: link.sink_id,
sourceHandle: link.source_name,
targetHandle: link.sink_name,
type: "custom",
data: {
sourceId: link.source_id,
targetId: link.sink_id,
sourceName: link.source_name,
targetName: link.sink_name,
isStatic: link.is_static,
},
}));
// Create a temporary URL for the Blob
const url = window.URL.createObjectURL(blob);

return { nodes, edges };
// Create a temporary anchor element
const a = document.createElement("a");
a.href = url;
a.download = `agent_${id}.json`; // Set the filename

// Append the anchor to the body, click it, and remove it
document.body.appendChild(a);
a.click();
document.body.removeChild(a);

// Revoke the temporary URL
window.URL.revokeObjectURL(url);
} catch (error) {
console.error(`Error downloading agent:`, error);
throw error;
}
}

async function installGraph(id: string): Promise<void> {
@@ -84,12 +47,12 @@ async function installGraph(id: string): Promise<void> {
"http://localhost:8015/api/v1/market";
const api = new MarketplaceAPI(apiUrl);

const serverAPIUrl = process.env.AGPT_SERVER_API_URL;
const serverAPIUrl = process.env.NEXT_PUBLIC_AGPT_SERVER_API_URL;
const serverAPI = new AutoGPTServerAPI(serverAPIUrl);
try {
console.log(`Installing agent with id: ${id}`);
console.debug(`Installing agent with id: ${id}`);
let agent = await api.downloadAgent(id);
console.log(`Agent downloaded:`, agent);
console.debug(`Agent downloaded:`, agent);
const data: GraphCreatable = {
id: agent.id,
version: agent.version,
@@ -109,7 +72,7 @@ async function installGraph(id: string): Promise<void> {
installation_location: InstallationLocation.CLOUD,
},
});
console.log(`Agent installed successfully`, result);
console.debug(`Agent installed successfully`, result);
} catch (error) {
console.error(`Error installing agent:`, error);
throw error;
@@ -117,9 +80,6 @@ async function installGraph(id: string): Promise<void> {
}

function AgentDetailContent({ agent }: { agent: AgentDetailResponse }) {
const [isGraphExpanded, setIsGraphExpanded] = useState(false);
const { nodes, edges } = convertGraphToReactFlow(agent.graph);

return (
<div className="mx-auto max-w-7xl px-4 py-4 sm:px-6 lg:px-8">
<div className="mb-4 flex items-center justify-between">
@@ -130,13 +90,22 @@ function AgentDetailContent({ agent }: { agent: AgentDetailResponse }) {
<ArrowLeft className="mr-2" size={20} />
Back to Marketplace
</Link>
<Button
onClick={() => installGraph(agent.id)}
className="inline-flex items-center rounded-md border border-transparent bg-indigo-600 px-4 py-2 text-sm font-medium text-white shadow-sm hover:bg-indigo-700 focus:outline-none focus:ring-2 focus:ring-indigo-500 focus:ring-offset-2"
>
<Download className="mr-2" size={16} />
Download Agent
</Button>
<div className="flex space-x-4">
<Button
onClick={() => installGraph(agent.id)}
className="inline-flex items-center rounded-md border border-transparent bg-indigo-600 px-4 py-2 text-sm font-medium text-white shadow-sm hover:bg-indigo-700 focus:outline-none focus:ring-2 focus:ring-indigo-500 focus:ring-offset-2"
>
<Download className="mr-2" size={16} />
Save to Templates
</Button>
<Button
onClick={() => downloadAgent(agent.id)}
className="inline-flex items-center rounded-md border border-transparent bg-indigo-600 px-4 py-2 text-sm font-medium text-white shadow-sm hover:bg-indigo-700 focus:outline-none focus:ring-2 focus:ring-indigo-500 focus:ring-offset-2"
>
<Download className="mr-2" size={16} />
Download Agent
</Button>
</div>
</div>
<div className="overflow-hidden bg-white shadow sm:rounded-lg">
<div className="px-4 py-5 sm:px-6">

27 autogpt_platform/frontend/src/components/nav/MarketPopup.tsx Normal file
@@ -0,0 +1,27 @@
import { ButtonHTMLAttributes } from "react";
import React from "react";

interface MarketPopupProps extends ButtonHTMLAttributes<HTMLButtonElement> {
marketplaceUrl?: string;
}

export default function MarketPopup({
className = "",
marketplaceUrl = "http://platform.agpt.co/marketplace",
children,
...props
}: MarketPopupProps) {
const openMarketplacePopup = () => {
window.open(
marketplaceUrl,
"popupWindow",
"width=600,height=400,toolbar=no,menubar=no,scrollbars=no",
);
};

return (
<button onClick={openMarketplacePopup} className={className} {...props}>
{children}
</button>
);
}
@@ -0,0 +1,74 @@
"use client";

import React from "react";
import Link from "next/link";
import { BsBoxes } from "react-icons/bs";
import { LuLaptop, LuShoppingCart } from "react-icons/lu";
import { BehaveAs, cn } from "@/lib/utils";
import { usePathname } from "next/navigation";
import { getBehaveAs } from "@/lib/utils";
import MarketPopup from "./MarketPopup";

export function NavBarButtons({ className }: { className?: string }) {
const pathname = usePathname();
const buttons = [
{
href: "/",
text: "Monitor",
icon: <LuLaptop />,
},
{
href: "/build",
text: "Build",
icon: <BsBoxes />,
},
];

const isCloud = getBehaveAs() === BehaveAs.CLOUD;

return (
<>
{buttons.map((button) => {
const isActive = button.href === pathname;
return (
<Link
key={button.href}
href={button.href}
className={cn(
className,
"flex items-center gap-2 rounded-xl p-3",
isActive
? "bg-gray-950 text-white"
: "text-muted-foreground hover:text-foreground",
)}
>
{button.icon} {button.text}
</Link>
);
})}
{isCloud ? (
<Link
href="/marketplace"
className={cn(
className,
"flex items-center gap-2 rounded-xl p-3",
pathname === "/marketplace"
? "bg-gray-950 text-white"
: "text-muted-foreground hover:text-foreground",
)}
>
<LuShoppingCart /> Marketplace
</Link>
) : (
<MarketPopup
className={cn(
className,
"flex items-center gap-2 rounded-xl p-3 text-muted-foreground hover:text-foreground",
)}
>
<LuShoppingCart /> Marketplace
</MarketPopup>
)}
</>
);
}
@@ -4,6 +4,7 @@ import "shepherd.js/dist/css/shepherd.css";

export const startTutorial = (
setPinBlocksPopover: (value: boolean) => void,
setPinSavePopover: (value: boolean) => void,
) => {
const tour = new Shepherd.Tour({
useModalOverlay: true,
@@ -20,27 +21,20 @@ export const startTutorial = (

// Helper function to disable all blocks except the target block
const disableOtherBlocks = (targetBlockSelector: string) => {
document
.querySelectorAll('[data-id^="add-block-button"]')
.forEach((block) => {
block.classList.toggle(
disableClass,
!block.matches(targetBlockSelector),
);
block.classList.toggle(
highlightClass,
block.matches(targetBlockSelector),
);
});
document.querySelectorAll('[data-id^="block-card-"]').forEach((block) => {
block.classList.toggle(disableClass, !block.matches(targetBlockSelector));
block.classList.toggle(
highlightClass,
block.matches(targetBlockSelector),
);
});
};

// Helper function to enable all blocks
const enableAllBlocks = () => {
document
.querySelectorAll('[data-id^="add-block-button"]')
.forEach((block) => {
block.classList.remove(disableClass, highlightClass);
});
document.querySelectorAll('[data-id^="block-card-"]').forEach((block) => {
block.classList.remove(disableClass, highlightClass);
});
};

// Inject CSS for disabling and highlighting blocks
@@ -78,7 +72,7 @@ export const startTutorial = (
const detectConnection = () => {
const checkForConnection = () => {
const correctConnection = document.querySelector(
'[data-testid="rf__edge-1_result_2_a"]',
'[data-testid^="rf__edge-"]',
);
if (correctConnection) {
tour.show("press-run-again");
@@ -117,12 +111,12 @@ export const startTutorial = (
function handleMouseUp(event: { target: any }) {
const target = event.target;
const validConnectionPoint = document.querySelector(
'[data-id="2-a-target"]',
'[data-id="custom-node-2"] [data-handlepos="left"]',
);

if (validConnectionPoint && !validConnectionPoint.contains(target)) {
setTimeout(() => {
if (!document.querySelector('[data-testid="rf__edge-1_result_2_a"]')) {
if (!document.querySelector('[data-testid^="rf__edge-"]')) {
stopConnecting();
tour.show("connect-blocks-output");
}
@@ -314,32 +308,127 @@ export const startTutorial = (
});

tour.addStep({
id: "press-run",
title: "Press Run",
text: "Start your first flow by pressing the Run button!",
attachTo: { element: '[data-id="control-button-2"]', on: "right" },
advanceOn: { selector: '[data-id="control-button-2"]', event: "click" },
id: "press-initial-save-button",
title: "Press Save",
text: "First we need to save the flow before we can run it!",
attachTo: {
element: '[data-id="save-control-popover-trigger"]',
on: "left",
},
advanceOn: {
selector: '[data-id="save-control-popover-trigger"]',
event: "click",
},
buttons: [
{
text: "Back",
action: tour.back,
},
],
when: {
hide: () => setPinSavePopover(true),
},
});

tour.addStep({
id: "enter-agent-name",
title: "Enter Agent Name",
text: 'Please enter any agent name, here we can just call it "Tutorial" if you\'d like.',
attachTo: {
element: '[data-id="save-control-name-input"]',
on: "bottom",
},
buttons: [
{
text: "Back",
action: tour.back,
},
{
text: "Next",
action: tour.next,
},
],
beforeShowPromise: () =>
waitForElement('[data-id="save-control-name-input"]'),
});

tour.addStep({
id: "enter-agent-description",
title: "Enter Agent Description",
text: "This is where you can add a description if you'd like, but that is optional.",
attachTo: {
element: '[data-id="save-control-description-input"]',
on: "bottom",
},
buttons: [
{
text: "Back",
action: tour.back,
},
{
text: "Next",
action: tour.next,
},
],
});

tour.addStep({
id: "save-agent",
title: "Save the Agent",
text: "Now, let's save the agent by clicking the 'Save agent' button.",
attachTo: {
element: '[data-id="save-control-save-agent"]',
on: "top",
},
advanceOn: {
selector: '[data-id="save-control-save-agent"]',
event: "click",
},
buttons: [],
when: {
hide: () => setPinSavePopover(false),
},
});

tour.addStep({
id: "press-run",
title: "Press Run",
text: "Start your first flow by pressing the Run button!",
attachTo: { element: '[data-id="primary-action-run-agent"]', on: "top" },
advanceOn: {
selector: '[data-id="primary-action-run-agent"]',
event: "click",
},
buttons: [],
beforeShowPromise: () =>
waitForElement('[data-id="primary-action-run-agent"]'),
when: {
hide: () => {
setTimeout(() => {
fitViewToScreen();
}, 500);
},
},
});

tour.addStep({
id: "wait-for-processing",
title: "Processing",
text: "Let's wait for the block to finish being processed...",
attachTo: { element: '[data-id="badge-1-QUEUED"]', on: "bottom" },
attachTo: {
element: '[data-id^="badge-"][data-id$="-QUEUED"]',
on: "bottom",
},
buttons: [],
beforeShowPromise: () => waitForElement('[data-id="badge-1-QUEUED"]'),
beforeShowPromise: () =>
waitForElement('[data-id^="badge-"][data-id$="-QUEUED"]'),
when: {
show: () => {
fitViewToScreen();
waitForElement('[data-id="badge-1-COMPLETED"]').then(() => {
tour.next();
});
waitForElement('[data-id^="badge-"][data-id$="-COMPLETED"]').then(
() => {
tour.next();
},
);
},
},
});
@@ -371,7 +460,7 @@ export const startTutorial = (
id: "copy-paste-block",
title: "Copy and Paste the Block",
text: "Let’s duplicate this block. Click and hold the block with your mouse, then press Ctrl+C (Cmd+C on Mac) to copy and Ctrl+V (Cmd+V on Mac) to paste.",
attachTo: { element: `[data-id="custom-node-1"]`, on: "top" },
attachTo: { element: '[data-id^="custom-node-"]', on: "top" },
buttons: [
{
text: "Back",
@@ -392,8 +481,9 @@ export const startTutorial = (
id: "focus-second-block",
title: "Focus on the New Block",
text: "This is your copied Calculator Block. Now, let’s move it to the side of the first block.",
attachTo: { element: `[data-id="custom-node-2"]`, on: "top" },
beforeShowPromise: () => waitForElement('[data-id="custom-node-2"]'),
attachTo: { element: `[data-id^="custom-node-"][data-id$="2"]`, on: "top" },
beforeShowPromise: () =>
waitForElement('[data-id^="custom-node-"][data-id$="2"]'),
buttons: [
{
text: "Next",
@@ -405,8 +495,13 @@ export const startTutorial = (
tour.addStep({
id: "connect-blocks-output",
title: "Connect the Blocks: Output",
text: "Now, let’s connect the output of the first Calculator Block to the input of the second Calculator Block. Drag from the output pin of the first block to the input pin (A) of the second block.",
attachTo: { element: '[data-id="1-1-result-source"]', on: "bottom" },
text: "Now, let's connect the output of the first Calculator Block to the input of the second Calculator Block. Drag from the output pin of the first block to the input pin (A) of the second block.",
attachTo: {
element:
'[data-id^="1-"][data-id$="-result-source"]:not([data-id="1-2-result-source"])',
on: "bottom",
},

buttons: [
{
text: "Back",
@@ -414,7 +509,9 @@ export const startTutorial = (
},
],
beforeShowPromise: () => {
return waitForElement('[data-id="1-1-result-source"]');
return waitForElement(
'[data-id^="1-"][data-id$="-result-source"]:not([data-id="1-2-result-source"])',
).then(() => {});
},
when: {
show: () => {
@@ -422,7 +519,7 @@ export const startTutorial = (
resetConnectionState(); // Reset state when revisiting this step
tour.modal.show();
const outputPin = document.querySelector(
'[data-id="1-1-result-source"]',
'[data-id^="1-"][data-id$="-result-source"]:not([data-id="1-2-result-source"])',
);
if (outputPin) {
outputPin.addEventListener("mousedown", handleMouseDown);
@@ -430,7 +527,7 @@ export const startTutorial = (
},
hide: () => {
const outputPin = document.querySelector(
'[data-id="1-1-result-source"]',
'[data-id^="1-"][data-id$="-result-source"]:not([data-id="1-2-result-source"])',
);
if (outputPin) {
outputPin.removeEventListener("mousedown", handleMouseDown);
@@ -443,7 +540,10 @@ export const startTutorial = (
id: "connect-blocks-input",
title: "Connect the Blocks: Input",
text: "Now, connect the output to the input pin of the second block (A).",
attachTo: { element: '[data-id="1-2-a-target"]', on: "top" },
attachTo: {
element: '[data-id="1-2-a-target"]',
on: "top",
},
buttons: [],
beforeShowPromise: () => {
return waitForElement('[data-id="1-2-a-target"]').then(() => {
@@ -466,9 +566,21 @@ export const startTutorial = (
id: "press-run-again",
title: "Press Run Again",
text: "Now, press the Run button again to execute the flow with the new Calculator Block added!",
attachTo: { element: '[data-id="control-button-2"]', on: "right" },
advanceOn: { selector: '[data-id="control-button-2"]', event: "click" },
attachTo: { element: '[data-id="primary-action-run-agent"]', on: "top" },
advanceOn: {
selector: '[data-id="primary-action-run-agent"]',
event: "click",
},
buttons: [],
beforeShowPromise: () =>
waitForElement('[data-id="primary-action-run-agent"]'),
when: {
hide: () => {
setTimeout(() => {
fitViewToScreen();
}, 500);
},
},
});

tour.addStep({
@@ -487,9 +599,10 @@ export const startTutorial = (
],
});

// Unpin blocks when the tour is completed or canceled
// Unpin blocks and save menu when the tour is completed or canceled
tour.on("complete", () => {
setPinBlocksPopover(false);
setPinSavePopover(false);
localStorage.setItem("shepherd-tour", "completed"); // Optionally mark the tutorial as completed
});

@@ -504,7 +617,9 @@ export const startTutorial = (

tour.on("cancel", () => {
setPinBlocksPopover(false);
setPinSavePopover(false);
localStorage.setItem("shepherd-tour", "canceled"); // Optionally mark the tutorial as canceled
});

tour.start();
};

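Note: every selector change in the tutorial above follows the same pattern: hard-coded element ids (custom-node-1, badge-1-QUEUED, 1-1-result-source) are replaced by CSS attribute prefix/suffix matching, so the tour no longer depends on the exact numeric ids the graph happens to assign. A quick sketch of the mechanism:

// [attr^="..."] matches an attribute prefix, [attr$="..."] a suffix, so this
// finds badge-1-QUEUED, badge-42-QUEUED, and so on.
const queuedBadge = document.querySelector('[data-id^="badge-"][data-id$="-QUEUED"]');
// :not() then excludes one concrete id, as the result-source selector does:
const outputPin = document.querySelector(
  '[data-id^="1-"][data-id$="-result-source"]:not([data-id="1-2-result-source"])',
);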
@@ -18,7 +18,8 @@ const isValidVideoUrl = (url: string): boolean => {

const isValidImageUrl = (url: string): boolean => {
const imageExtensions = /\.(jpeg|jpg|gif|png|svg|webp)$/i;
return imageExtensions.test(url);
const cleanedUrl = url.split("?")[0];
return imageExtensions.test(cleanedUrl);
};

const VideoRenderer: React.FC<{ videoUrl: string }> = ({ videoUrl }) => {

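Note: the cleanedUrl change makes the extension check tolerant of query strings, which signed or cache-busted image URLs usually carry. An illustration with made-up URLs:

isValidImageUrl("https://example.com/pic.png");              // true, as before
isValidImageUrl("https://example.com/pic.png?token=abc123"); // now true; the old regex failed because this URL does not end in ".png"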
@@ -45,19 +45,19 @@ export default class BaseAutoGPTServerAPI {
return session != null;
}

async createUser(): Promise<User> {
createUser(): Promise<User> {
return this._request("POST", "/auth/user", {});
}

async getUserCredit(): Promise<{ credits: number }> {
getUserCredit(): Promise<{ credits: number }> {
return this._get(`/credits`);
}

async getBlocks(): Promise<Block[]> {
return await this._get("/blocks");
getBlocks(): Promise<Block[]> {
return this._get("/blocks");
}

async listGraphs(): Promise<GraphMeta[]> {
listGraphs(): Promise<GraphMeta[]> {
return this._get(`/graphs`);
}

@@ -66,34 +66,31 @@ export default class BaseAutoGPTServerAPI {
return graphs.map(parseGraphMetaWithRuns);
}

async listTemplates(): Promise<GraphMeta[]> {
listTemplates(): Promise<GraphMeta[]> {
return this._get("/templates");
}

async getGraph(id: string, version?: number): Promise<Graph> {
getGraph(id: string, version?: number): Promise<Graph> {
const query = version !== undefined ? `?version=${version}` : "";
return this._get(`/graphs/${id}` + query);
}

async getTemplate(id: string, version?: number): Promise<Graph> {
getTemplate(id: string, version?: number): Promise<Graph> {
const query = version !== undefined ? `?version=${version}` : "";
return this._get(`/templates/${id}` + query);
}

async getGraphAllVersions(id: string): Promise<Graph[]> {
getGraphAllVersions(id: string): Promise<Graph[]> {
return this._get(`/graphs/${id}/versions`);
}

async getTemplateAllVersions(id: string): Promise<Graph[]> {
getTemplateAllVersions(id: string): Promise<Graph[]> {
return this._get(`/templates/${id}/versions`);
}

async createGraph(graphCreateBody: GraphCreatable): Promise<Graph>;
async createGraph(
fromTemplateID: string,
templateVersion: number,
): Promise<Graph>;
async createGraph(
createGraph(graphCreateBody: GraphCreatable): Promise<Graph>;
createGraph(fromTemplateID: string, templateVersion: number): Promise<Graph>;
createGraph(
graphOrTemplateID: GraphCreatable | string,
templateVersion?: number,
): Promise<Graph> {
@@ -114,36 +111,33 @@ export default class BaseAutoGPTServerAPI {
return this._request("POST", "/graphs", requestBody);
}

async createTemplate(templateCreateBody: GraphCreatable): Promise<Graph> {
createTemplate(templateCreateBody: GraphCreatable): Promise<Graph> {
const requestBody: GraphCreateRequestBody = { graph: templateCreateBody };
return this._request("POST", "/templates", requestBody);
}

async updateGraph(id: string, graph: GraphUpdateable): Promise<Graph> {
return await this._request("PUT", `/graphs/${id}`, graph);
updateGraph(id: string, graph: GraphUpdateable): Promise<Graph> {
return this._request("PUT", `/graphs/${id}`, graph);
}

async updateTemplate(id: string, template: GraphUpdateable): Promise<Graph> {
return await this._request("PUT", `/templates/${id}`, template);
updateTemplate(id: string, template: GraphUpdateable): Promise<Graph> {
return this._request("PUT", `/templates/${id}`, template);
}

async setGraphActiveVersion(id: string, version: number): Promise<Graph> {
setGraphActiveVersion(id: string, version: number): Promise<Graph> {
return this._request("PUT", `/graphs/${id}/versions/active`, {
active_graph_version: version,
});
}

async executeGraph(
executeGraph(
id: string,
inputData: { [key: string]: any } = {},
): Promise<GraphExecuteResponse> {
return this._request("POST", `/graphs/${id}/execute`, inputData);
}

async listGraphRunIDs(
graphID: string,
graphVersion?: number,
): Promise<string[]> {
listGraphRunIDs(graphID: string, graphVersion?: number): Promise<string[]> {
const query =
graphVersion !== undefined ? `?graph_version=${graphVersion}` : "";
return this._get(`/graphs/${graphID}/executions` + query);
@@ -167,15 +161,15 @@ export default class BaseAutoGPTServerAPI {
).map(parseNodeExecutionResultTimestamps);
}

async oAuthLogin(
oAuthLogin(
provider: string,
scopes?: string[],
): Promise<{ login_url: string; state_token: string }> {
const query = scopes ? { scopes: scopes.join(",") } : undefined;
return await this._get(`/integrations/${provider}/login`, query);
return this._get(`/integrations/${provider}/login`, query);
}

async oAuthCallback(
oAuthCallback(
provider: string,
code: string,
state_token: string,
@@ -186,7 +180,7 @@ export default class BaseAutoGPTServerAPI {
});
}

async createAPIKeyCredentials(
createAPIKeyCredentials(
credentials: Omit<APIKeyCredentials, "id" | "type">,
): Promise<APIKeyCredentials> {
return this._request(
@@ -196,29 +190,29 @@ export default class BaseAutoGPTServerAPI {
);
}

async listCredentials(provider: string): Promise<CredentialsMetaResponse[]> {
listCredentials(provider: string): Promise<CredentialsMetaResponse[]> {
return this._get(`/integrations/${provider}/credentials`);
}

async getCredentials(
getCredentials(
provider: string,
id: string,
): Promise<APIKeyCredentials | OAuth2Credentials> {
return this._get(`/integrations/${provider}/credentials/${id}`);
}

async deleteCredentials(provider: string, id: string): Promise<void> {
deleteCredentials(provider: string, id: string): Promise<void> {
return this._request(
"DELETE",
`/integrations/${provider}/credentials/${id}`,
);
}

async logMetric(metric: AnalyticsMetrics) {
logMetric(metric: AnalyticsMetrics) {
return this._request("POST", "/analytics/log_raw_metric", metric);
}

async logAnalytic(analytic: AnalyticsDetails) {
logAnalytic(analytic: AnalyticsDetails) {
return this._request("POST", "/analytics/log_raw_analytics", analytic);
}

|
||||
): T[] {
|
||||
return blocks.filter(predicate);
|
||||
}
|
||||
|
||||
export enum BehaveAs {
|
||||
CLOUD = "CLOUD",
|
||||
LOCAL = "LOCAL",
|
||||
}
|
||||
|
||||
export function getBehaveAs(): BehaveAs {
|
||||
return process.env.NEXT_PUBLIC_BEHAVE_AS === "CLOUD"
|
||||
? BehaveAs.CLOUD
|
||||
: BehaveAs.LOCAL;
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
@@ -0,0 +1,11 @@
apiVersion: v1
kind: ConfigMap
metadata:
name: {{ include "autogpt-builder.fullname" . }}-config
labels:
{{- include "autogpt-builder.labels" . | nindent 4 }}
data:
.env.local: |-
{{- range $key, $value := .Values.env }}
{{ $key }}={{ $value }}
{{- end }}
@@ -5,4 +5,4 @@ metadata:
spec:
redirectToHttps:
enabled: true
responseCodeName: 301
responseCodeName: MOVED_PERMANENTLY_DEFAULT
@@ -4,4 +4,5 @@ metadata:
name: {{ include "autogpt-builder.fullname" . }}-cert
spec:
domains:
- {{ .Values.domain }}
- {{ .Values.domain }}
- {{ .Values.wwwDomain }}
@@ -53,12 +53,17 @@ livenessProbe: null
readinessProbe: null

domain: "dev-builder.agpt.co"
wwwDomain: "www.dev-builder.agpt.co"


env:
APP_ENV: "dev"
NEXT_PUBLIC_AGPT_SERVER_URL: ["http://agpt-server:8000/api"]
NEXT_PUBLIC_AGPT_SERVER_URL: ""
GOOGLE_CLIENT_ID: ""
GOOGLE_CLIENT_SECRET: ""
NEXT_PUBLIC_SUPABASE_URL: ""
NEXT_PUBLIC_SUPABASE_ANON_KEY: ""
NEXT_PUBLIC_SUPABASE_ANON_KEY: ""
SENTRY_AUTH_TOKEN: ""
NEXT_PUBLIC_AUTH_CALLBACK_URL: "https://dev-server.agpt.co/auth/callback"
NEXT_PUBLIC_AGPT_WS_SERVER_URL: "wss://dev-ws-server.agpt.co/ws"
NEXT_PUBLIC_AGPT_MARKETPLACE_URL: "https://dev-market.agpt.co/api/v1/market"
88 autogpt_platform/infra/helm/autogpt-builder/values.prod.yaml Normal file
@@ -0,0 +1,88 @@
# prod values, overwrite base values as needed.

image:
repository: us-east1-docker.pkg.dev/agpt-prod/agpt-frontend-prod/agpt-frontend-prod
pullPolicy: Always
tag: "latest"

serviceAccount:
annotations:
iam.gke.io/gcp-service-account: "prod-agpt-builder-sa@agpt-prod.iam.gserviceaccount.com"
name: "prod-agpt-builder-sa"

service:
type: ClusterIP
port: 8000
targetPort: 3000
annotations:
cloud.google.com/neg: '{"ingress": true}'

ingress:
enabled: true
className: "gce"
annotations:
kubernetes.io/ingress.class: gce
kubernetes.io/ingress.global-static-ip-name: "agpt-prod-agpt-frontend-ip"
networking.gke.io/managed-certificates: "autogpt-builder-cert"
kubernetes.io/ingress.allow-http: "true"
networking.gke.io/v1beta1.FrontendConfig: "autogpt-builder-frontend-config"
hosts:
- host: platform.agpt.co
paths:
- path: /
pathType: Prefix
backend:
service:
name: autogpt-builder
port: 8000
- host: www.platform.agpt.co
paths:
- path: /
pathType: Prefix
backend:
service:
name: autogpt-builder
port: 8000
defaultBackend:
service:
name: autogpt-builder
port:
number: 8000

resources:
requests:
cpu: 100m
memory: 128Mi
limits:
cpu: 500m
memory: 512Mi

livenessProbe: null
readinessProbe: null

domain: "platform.agpt.co"
wwwDomain: "www.platform.agpt.co"

volumes:
- name: env-config
configMap:
name: 'autogpt-builder-config'

volumeMounts:
- name: env-config
mountPath: /app/.env.local
subPath: .env.local
readonly: true


env:
APP_ENV: "prod"
NEXT_PUBLIC_AUTH_CALLBACK_URL: "https://backend.agpt.co/auth/callback"
NEXT_PUBLIC_AGPT_SERVER_URL: "https://backend.agpt.co/api"
NEXT_PUBLIC_AGPT_WS_SERVER_URL: "wss://ws-backend.agpt.co/ws"
NEXT_PUBLIC_AGPT_MARKETPLACE_URL: "https://market.agpt.co/api/v1/market"
NEXT_PUBLIC_SUPABASE_ANON_KEY: ""
NEXT_PUBLIC_SUPABASE_URL: "https://bgwpwdsxblryihinutbx.supabase.co"
GOOGLE_CLIENT_ID: ""
GOOGLE_CLIENT_SECRET: ""
SENTRY_AUTH_TOKEN: ""
@@ -68,17 +68,6 @@ readinessProbe:

domain: "dev-market.agpt.co"

cloudSqlProxy:
image:
repository: gcr.io/cloud-sql-connectors/cloud-sql-proxy
tag: 2.11.4
instanceConnectionName: "agpt-dev:us-central1:agpt-server-dev"
port: 5432
resources:
requests:
memory: "2Gi"
cpu: "1"

cors:
allowOrigin: "https://dev-builder.agpt.co"
allowMethods:

@@ -68,17 +68,6 @@ readinessProbe:

domain: "market.agpt.co"

cloudSqlProxy:
image:
repository: gcr.io/cloud-sql-connectors/cloud-sql-proxy
tag: 2.11.4
instanceConnectionName: "agpt-prod:us-central1:agpt-server-prod"
port: 5432
resources:
requests:
memory: "2Gi"
cpu: "1"

cors:
allowOrigin: "https://platform.agpt.co"
allowMethods:

@@ -0,0 +1,26 @@
apiVersion: cloud.google.com/v1
kind: BackendConfig
metadata:
name: {{ include "autogpt-server.fullname" . }}-backend-config
spec:
customRequestHeaders:
headers:
- "Access-Control-Allow-Origin:{{ .Values.cors.allowOrigin }}"
- "Access-Control-Allow-Methods:{{ .Values.cors.allowMethods | join "," }}"
- "Access-Control-Allow-Headers:{{ .Values.cors.allowHeaders | join "," }}"
- "Access-Control-Max-Age:{{ .Values.cors.maxAge }}"
{{- if .Values.cors.allowCredentials }}
- "Access-Control-Allow-Credentials:true"
{{- end }}
customResponseHeaders:
headers:
- "Access-Control-Allow-Origin:{{ .Values.cors.allowOrigin }}"
- "Access-Control-Allow-Methods:{{ .Values.cors.allowMethods | join "," }}"
- "Access-Control-Allow-Headers:{{ .Values.cors.allowHeaders | join "," }}"
- "Access-Control-Max-Age:{{ .Values.cors.maxAge }}"
{{- if .Values.cors.allowCredentials }}
- "Access-Control-Allow-Credentials:true"
{{- end }}
timeoutSec: 1800
connectionDraining:
drainingTimeoutSec: 1800
@@ -0,0 +1,68 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: {{ include "autogpt-server.fullname" . }}-executor
labels:
app.kubernetes.io/component: executor
spec:
{{- if not .Values.autoscaling.enabled }}
replicas: {{ .Values.replicaCount }}
{{- end }}
selector:
matchLabels:
app.kubernetes.io/component: executor
template:
metadata:
{{- with .Values.podAnnotations }}
annotations:
{{- toYaml . | nindent 8 }}
{{- end }}
labels:
app.kubernetes.io/component: executor
{{- with .Values.podLabels }}
{{- toYaml . | nindent 8 }}
{{- end }}
spec:
{{- with .Values.imagePullSecrets }}
imagePullSecrets:
{{- toYaml . | nindent 8 }}
{{- end }}
serviceAccountName: {{ include "autogpt-server.serviceAccountName" . }}
securityContext:
{{- toYaml .Values.podSecurityContext | nindent 8 }}
containers:
- name: {{ .Chart.Name }}
envFrom:
- configMapRef:
name: {{ include "autogpt-server.fullname" . }}-config
securityContext:
{{- toYaml .Values.securityContext | nindent 12 }}
image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
imagePullPolicy: {{ .Values.image.pullPolicy }}
command: ["poetry", "run", "executor"]
ports:
- name: http
containerPort: {{ .Values.serviceExecutor.port }}
protocol: TCP
resources:
{{- toYaml .Values.resources | nindent 12 }}
{{- with .Values.volumeMounts }}
volumeMounts:
{{- toYaml . | nindent 12 }}
{{- end }}
{{- with .Values.volumes }}
volumes:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.nodeSelector }}
nodeSelector:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.tolerations }}
tolerations:
{{- toYaml . | nindent 8 }}
{{- end }}
@@ -44,6 +44,9 @@ spec:
- name: http
containerPort: {{ .Values.service.port }}
protocol: TCP
- name: pyro
containerPort: 8004
protocol: TCP
livenessProbe:
{{- toYaml .Values.livenessProbe | nindent 12 }}
readinessProbe:
@@ -54,15 +57,6 @@ spec:
volumeMounts:
{{- toYaml . | nindent 12 }}
{{- end }}
- name: cloud-sql-proxy
image: "{{ .Values.cloudSqlProxy.image.repository }}:{{ .Values.cloudSqlProxy.image.tag }}"
args:
- "--structured-logs"
{{- if .Values.cloudSqlProxy.usePrivateIp }}
- "--private-ip"
{{- end }}
- "--port={{ .Values.cloudSqlProxy.port }}"
- "{{ .Values.cloudSqlProxy.instanceConnectionName }}"
{{- with .Values.volumes }}
volumes:
{{- toYaml . | nindent 8 }}

@@ -0,0 +1,19 @@
apiVersion: v1
kind: Service
metadata:
name: {{ include "autogpt-server.fullname" . }}-executor
labels:
app.kubernetes.io/component: executor
{{- with .Values.service.annotations }}
annotations:
{{- toYaml . | nindent 4 }}
{{- end }}
spec:
type: {{ .Values.serviceExecutor.type }}
ports:
- port: {{ .Values.serviceExecutor.port }}
targetPort: http
protocol: TCP
name: http
selector:
app.kubernetes.io/component: executor
@@ -15,5 +15,11 @@ spec:
targetPort: http
protocol: TCP
name: http
{{- if .Values.service.pyroDaemonPort }}
- port: {{ .Values.service.pyroDaemonPort }}
targetPort: pyro
protocol: TCP
name: pyro
{{- end }}
selector:
{{- include "autogpt-server.selectorLabels" . | nindent 4 }}
{{- include "autogpt-server.selectorLabels" . | nindent 4 }}
@@ -12,10 +12,18 @@ serviceAccount:

service:
type: ClusterIP
port: 8000
targetPort: 8000
port: 8006
pyroDaemonPort: 8004
annotations:
cloud.google.com/neg: '{"ingress": true}'
beta.cloud.google.com/backend-config: '{"default": "autogpt-server-backend-config"}'

serviceExecutor:
type: ClusterIP
port: 8002
targetPort: 8002
annotations:
beta.cloud.google.com/backend-config: '{"default": "autogpt-server-backend-config"}'

ingress:
enabled: true
@@ -33,12 +41,12 @@ ingress:
backend:
service:
name: autogpt-server
port: 8000
port: 8006
defaultBackend:
service:
name: autogpt-server
port:
number: 8000
number: 8006

resources:
requests:
@@ -46,12 +54,12 @@ resources:
memory: 256Mi
limits:
cpu: 2
memory: 2Gi
memory: 10Gi

livenessProbe:
httpGet:
path: /docs
port: 8000
port: 8006
initialDelaySeconds: 30
periodSeconds: 10
timeoutSeconds: 5
@@ -59,7 +67,7 @@ livenessProbe:
readinessProbe:
httpGet:
path: /docs
port: 8000
port: 8006
initialDelaySeconds: 30
periodSeconds: 10
timeoutSeconds: 5
@@ -67,28 +75,39 @@ readinessProbe:

domain: "dev-server.agpt.co"

cloudSqlProxy:
image:
repository: gcr.io/cloud-sql-connectors/cloud-sql-proxy
tag: 2.11.4
instanceConnectionName: "agpt-dev:us-central1:agpt-server-dev"
port: 5432
resources:
requests:
memory: "2Gi"
cpu: "1"
cors:
allowOrigin: "https://dev-builder.agpt.co"
allowMethods:
- "GET"
- "POST"
- "PUT"
- "DELETE"
- "OPTIONS"
allowHeaders:
- "Content-Type"
- "Authorization"
maxAge: 3600
allowCredentials: true

env:
APP_ENV: "dev"
PYRO_HOST: "0.0.0.0"
NUM_GRAPH_WORKERS: 100
NUM_NODE_WORKERS: 5
ENABLE_AUTH: "true"
REDIS_HOST: "redis-dev-master.redis-dev.svc.cluster.local"
REDIS_PORT: "6379"
OPENAI_API_KEY: ""
SUPABASE_JWT_SECRET: ""
REDIS_PASSWORD: "password"
NUM_GRAPH_WORKERS: 10
NUM_NODE_WORKERS: 5
DATABASE_URL: ""
SENTRY_DSN: ""
ENABLE_CREDIT: "true"
BACKEND_CORS_ALLOW_ORIGINS: '["https://dev-builder.agpt.co"]'
SUPABASE_SERVICE_ROLE_KEY: ""
GITHUB_CLIENT_ID: ""
GITHUB_CLIENT_SECRET: ""
FRONTEND_BASE_URL: ""
SUPABASE_URL: ""
SUPABASE_JWT_SECRET: ""
FRONTEND_BASE_URL: "https://dev-builder.agpt.co/"
SUPABASE_URL: "https://adfjtextkuilwuhzdjpf.supabase.co"
AGENTSERVER_HOST: "autogpt-server.dev-agpt.svc.cluster.local"
EXECUTIONMANAGER_HOST: "autogpt-server-executor.dev-agpt.svc.cluster.local"
113 autogpt_platform/infra/helm/autogpt-server/values.prod.yaml Normal file
@@ -0,0 +1,113 @@
# prod values, overwrite base values as needed.

image:
repository: us-east1-docker.pkg.dev/agpt-prod/agpt-backend-prod/agpt-backend-prod
pullPolicy: Always
tag: "latest"

serviceAccount:
annotations:
iam.gke.io/gcp-service-account: "prod-agpt-server-sa@agpt-prod.iam.gserviceaccount.com"
name: "prod-agpt-server-sa"

service:
type: ClusterIP
port: 8006
pyroDaemonPort: 8004
annotations:
cloud.google.com/neg: '{"ingress": true}'
beta.cloud.google.com/backend-config: '{"default": "autogpt-server-backend-config"}'

serviceExecutor:
type: ClusterIP
port: 8002
targetPort: 8002
annotations:
beta.cloud.google.com/backend-config: '{"default": "autogpt-server-backend-config"}'

ingress:
enabled: true
className: "gce"
annotations:
kubernetes.io/ingress.class: gce
kubernetes.io/ingress.global-static-ip-name: "agpt-prod-agpt-backend-ip"
networking.gke.io/managed-certificates: "autogpt-server-cert"
networking.gke.io/v1beta1.FrontendConfig: "autogpt-server-frontend-config"
hosts:
- host: backend.agpt.co
paths:
- path: /
pathType: Prefix
backend:
service:
name: autogpt-server
port: 8006
defaultBackend:
service:
name: autogpt-server
port:
number: 8006

resources:
requests:
cpu: 200m
memory: 256Mi
limits:
cpu: 2
memory: 10Gi

cors:
allowOrigin: "https://platform.agpt.co"
allowMethods:
- "GET"
- "POST"
- "PUT"
- "DELETE"
- "OPTIONS"
allowHeaders:
- "Content-Type"
- "Authorization"
maxAge: 3600
allowCredentials: true

livenessProbe:
httpGet:
path: /docs
port: 8006
initialDelaySeconds: 30
periodSeconds: 10
timeoutSeconds: 5
failureThreshold: 6
readinessProbe:
httpGet:
path: /docs
port: 8006
initialDelaySeconds: 30
periodSeconds: 10
timeoutSeconds: 5
failureThreshold: 6

domain: "backend.agpt.co"

env:
APP_ENV: "prod"
PYRO_HOST: "0.0.0.0"
ENABLE_AUTH: "true"
REDIS_HOST: "redis-prod-master.redis-prod.svc.cluster.local"
REDIS_PORT: "6379"
OPENAI_API_KEY: ""
REDIS_PASSWORD: ""
NUM_GRAPH_WORKERS: 10
NUM_NODE_WORKERS: 5
ENABLE_CREDIT: "true"
BACKEND_CORS_ALLOW_ORIGINS: '["https://platform.agpt.co"]'
SUPABASE_JWT_SECRET: ""
DATABASE_URL: ""
SENTRY_DSN: ""
SUPABASE_SERVICE_ROLE_KEY: ""
FRONTEND_BASE_URL: "https://platform.agpt.co/"
SUPABASE_URL: "https://bgwpwdsxblryihinutbx.supabase.co"
AGENTSERVER_HOST: "autogpt-server.prod-agpt.svc.cluster.local"
EXECUTIONMANAGER_HOST: "autogpt-server-executor.prod-agpt.svc.cluster.local"
GITHUB_CLIENT_ID: ""
GITHUB_CLIENT_SECRET: ""
@@ -45,17 +45,9 @@ resources:
cpu: 100m
memory: 128Mi
limits:
cpu: 500m
memory: 512Mi
cpu: 1
memory: 1Gi

livenessProbe:
httpGet:
path: /
port: http
readinessProbe:
httpGet:
path: /
port: http

autoscaling:
enabled: false
@@ -75,13 +67,18 @@ affinity: {}

domain: ""

cloudSqlProxy:
image:
repository: gcr.io/cloud-sql-connectors/cloud-sql-proxy
tag: 2.11.4
instanceConnectionName: ""
port: 5432
resources:
requests:
memory: "2Gi"
cpu: "1"
cors:
allowOrigins:
- "https://dev-builder.agpt.co"
allowMethods:
- "GET"
- "POST"
- "PUT"
- "DELETE"
- "OPTIONS"
allowHeaders:
- "Content-Type"
- "Authorization"
maxAge: 3600
allowCredentials: true


@@ -0,0 +1,26 @@
apiVersion: cloud.google.com/v1
kind: BackendConfig
metadata:
name: {{ include "autogpt-websocket-server.fullname" . }}-backend-config
spec:
customRequestHeaders:
headers:
- "Access-Control-Allow-Origin:{{ .Values.cors.allowOrigin }}"
- "Access-Control-Allow-Methods:{{ .Values.cors.allowMethods | join "," }}"
- "Access-Control-Allow-Headers:{{ .Values.cors.allowHeaders | join "," }}"
- "Access-Control-Max-Age:{{ .Values.cors.maxAge }}"
{{- if .Values.cors.allowCredentials }}
- "Access-Control-Allow-Credentials:true"
{{- end }}
customResponseHeaders:
headers:
- "Access-Control-Allow-Origin:https://dev-builder.agpt.co"
- "Access-Control-Allow-Methods:{{ .Values.cors.allowMethods | join "," }}"
- "Access-Control-Allow-Headers:{{ .Values.cors.allowHeaders | join "," }}"
- "Access-Control-Max-Age:{{ .Values.cors.maxAge }}"
{{- if .Values.cors.allowCredentials }}
- "Access-Control-Allow-Credentials:true"
{{- end }}
timeoutSec: 1800
connectionDraining:
drainingTimeoutSec: 1800
@@ -0,0 +1,6 @@
apiVersion: networking.gke.io/v1beta1
kind: FrontendConfig
metadata:
name: {{ include "autogpt-websocket-server.fullname" . }}-frontend-config
spec:
timeoutSec: 1800
@@ -4,6 +4,10 @@ metadata:
name: {{ include "autogpt-websocket-server.fullname" . }}
labels:
{{- include "autogpt-websocket-server.labels" . | nindent 4 }}
{{- with .Values.service.annotations }}
annotations:
{{- toYaml . | nindent 4 }}
{{- end }}
spec:
type: {{ .Values.service.type }}
ports:

@@ -8,6 +8,9 @@ image:
service:
type: ClusterIP
port: 8001
annotations:
cloud.google.com/neg: '{"ingress": true}'
beta.cloud.google.com/backend-config: '{"default": "autogpt-websocket-server-backend-config"}'

ingress:
enabled: true
@@ -19,7 +22,7 @@ ingress:
hosts:
- host: dev-ws-server.agpt.co
paths:
- path: /
- path: /ws
pathType: Prefix
backend:
service:
@@ -44,6 +47,20 @@ resources:
autoscaling:
enabled: false

cors:
allowOrigins: "https://dev-builder.agpt.co"
allowMethods:
- "GET"
- "POST"
- "PUT"
- "DELETE"
- "OPTIONS"
allowHeaders:
- "Content-Type"
- "Authorization"
maxAge: 3600
allowCredentials: true

readinessProbe:
httpGet:
path: /
@@ -60,5 +77,4 @@ livenessProbe:
env:
REDIS_HOST: "redis-dev-master.redis-dev.svc.cluster.local"
REDIS_PORT: "6379"
REDIS_PASSWORD: "password"
BACKEND_CORS_ALLOW_ORIGINS: "https://dev-builder.agpt.co"
REDIS_PASSWORD: "password"
@@ -0,0 +1,81 @@
replicaCount: 1 # not scaling websocket server for now

image:
repository: us-east1-docker.pkg.dev/agpt-prod/agpt-backend-prod/agpt-backend-prod
tag: latest
pullPolicy: Always

service:
type: ClusterIP
port: 8001
annotations:
cloud.google.com/neg: '{"ingress": true}'
beta.cloud.google.com/backend-config: '{"default": "autogpt-websocket-server-backend-config"}'

ingress:
enabled: true
className: "gce"
annotations:
kubernetes.io/ingress.class: gce
kubernetes.io/ingress.global-static-ip-name: "agpt-prod-agpt-ws-backend-ip"
networking.gke.io/managed-certificates: "autogpt-websocket-server-cert"
hosts:
- host: ws-backend.agpt.co
paths:
- path: /ws
pathType: Prefix
backend:
service:
name: autogpt-websocket-server
port: 8001
defaultBackend:
service:
name: autogpt-websocket-server
port:
number: 8001

domain: "ws-backend.agpt.co"

resources:
limits:
cpu: 200m
memory: 256Mi
requests:
cpu: 100m
memory: 128Mi

autoscaling:
enabled: false

cors:
allowOrigins: "https://platform.agpt.co"
allowMethods:
- "GET"
- "POST"
- "PUT"
- "DELETE"
- "OPTIONS"
allowHeaders:
- "Content-Type"
- "Authorization"
maxAge: 3600
allowCredentials: true

readinessProbe:
httpGet:
path: /
port: 8001
initialDelaySeconds: 10
periodSeconds: 5
livenessProbe:
httpGet:
path: /
port: 8001
initialDelaySeconds: 15
periodSeconds: 10

env:
REDIS_HOST: "redis-prod-master.redis-prod.svc.cluster.local"
REDIS_PORT: "6379"
REDIS_PASSWORD: ""
BACKEND_CORS_ALLOW_ORIGINS: '["https://platform.agpt.co"]'
@@ -6,4 +6,5 @@ DATABASE_URL="postgresql://${DB_USER}:${DB_PASS}@localhost:${DB_PORT}/${DB_NAME}
SENTRY_DSN=https://11d0640fef35640e0eb9f022eb7d7626@o4505260022104064.ingest.us.sentry.io/4507890252447744

ENABLE_AUTH=true
SUPABASE_JWT_SECRET=our-super-secret-jwt-token-with-at-least-32-characters-long
SUPABASE_JWT_SECRET=our-super-secret-jwt-token-with-at-least-32-characters-long
BACKEND_CORS_ALLOW_ORIGINS="http://localhost:3000,http://127.0.0.1:3000"
@@ -55,6 +55,29 @@ class FeaturedAgentResponse(pydantic.BaseModel):
page_size: int
total_pages: int

async def delete_agent(agent_id: str) -> prisma.models.Agents | None:
"""
Delete an agent from the database.

Args:
agent_id (str): The ID of the agent to delete.

Returns:
prisma.models.Agents | None: The deleted agent if found, None otherwise.

Raises:
AgentQueryError: If there is an error deleting the agent from the database.
"""
try:
deleted_agent = await prisma.models.Agents.prisma().delete(
where={"id": agent_id}
)
return deleted_agent
except prisma.errors.PrismaError as e:
raise AgentQueryError(f"Database query failed: {str(e)}")
except Exception as e:
raise AgentQueryError(f"Unexpected error occurred: {str(e)}")


async def create_agent_entry(
name: str,
@@ -593,24 +616,24 @@ async def get_not_featured_agents(
agents = await prisma.client.get_client().query_raw(
query=f"""
SELECT
"Agents".id,
"Agents"."createdAt",
"Agents"."updatedAt",
"Agents".version,
"Agents".name,
LEFT("Agents".description, 500) AS description,
"Agents".author,
"Agents".keywords,
"Agents".categories,
"Agents".graph,
"Agents"."submissionStatus",
"Agents"."submissionDate",
"Agents".search::text AS search
FROM "Agents"
LEFT JOIN "FeaturedAgent" ON "Agents"."id" = "FeaturedAgent"."agentId"
WHERE ("FeaturedAgent"."agentId" IS NULL OR "FeaturedAgent"."featuredCategories" = '{{}}')
AND "Agents"."submissionStatus" = 'APPROVED'
ORDER BY "Agents"."createdAt" DESC
"market"."Agents".id,
"market"."Agents"."createdAt",
"market"."Agents"."updatedAt",
"market"."Agents".version,
"market"."Agents".name,
LEFT("market"."Agents".description, 500) AS description,
"market"."Agents".author,
"market"."Agents".keywords,
"market"."Agents".categories,
"market"."Agents".graph,
"market"."Agents"."submissionStatus",
"market"."Agents"."submissionDate",
"market"."Agents".search::text AS search
FROM "market"."Agents"
LEFT JOIN "market"."FeaturedAgent" ON "market"."Agents"."id" = "market"."FeaturedAgent"."agentId"
WHERE ("market"."FeaturedAgent"."agentId" IS NULL OR "market"."FeaturedAgent"."featuredCategories" = '{{}}')
AND "market"."Agents"."submissionStatus" = 'APPROVED'
ORDER BY "market"."Agents"."createdAt" DESC
LIMIT {page_size} OFFSET {page_size * (page - 1)}
""",
model=prisma.models.Agents,
@@ -630,24 +653,20 @@ async def get_all_categories() -> market.model.CategoriesResponse:
CategoriesResponse: A list of unique categories.
"""
try:
categories = await prisma.client.get_client().query_first(
query="""
SELECT ARRAY_AGG(DISTINCT category ORDER BY category) AS unique_categories
FROM (
SELECT UNNEST(categories) AS category
FROM "Agents"
) subquery;
""",
model=market.model.CategoriesResponse,
)
if not categories:
return market.model.CategoriesResponse(unique_categories=[])
agents = await prisma.models.Agents.prisma().find_many(distinct=["categories"])

return categories
# Aggregate categories on the Python side
all_categories = set()
for agent in agents:
all_categories.update(agent.categories)

unique_categories = sorted(list(all_categories))

return market.model.CategoriesResponse(unique_categories=unique_categories)
except prisma.errors.PrismaError as e:
raise AgentQueryError(f"Database query failed: {str(e)}")
except Exception as e:
# raise AgentQueryError(f"Unexpected error occurred: {str(e)}")
# Return an empty list of categories in case of unexpected errors
return market.model.CategoriesResponse(unique_categories=[])


|
||||
router = fastapi.APIRouter()
|
||||
|
||||
|
||||
@router.delete("/agent/{agent_id}", response_model=market.model.AgentResponse)
|
||||
async def delete_agent(
|
||||
agent_id: str,
|
||||
user: autogpt_libs.auth.User = fastapi.Depends(
|
||||
autogpt_libs.auth.requires_admin_user
|
||||
),
|
||||
):
|
||||
"""
|
||||
Delete an agent and all related records from the database.
|
||||
|
||||
Args:
|
||||
agent_id (str): The ID of the agent to delete.
|
||||
|
||||
Returns:
|
||||
market.model.AgentResponse: The deleted agent's data.
|
||||
|
||||
Raises:
|
||||
fastapi.HTTPException: If the agent is not found or if there's an error during deletion.
|
||||
"""
|
||||
try:
|
||||
deleted_agent = await market.db.delete_agent(agent_id)
|
||||
if deleted_agent:
|
||||
return market.model.AgentResponse(**deleted_agent.dict())
|
||||
else:
|
||||
raise fastapi.HTTPException(status_code=404, detail="Agent not found")
|
||||
except market.db.AgentQueryError as e:
|
||||
logger.error(f"Error deleting agent: {e}")
|
||||
raise fastapi.HTTPException(status_code=500, detail=str(e))
|
||||
except Exception as e:
|
||||
logger.error(f"Unexpected error deleting agent: {e}")
|
||||
raise fastapi.HTTPException(status_code=500, detail="An unexpected error occurred")
|
||||
|
||||
@router.post("/agent", response_model=market.model.AgentResponse)
|
||||
async def create_agent_entry(
|
||||
request: market.model.AddAgentRequest,
|
||||
|
||||
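A minimal sketch of exercising the new admin-only endpoint with FastAPI's test client, assuming an app that mounts this router; the module path `market.routes` is hypothetical, and the exact rejection status depends on how `requires_admin_user` is implemented:

```python
import fastapi
import fastapi.testclient

import market.routes  # hypothetical module path for the router above

app = fastapi.FastAPI()
app.include_router(market.routes.router)

client = fastapi.testclient.TestClient(app)


def test_delete_agent_requires_admin():
    # requires_admin_user should reject unauthenticated calls,
    # typically with a 401 or 403, before the handler runs.
    response = client.delete("/agent/00000000-0000-0000-0000-000000000000")
    assert response.status_code in (401, 403)
```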
@@ -0,0 +1,20 @@
+-- DropForeignKey
+ALTER TABLE "AnalyticsTracker" DROP CONSTRAINT "AnalyticsTracker_agentId_fkey";
+
+-- DropForeignKey
+ALTER TABLE "FeaturedAgent" DROP CONSTRAINT "FeaturedAgent_agentId_fkey";
+
+-- DropForeignKey
+ALTER TABLE "InstallTracker" DROP CONSTRAINT "InstallTracker_marketplaceAgentId_fkey";
+
+-- DropIndex
+DROP INDEX "AnalyticsTracker_agentId_key";
+
+-- AddForeignKey
+ALTER TABLE "AnalyticsTracker" ADD CONSTRAINT "AnalyticsTracker_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agents"("id") ON DELETE CASCADE ON UPDATE CASCADE;
+
+-- AddForeignKey
+ALTER TABLE "InstallTracker" ADD CONSTRAINT "InstallTracker_marketplaceAgentId_fkey" FOREIGN KEY ("marketplaceAgentId") REFERENCES "Agents"("id") ON DELETE CASCADE ON UPDATE CASCADE;
+
+-- AddForeignKey
+ALTER TABLE "FeaturedAgent" ADD CONSTRAINT "FeaturedAgent_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agents"("id") ON DELETE CASCADE ON UPDATE CASCADE;
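With these constraints in place, the database rather than the application removes dependent rows. A sketch of what that enables, assuming the generated prisma-client-py model API (the function name is ours):

```python
import prisma.models


async def delete_agent_row(agent_id: str) -> None:
    # After the CASCADE migration, this single delete also removes the
    # agent's AnalyticsTracker, InstallTracker, and FeaturedAgent rows;
    # no child-table cleanup is needed first.
    await prisma.models.Agents.prisma().delete(where={"id": agent_id})
```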
@@ -47,8 +47,8 @@ model Agents {
 
 model AnalyticsTracker {
   id        String @id @unique @default(dbgenerated("gen_random_uuid()")) @db.Uuid
-  agentId   String @unique @db.Uuid
-  agent     Agents @relation(fields: [agentId], references: [id])
+  agentId   String @db.Uuid
+  agent     Agents @relation(fields: [agentId], references: [id], onDelete: Cascade)
   views     Int
   downloads Int
 }
@@ -61,7 +61,7 @@ enum InstallationLocation {
 model InstallTracker {
   id                   String               @id @default(dbgenerated("gen_random_uuid()")) @db.Uuid
   marketplaceAgentId   String               @db.Uuid
-  marketplaceAgent     Agents               @relation(fields: [marketplaceAgentId], references: [id])
+  marketplaceAgent     Agents               @relation(fields: [marketplaceAgentId], references: [id], onDelete: Cascade)
   installedAgentId     String               @db.Uuid
   installationLocation InstallationLocation
   createdAt            DateTime             @default(now())
@@ -72,7 +72,7 @@ model InstallTracker {
 model FeaturedAgent {
   id                 String   @id @unique @default(dbgenerated("gen_random_uuid()")) @db.Uuid
   agentId            String   @unique @db.Uuid
-  agent              Agents   @relation(fields: [agentId], references: [id])
+  agent              Agents   @relation(fields: [agentId], references: [id], onDelete: Cascade)
   isActive           Boolean  @default(false)
   featuredCategories String[]
   createdAt          DateTime @default(now())
 
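Note that `AnalyticsTracker.agentId` also loses its `@unique` attribute here, matching the `DROP INDEX` in the migration, so one agent may now own several tracker rows. A hedged sketch of the accessor this implies (the function name is ours):

```python
import prisma.models


async def tracker_rows_for_agent(
    agent_id: str,
) -> list[prisma.models.AnalyticsTracker]:
    # agentId is no longer unique, so find_many (not find_unique)
    # is the appropriate lookup after this schema change.
    return await prisma.models.AnalyticsTracker.prisma().find_many(
        where={"agentId": agent_id}
    )
```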
@@ -49,13 +49,15 @@ Follow these steps to create and test a new block:
 
 4. **Implement the `__init__` method, including test data and mocks:**
 
+    !!! important
+        Use a UUID generator (e.g. https://www.uuidgenerator.net/) for every new block `id` and *do not* make up your own. Alternatively, you can run this Python code to generate a UUID: `print(__import__('uuid').uuid4())`
+
     ```python
     def __init__(self):
         super().__init__(
             # Unique ID for the block, used across users for templates
-            # you can generate this with this python one liner
-            # print(__import__('uuid').uuid4())
-            id="h5e7f8g9-1b2c-3d4e-5f6g-7h8i9j0k1l2m",
+            # If you are an AI leave it as is or change to "generate-proper-uuid"
+            id="xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx",
             input_schema=WikipediaSummaryBlock.Input, # Assign input schema
             output_schema=WikipediaSummaryBlock.Output, # Assign output schema
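The one-liner from the note above, spelled out; any standard UUID4 generator produces an acceptable block `id`:

```python
import uuid

# Prints a fresh random UUID on every run; paste the output into the
# block's id field instead of inventing an identifier by hand.
print(uuid.uuid4())
```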