diff --git a/autogpt_platform/backend/backend/api/external/v1/routes.py b/autogpt_platform/backend/backend/api/external/v1/routes.py
index 00933c1899..69a0c36637 100644
--- a/autogpt_platform/backend/backend/api/external/v1/routes.py
+++ b/autogpt_platform/backend/backend/api/external/v1/routes.py
@@ -10,7 +10,7 @@ from typing_extensions import TypedDict
import backend.api.features.store.cache as store_cache
import backend.api.features.store.model as store_model
-import backend.data.block
+import backend.blocks
from backend.api.external.middleware import require_permission
from backend.data import execution as execution_db
from backend.data import graph as graph_db
@@ -67,7 +67,7 @@ async def get_user_info(
dependencies=[Security(require_permission(APIKeyPermission.READ_BLOCK))],
)
async def get_graph_blocks() -> Sequence[dict[Any, Any]]:
- blocks = [block() for block in backend.data.block.get_blocks().values()]
+ blocks = [block() for block in backend.blocks.get_blocks().values()]
return [b.to_dict() for b in blocks if not b.disabled]
@@ -83,7 +83,7 @@ async def execute_graph_block(
require_permission(APIKeyPermission.EXECUTE_BLOCK)
),
) -> CompletedBlockOutput:
- obj = backend.data.block.get_block(block_id)
+ obj = backend.blocks.get_block(block_id)
if not obj:
raise HTTPException(status_code=404, detail=f"Block #{block_id} not found.")
if obj.disabled:
diff --git a/autogpt_platform/backend/backend/api/features/builder/db.py b/autogpt_platform/backend/backend/api/features/builder/db.py
index 7177fa4dc6..e8d35b0bb5 100644
--- a/autogpt_platform/backend/backend/api/features/builder/db.py
+++ b/autogpt_platform/backend/backend/api/features/builder/db.py
@@ -10,10 +10,15 @@ import backend.api.features.library.db as library_db
import backend.api.features.library.model as library_model
import backend.api.features.store.db as store_db
import backend.api.features.store.model as store_model
-import backend.data.block
from backend.blocks import load_all_blocks
+from backend.blocks._base import (
+ AnyBlockSchema,
+ BlockCategory,
+ BlockInfo,
+ BlockSchema,
+ BlockType,
+)
from backend.blocks.llm import LlmModel
-from backend.data.block import AnyBlockSchema, BlockCategory, BlockInfo, BlockSchema
from backend.data.db import query_raw_with_schema
from backend.integrations.providers import ProviderName
from backend.util.cache import cached
@@ -22,7 +27,7 @@ from backend.util.models import Pagination
from .model import (
BlockCategoryResponse,
BlockResponse,
- BlockType,
+ BlockTypeFilter,
CountResponse,
FilterType,
Provider,
@@ -88,7 +93,7 @@ def get_block_categories(category_blocks: int = 3) -> list[BlockCategoryResponse
def get_blocks(
*,
category: str | None = None,
- type: BlockType | None = None,
+ type: BlockTypeFilter | None = None,
provider: ProviderName | None = None,
page: int = 1,
page_size: int = 50,
@@ -669,9 +674,9 @@ async def get_suggested_blocks(count: int = 5) -> list[BlockInfo]:
for block_type in load_all_blocks().values():
block: AnyBlockSchema = block_type()
if block.disabled or block.block_type in (
- backend.data.block.BlockType.INPUT,
- backend.data.block.BlockType.OUTPUT,
- backend.data.block.BlockType.AGENT,
+ BlockType.INPUT,
+ BlockType.OUTPUT,
+ BlockType.AGENT,
):
continue
# Find the execution count for this block
diff --git a/autogpt_platform/backend/backend/api/features/builder/model.py b/autogpt_platform/backend/backend/api/features/builder/model.py
index fcd19dba94..8aa8ed06ed 100644
--- a/autogpt_platform/backend/backend/api/features/builder/model.py
+++ b/autogpt_platform/backend/backend/api/features/builder/model.py
@@ -4,7 +4,7 @@ from pydantic import BaseModel
import backend.api.features.library.model as library_model
import backend.api.features.store.model as store_model
-from backend.data.block import BlockInfo
+from backend.blocks._base import BlockInfo
from backend.integrations.providers import ProviderName
from backend.util.models import Pagination
@@ -15,7 +15,7 @@ FilterType = Literal[
"my_agents",
]
-BlockType = Literal["all", "input", "action", "output"]
+BlockTypeFilter = Literal["all", "input", "action", "output"]
class SearchEntry(BaseModel):
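
A hedged sketch (not the actual `db.py` implementation) of why the rename above matters: the builder module now imports both the `BlockType` enum from `backend.blocks._base` and the filter literal, which would otherwise collide on the same name. The mapping below is an assumption for illustration only.

```python
from backend.api.features.builder.model import BlockTypeFilter  # Literal["all", "input", "action", "output"]
from backend.blocks._base import BlockType  # enum: STANDARD, INPUT, OUTPUT, AGENT, ...


def matches_filter(block_type: BlockType, type_filter: BlockTypeFilter) -> bool:
    # Illustrative mapping from filter values to enum members; the real
    # builder query logic lives in db.get_blocks() and may differ.
    mapping: dict[str, set[BlockType]] = {
        "input": {BlockType.INPUT},
        "output": {BlockType.OUTPUT},
        "action": {BlockType.STANDARD, BlockType.AGENT, BlockType.AI},
    }
    return type_filter == "all" or block_type in mapping.get(type_filter, set())
```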
diff --git a/autogpt_platform/backend/backend/api/features/builder/routes.py b/autogpt_platform/backend/backend/api/features/builder/routes.py
index 15b922178d..091f477178 100644
--- a/autogpt_platform/backend/backend/api/features/builder/routes.py
+++ b/autogpt_platform/backend/backend/api/features/builder/routes.py
@@ -88,7 +88,7 @@ async def get_block_categories(
)
async def get_blocks(
category: Annotated[str | None, fastapi.Query()] = None,
- type: Annotated[builder_model.BlockType | None, fastapi.Query()] = None,
+ type: Annotated[builder_model.BlockTypeFilter | None, fastapi.Query()] = None,
provider: Annotated[ProviderName | None, fastapi.Query()] = None,
page: Annotated[int, fastapi.Query()] = 1,
page_size: Annotated[int, fastapi.Query()] = 50,
diff --git a/autogpt_platform/backend/backend/api/features/chat/tools/find_block.py b/autogpt_platform/backend/backend/api/features/chat/tools/find_block.py
index b455dad1ad..55b1c0d510 100644
--- a/autogpt_platform/backend/backend/api/features/chat/tools/find_block.py
+++ b/autogpt_platform/backend/backend/api/features/chat/tools/find_block.py
@@ -12,7 +12,8 @@ from backend.api.features.chat.tools.models import (
NoResultsResponse,
)
from backend.api.features.store.hybrid_search import unified_hybrid_search
-from backend.data.block import BlockType, get_block
+from backend.blocks import get_block
+from backend.blocks._base import BlockType
logger = logging.getLogger(__name__)
diff --git a/autogpt_platform/backend/backend/api/features/chat/tools/find_block_test.py b/autogpt_platform/backend/backend/api/features/chat/tools/find_block_test.py
index 8e7f9f7195..44606f81c3 100644
--- a/autogpt_platform/backend/backend/api/features/chat/tools/find_block_test.py
+++ b/autogpt_platform/backend/backend/api/features/chat/tools/find_block_test.py
@@ -10,7 +10,7 @@ from backend.api.features.chat.tools.find_block import (
FindBlockTool,
)
from backend.api.features.chat.tools.models import BlockListResponse
-from backend.data.block import BlockType
+from backend.blocks._base import BlockType
from ._test_data import make_session
diff --git a/autogpt_platform/backend/backend/api/features/chat/tools/run_block.py b/autogpt_platform/backend/backend/api/features/chat/tools/run_block.py
index b98271dc1e..7d092d79ab 100644
--- a/autogpt_platform/backend/backend/api/features/chat/tools/run_block.py
+++ b/autogpt_platform/backend/backend/api/features/chat/tools/run_block.py
@@ -12,7 +12,8 @@ from backend.api.features.chat.tools.find_block import (
COPILOT_EXCLUDED_BLOCK_IDS,
COPILOT_EXCLUDED_BLOCK_TYPES,
)
-from backend.data.block import AnyBlockSchema, get_block
+from backend.blocks import get_block
+from backend.blocks._base import AnyBlockSchema
from backend.data.execution import ExecutionContext
from backend.data.model import CredentialsFieldInfo, CredentialsMetaInput
from backend.data.workspace import get_or_create_workspace
diff --git a/autogpt_platform/backend/backend/api/features/chat/tools/run_block_test.py b/autogpt_platform/backend/backend/api/features/chat/tools/run_block_test.py
index 2aae45e875..aadc161155 100644
--- a/autogpt_platform/backend/backend/api/features/chat/tools/run_block_test.py
+++ b/autogpt_platform/backend/backend/api/features/chat/tools/run_block_test.py
@@ -6,7 +6,7 @@ import pytest
from backend.api.features.chat.tools.models import ErrorResponse
from backend.api.features.chat.tools.run_block import RunBlockTool
-from backend.data.block import BlockType
+from backend.blocks._base import BlockType
from ._test_data import make_session
diff --git a/autogpt_platform/backend/backend/api/features/library/db.py b/autogpt_platform/backend/backend/api/features/library/db.py
index 32479c18a3..e07ed9f7ad 100644
--- a/autogpt_platform/backend/backend/api/features/library/db.py
+++ b/autogpt_platform/backend/backend/api/features/library/db.py
@@ -12,12 +12,11 @@ import backend.api.features.store.image_gen as store_image_gen
import backend.api.features.store.media as store_media
import backend.data.graph as graph_db
import backend.data.integrations as integrations_db
-from backend.data.block import BlockInput
from backend.data.db import transaction
from backend.data.execution import get_graph_execution
from backend.data.graph import GraphSettings
from backend.data.includes import AGENT_PRESET_INCLUDE, library_agent_include
-from backend.data.model import CredentialsMetaInput
+from backend.data.model import CredentialsMetaInput, GraphInput
from backend.integrations.creds_manager import IntegrationCredentialsManager
from backend.integrations.webhooks.graph_lifecycle_hooks import (
on_graph_activate,
@@ -1130,7 +1129,7 @@ async def create_preset_from_graph_execution(
async def update_preset(
user_id: str,
preset_id: str,
- inputs: Optional[BlockInput] = None,
+ inputs: Optional[GraphInput] = None,
credentials: Optional[dict[str, CredentialsMetaInput]] = None,
name: Optional[str] = None,
description: Optional[str] = None,
diff --git a/autogpt_platform/backend/backend/api/features/library/model.py b/autogpt_platform/backend/backend/api/features/library/model.py
index c6bc0e0427..9ecbaecccb 100644
--- a/autogpt_platform/backend/backend/api/features/library/model.py
+++ b/autogpt_platform/backend/backend/api/features/library/model.py
@@ -6,9 +6,12 @@ import prisma.enums
import prisma.models
import pydantic
-from backend.data.block import BlockInput
from backend.data.graph import GraphModel, GraphSettings, GraphTriggerInfo
-from backend.data.model import CredentialsMetaInput, is_credentials_field_name
+from backend.data.model import (
+ CredentialsMetaInput,
+ GraphInput,
+ is_credentials_field_name,
+)
from backend.util.json import loads as json_loads
from backend.util.models import Pagination
@@ -323,7 +326,7 @@ class LibraryAgentPresetCreatable(pydantic.BaseModel):
graph_id: str
graph_version: int
- inputs: BlockInput
+ inputs: GraphInput
credentials: dict[str, CredentialsMetaInput]
name: str
@@ -352,7 +355,7 @@ class LibraryAgentPresetUpdatable(pydantic.BaseModel):
Request model used when updating a preset for a library agent.
"""
- inputs: Optional[BlockInput] = None
+ inputs: Optional[GraphInput] = None
credentials: Optional[dict[str, CredentialsMetaInput]] = None
name: Optional[str] = None
@@ -395,7 +398,7 @@ class LibraryAgentPreset(LibraryAgentPresetCreatable):
"Webhook must be included in AgentPreset query when webhookId is set"
)
- input_data: BlockInput = {}
+ input_data: GraphInput = {}
input_credentials: dict[str, CredentialsMetaInput] = {}
for preset_input in preset.InputPresets:
diff --git a/autogpt_platform/backend/backend/api/features/otto/service.py b/autogpt_platform/backend/backend/api/features/otto/service.py
index 5f00022ff2..992021c0ca 100644
--- a/autogpt_platform/backend/backend/api/features/otto/service.py
+++ b/autogpt_platform/backend/backend/api/features/otto/service.py
@@ -5,8 +5,8 @@ from typing import Optional
import aiohttp
from fastapi import HTTPException
+from backend.blocks import get_block
from backend.data import graph as graph_db
-from backend.data.block import get_block
from backend.util.settings import Settings
from .models import ApiResponse, ChatRequest, GraphData
diff --git a/autogpt_platform/backend/backend/api/features/store/content_handlers.py b/autogpt_platform/backend/backend/api/features/store/content_handlers.py
index cbbdcfbebf..38fc1e27d0 100644
--- a/autogpt_platform/backend/backend/api/features/store/content_handlers.py
+++ b/autogpt_platform/backend/backend/api/features/store/content_handlers.py
@@ -152,7 +152,7 @@ class BlockHandler(ContentHandler):
async def get_missing_items(self, batch_size: int) -> list[ContentItem]:
"""Fetch blocks without embeddings."""
- from backend.data.block import get_blocks
+ from backend.blocks import get_blocks
# Get all available blocks
all_blocks = get_blocks()
@@ -249,7 +249,7 @@ class BlockHandler(ContentHandler):
async def get_stats(self) -> dict[str, int]:
"""Get statistics about block embedding coverage."""
- from backend.data.block import get_blocks
+ from backend.blocks import get_blocks
all_blocks = get_blocks()
diff --git a/autogpt_platform/backend/backend/api/features/store/content_handlers_test.py b/autogpt_platform/backend/backend/api/features/store/content_handlers_test.py
index fee879fae0..c552e44a9d 100644
--- a/autogpt_platform/backend/backend/api/features/store/content_handlers_test.py
+++ b/autogpt_platform/backend/backend/api/features/store/content_handlers_test.py
@@ -93,7 +93,7 @@ async def test_block_handler_get_missing_items(mocker):
mock_existing = []
with patch(
- "backend.data.block.get_blocks",
+ "backend.blocks.get_blocks",
return_value=mock_blocks,
):
with patch(
@@ -135,7 +135,7 @@ async def test_block_handler_get_stats(mocker):
mock_embedded = [{"count": 2}]
with patch(
- "backend.data.block.get_blocks",
+ "backend.blocks.get_blocks",
return_value=mock_blocks,
):
with patch(
@@ -327,7 +327,7 @@ async def test_block_handler_handles_missing_attributes():
mock_blocks = {"block-minimal": mock_block_class}
with patch(
- "backend.data.block.get_blocks",
+ "backend.blocks.get_blocks",
return_value=mock_blocks,
):
with patch(
@@ -360,7 +360,7 @@ async def test_block_handler_skips_failed_blocks():
mock_blocks = {"good-block": good_block, "bad-block": bad_block}
with patch(
- "backend.data.block.get_blocks",
+ "backend.blocks.get_blocks",
return_value=mock_blocks,
):
with patch(
diff --git a/autogpt_platform/backend/backend/api/features/store/embeddings.py b/autogpt_platform/backend/backend/api/features/store/embeddings.py
index 434f2fe2ce..921e103618 100644
--- a/autogpt_platform/backend/backend/api/features/store/embeddings.py
+++ b/autogpt_platform/backend/backend/api/features/store/embeddings.py
@@ -662,7 +662,7 @@ async def cleanup_orphaned_embeddings() -> dict[str, Any]:
)
current_ids = {row["id"] for row in valid_agents}
elif content_type == ContentType.BLOCK:
- from backend.data.block import get_blocks
+ from backend.blocks import get_blocks
current_ids = set(get_blocks().keys())
elif content_type == ContentType.DOCUMENTATION:
diff --git a/autogpt_platform/backend/backend/api/features/store/image_gen.py b/autogpt_platform/backend/backend/api/features/store/image_gen.py
index 087a7895ba..64ac203182 100644
--- a/autogpt_platform/backend/backend/api/features/store/image_gen.py
+++ b/autogpt_platform/backend/backend/api/features/store/image_gen.py
@@ -7,15 +7,6 @@ from replicate.client import Client as ReplicateClient
from replicate.exceptions import ReplicateError
from replicate.helpers import FileOutput
-from backend.blocks.ideogram import (
- AspectRatio,
- ColorPalettePreset,
- IdeogramModelBlock,
- IdeogramModelName,
- MagicPromptOption,
- StyleType,
- UpscaleOption,
-)
from backend.data.graph import GraphBaseMeta
from backend.data.model import CredentialsMetaInput, ProviderName
from backend.integrations.credentials_store import ideogram_credentials
@@ -50,6 +41,16 @@ async def generate_agent_image_v2(graph: GraphBaseMeta | AgentGraph) -> io.Bytes
if not ideogram_credentials.api_key:
raise ValueError("Missing Ideogram API key")
+ from backend.blocks.ideogram import (
+ AspectRatio,
+ ColorPalettePreset,
+ IdeogramModelBlock,
+ IdeogramModelName,
+ MagicPromptOption,
+ StyleType,
+ UpscaleOption,
+ )
+
name = graph.name
description = f"{name} ({graph.description})" if graph.description else name
diff --git a/autogpt_platform/backend/backend/api/features/v1.py b/autogpt_platform/backend/backend/api/features/v1.py
index a8610702cc..dd8ef3611f 100644
--- a/autogpt_platform/backend/backend/api/features/v1.py
+++ b/autogpt_platform/backend/backend/api/features/v1.py
@@ -40,10 +40,11 @@ from backend.api.model import (
UpdateTimezoneRequest,
UploadFileResponse,
)
+from backend.blocks import get_block, get_blocks
from backend.data import execution as execution_db
from backend.data import graph as graph_db
from backend.data.auth import api_key as api_key_db
-from backend.data.block import BlockInput, CompletedBlockOutput, get_block, get_blocks
+from backend.data.block import BlockInput, CompletedBlockOutput
from backend.data.credit import (
AutoTopUpConfig,
RefundRequest,
diff --git a/autogpt_platform/backend/backend/blocks/__init__.py b/autogpt_platform/backend/backend/blocks/__init__.py
index a6c16393c7..524e47c31d 100644
--- a/autogpt_platform/backend/backend/blocks/__init__.py
+++ b/autogpt_platform/backend/backend/blocks/__init__.py
@@ -3,22 +3,19 @@ import logging
import os
import re
from pathlib import Path
-from typing import TYPE_CHECKING, TypeVar
+from typing import Sequence, Type, TypeVar
+from backend.blocks._base import AnyBlockSchema, BlockType
from backend.util.cache import cached
logger = logging.getLogger(__name__)
-
-if TYPE_CHECKING:
- from backend.data.block import Block
-
T = TypeVar("T")
@cached(ttl_seconds=3600)
-def load_all_blocks() -> dict[str, type["Block"]]:
- from backend.data.block import Block
+def load_all_blocks() -> dict[str, type["AnyBlockSchema"]]:
+ from backend.blocks._base import Block
from backend.util.settings import Config
# Check if example blocks should be loaded from settings
@@ -50,8 +47,8 @@ def load_all_blocks() -> dict[str, type["Block"]]:
importlib.import_module(f".{module}", package=__name__)
# Load all Block instances from the available modules
- available_blocks: dict[str, type["Block"]] = {}
- for block_cls in all_subclasses(Block):
+ available_blocks: dict[str, type["AnyBlockSchema"]] = {}
+ for block_cls in _all_subclasses(Block):
class_name = block_cls.__name__
if class_name.endswith("Base"):
@@ -64,7 +61,7 @@ def load_all_blocks() -> dict[str, type["Block"]]:
"please name the class with 'Base' at the end"
)
- block = block_cls.create()
+ block = block_cls() # pyright: ignore[reportAbstractUsage]
if not isinstance(block.id, str) or len(block.id) != 36:
raise ValueError(
@@ -105,7 +102,7 @@ def load_all_blocks() -> dict[str, type["Block"]]:
available_blocks[block.id] = block_cls
# Filter out blocks with incomplete auth configs, e.g. missing OAuth server secrets
- from backend.data.block import is_block_auth_configured
+ from ._utils import is_block_auth_configured
filtered_blocks = {}
for block_id, block_cls in available_blocks.items():
@@ -115,11 +112,48 @@ def load_all_blocks() -> dict[str, type["Block"]]:
return filtered_blocks
-__all__ = ["load_all_blocks"]
-
-
-def all_subclasses(cls: type[T]) -> list[type[T]]:
+def _all_subclasses(cls: type[T]) -> list[type[T]]:
subclasses = cls.__subclasses__()
for subclass in subclasses:
- subclasses += all_subclasses(subclass)
+ subclasses += _all_subclasses(subclass)
return subclasses
+
+
+# ============== Block access helper functions ============== #
+
+
+def get_blocks() -> dict[str, Type["AnyBlockSchema"]]:
+ return load_all_blocks()
+
+
+# Note on the return type annotation: https://github.com/microsoft/pyright/issues/10281
+def get_block(block_id: str) -> "AnyBlockSchema | None":
+ cls = get_blocks().get(block_id)
+ return cls() if cls else None
+
+
+@cached(ttl_seconds=3600)
+def get_webhook_block_ids() -> Sequence[str]:
+ return [
+ id
+ for id, B in get_blocks().items()
+ if B().block_type in (BlockType.WEBHOOK, BlockType.WEBHOOK_MANUAL)
+ ]
+
+
+@cached(ttl_seconds=3600)
+def get_io_block_ids() -> Sequence[str]:
+ return [
+ id
+ for id, B in get_blocks().items()
+ if B().block_type in (BlockType.INPUT, BlockType.OUTPUT)
+ ]
+
+
+@cached(ttl_seconds=3600)
+def get_human_in_the_loop_block_ids() -> Sequence[str]:
+ return [
+ id
+ for id, B in get_blocks().items()
+ if B().block_type == BlockType.HUMAN_IN_THE_LOOP
+ ]
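
The helpers added above replace the old `backend.data.block.get_blocks`/`get_block` call sites. A minimal usage sketch, illustrative only, mirroring the external API routes earlier in this patch:

```python
import backend.blocks as blocks


def list_enabled_blocks() -> list[dict]:
    # get_blocks() maps block_id -> block class; instantiate to read metadata
    instances = [cls() for cls in blocks.get_blocks().values()]
    return [b.to_dict() for b in instances if not b.disabled]


def describe_block(block_id: str) -> dict | None:
    # get_block() returns an instance, or None for an unknown id
    block = blocks.get_block(block_id)
    return block.to_dict() if block else None
```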
diff --git a/autogpt_platform/backend/backend/blocks/_base.py b/autogpt_platform/backend/backend/blocks/_base.py
new file mode 100644
index 0000000000..0ba4daec40
--- /dev/null
+++ b/autogpt_platform/backend/backend/blocks/_base.py
@@ -0,0 +1,739 @@
+import inspect
+import logging
+from abc import ABC, abstractmethod
+from enum import Enum
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ ClassVar,
+ Generic,
+ Optional,
+ Type,
+ TypeAlias,
+ TypeVar,
+ cast,
+ get_origin,
+)
+
+import jsonref
+import jsonschema
+from pydantic import BaseModel
+
+from backend.data.block import BlockInput, BlockOutput, BlockOutputEntry
+from backend.data.model import (
+ Credentials,
+ CredentialsFieldInfo,
+ CredentialsMetaInput,
+ SchemaField,
+ is_credentials_field_name,
+)
+from backend.integrations.providers import ProviderName
+from backend.util import json
+from backend.util.exceptions import (
+ BlockError,
+ BlockExecutionError,
+ BlockInputError,
+ BlockOutputError,
+ BlockUnknownError,
+)
+from backend.util.settings import Config
+
+logger = logging.getLogger(__name__)
+
+if TYPE_CHECKING:
+ from backend.data.execution import ExecutionContext
+ from backend.data.model import ContributorDetails, NodeExecutionStats
+
+ from ..data.graph import Link
+
+app_config = Config()
+
+
+BlockTestOutput = BlockOutputEntry | tuple[str, Callable[[Any], bool]]
+
+
+class BlockType(Enum):
+ STANDARD = "Standard"
+ INPUT = "Input"
+ OUTPUT = "Output"
+ NOTE = "Note"
+ WEBHOOK = "Webhook"
+ WEBHOOK_MANUAL = "Webhook (manual)"
+ AGENT = "Agent"
+ AI = "AI"
+ AYRSHARE = "Ayrshare"
+ HUMAN_IN_THE_LOOP = "Human In The Loop"
+
+
+class BlockCategory(Enum):
+ AI = "Block that leverages AI to perform a task."
+ SOCIAL = "Block that interacts with social media platforms."
+ TEXT = "Block that processes text data."
+ SEARCH = "Block that searches or extracts information from the internet."
+ BASIC = "Block that performs basic operations."
+ INPUT = "Block that interacts with input of the graph."
+ OUTPUT = "Block that interacts with output of the graph."
+ LOGIC = "Programming logic to control the flow of your agent"
+ COMMUNICATION = "Block that interacts with communication platforms."
+ DEVELOPER_TOOLS = "Developer tools such as GitHub blocks."
+ DATA = "Block that interacts with structured data."
+ HARDWARE = "Block that interacts with hardware."
+ AGENT = "Block that interacts with other agents."
+ CRM = "Block that interacts with CRM services."
+ SAFETY = (
+ "Block that provides AI safety mechanisms such as detecting harmful content"
+ )
+ PRODUCTIVITY = "Block that helps with productivity"
+ ISSUE_TRACKING = "Block that helps with issue tracking"
+ MULTIMEDIA = "Block that interacts with multimedia content"
+ MARKETING = "Block that helps with marketing"
+
+ def dict(self) -> dict[str, str]:
+ return {"category": self.name, "description": self.value}
+
+
+class BlockCostType(str, Enum):
+ RUN = "run" # cost X credits per run
+ BYTE = "byte" # cost X credits per byte
+ SECOND = "second" # cost X credits per second
+
+
+class BlockCost(BaseModel):
+ cost_amount: int
+ cost_filter: BlockInput
+ cost_type: BlockCostType
+
+ def __init__(
+ self,
+ cost_amount: int,
+ cost_type: BlockCostType = BlockCostType.RUN,
+ cost_filter: Optional[BlockInput] = None,
+ **data: Any,
+ ) -> None:
+ super().__init__(
+ cost_amount=cost_amount,
+ cost_filter=cost_filter or {},
+ cost_type=cost_type,
+ **data,
+ )
+
+
+class BlockInfo(BaseModel):
+ id: str
+ name: str
+ inputSchema: dict[str, Any]
+ outputSchema: dict[str, Any]
+ costs: list[BlockCost]
+ description: str
+ categories: list[dict[str, str]]
+ contributors: list[dict[str, Any]]
+ staticOutput: bool
+ uiType: str
+
+
+class BlockSchema(BaseModel):
+ cached_jsonschema: ClassVar[dict[str, Any]]
+
+ @classmethod
+ def jsonschema(cls) -> dict[str, Any]:
+ if cls.cached_jsonschema:
+ return cls.cached_jsonschema
+
+ model = jsonref.replace_refs(cls.model_json_schema(), merge_props=True)
+
+ def ref_to_dict(obj):
+ if isinstance(obj, dict):
+                # OpenAPI <3.1 does not support sibling fields that have a $ref key
+ # So sometimes, the schema has an "allOf"/"anyOf"/"oneOf" with 1 item.
+ keys = {"allOf", "anyOf", "oneOf"}
+ one_key = next((k for k in keys if k in obj and len(obj[k]) == 1), None)
+ if one_key:
+ obj.update(obj[one_key][0])
+
+ return {
+ key: ref_to_dict(value)
+ for key, value in obj.items()
+ if not key.startswith("$") and key != one_key
+ }
+ elif isinstance(obj, list):
+ return [ref_to_dict(item) for item in obj]
+
+ return obj
+
+ cls.cached_jsonschema = cast(dict[str, Any], ref_to_dict(model))
+
+ return cls.cached_jsonschema
+
+ @classmethod
+ def validate_data(cls, data: BlockInput) -> str | None:
+ return json.validate_with_jsonschema(
+ schema=cls.jsonschema(),
+ data={k: v for k, v in data.items() if v is not None},
+ )
+
+ @classmethod
+ def get_mismatch_error(cls, data: BlockInput) -> str | None:
+ return cls.validate_data(data)
+
+ @classmethod
+ def get_field_schema(cls, field_name: str) -> dict[str, Any]:
+ model_schema = cls.jsonschema().get("properties", {})
+ if not model_schema:
+ raise ValueError(f"Invalid model schema {cls}")
+
+ property_schema = model_schema.get(field_name)
+ if not property_schema:
+ raise ValueError(f"Invalid property name {field_name}")
+
+ return property_schema
+
+ @classmethod
+ def validate_field(cls, field_name: str, data: BlockInput) -> str | None:
+ """
+ Validate the data against a specific property (one of the input/output name).
+ Returns the validation error message if the data does not match the schema.
+ """
+ try:
+ property_schema = cls.get_field_schema(field_name)
+ jsonschema.validate(json.to_dict(data), property_schema)
+ return None
+ except jsonschema.ValidationError as e:
+ return str(e)
+
+ @classmethod
+ def get_fields(cls) -> set[str]:
+ return set(cls.model_fields.keys())
+
+ @classmethod
+ def get_required_fields(cls) -> set[str]:
+ return {
+ field
+ for field, field_info in cls.model_fields.items()
+ if field_info.is_required()
+ }
+
+ @classmethod
+ def __pydantic_init_subclass__(cls, **kwargs):
+ """Validates the schema definition. Rules:
+ - Fields with annotation `CredentialsMetaInput` MUST be
+ named `credentials` or `*_credentials`
+ - Fields named `credentials` or `*_credentials` MUST be
+ of type `CredentialsMetaInput`
+ """
+ super().__pydantic_init_subclass__(**kwargs)
+
+ # Reset cached JSON schema to prevent inheriting it from parent class
+ cls.cached_jsonschema = {}
+
+ credentials_fields = cls.get_credentials_fields()
+
+ for field_name in cls.get_fields():
+ if is_credentials_field_name(field_name):
+ if field_name not in credentials_fields:
+ raise TypeError(
+ f"Credentials field '{field_name}' on {cls.__qualname__} "
+ f"is not of type {CredentialsMetaInput.__name__}"
+ )
+
+ CredentialsMetaInput.validate_credentials_field_schema(
+ cls.get_field_schema(field_name), field_name
+ )
+
+ elif field_name in credentials_fields:
+ raise KeyError(
+ f"Credentials field '{field_name}' on {cls.__qualname__} "
+ "has invalid name: must be 'credentials' or *_credentials"
+ )
+
+ @classmethod
+ def get_credentials_fields(cls) -> dict[str, type[CredentialsMetaInput]]:
+ return {
+ field_name: info.annotation
+ for field_name, info in cls.model_fields.items()
+ if (
+ inspect.isclass(info.annotation)
+ and issubclass(
+ get_origin(info.annotation) or info.annotation,
+ CredentialsMetaInput,
+ )
+ )
+ }
+
+ @classmethod
+ def get_auto_credentials_fields(cls) -> dict[str, dict[str, Any]]:
+ """
+ Get fields that have auto_credentials metadata (e.g., GoogleDriveFileInput).
+
+ Returns a dict mapping kwarg_name -> {field_name, auto_credentials_config}
+
+ Raises:
+ ValueError: If multiple fields have the same kwarg_name, as this would
+ cause silent overwriting and only the last field would be processed.
+ """
+ result: dict[str, dict[str, Any]] = {}
+ schema = cls.jsonschema()
+ properties = schema.get("properties", {})
+
+ for field_name, field_schema in properties.items():
+ auto_creds = field_schema.get("auto_credentials")
+ if auto_creds:
+ kwarg_name = auto_creds.get("kwarg_name", "credentials")
+ if kwarg_name in result:
+ raise ValueError(
+ f"Duplicate auto_credentials kwarg_name '{kwarg_name}' "
+ f"in fields '{result[kwarg_name]['field_name']}' and "
+ f"'{field_name}' on {cls.__qualname__}"
+ )
+ result[kwarg_name] = {
+ "field_name": field_name,
+ "config": auto_creds,
+ }
+ return result
+
+ @classmethod
+ def get_credentials_fields_info(cls) -> dict[str, CredentialsFieldInfo]:
+ result = {}
+
+ # Regular credentials fields
+ for field_name in cls.get_credentials_fields().keys():
+ result[field_name] = CredentialsFieldInfo.model_validate(
+ cls.get_field_schema(field_name), by_alias=True
+ )
+
+ # Auto-generated credentials fields (from GoogleDriveFileInput etc.)
+ for kwarg_name, info in cls.get_auto_credentials_fields().items():
+ config = info["config"]
+ # Build a schema-like dict that CredentialsFieldInfo can parse
+ auto_schema = {
+ "credentials_provider": [config.get("provider", "google")],
+ "credentials_types": [config.get("type", "oauth2")],
+ "credentials_scopes": config.get("scopes"),
+ }
+ result[kwarg_name] = CredentialsFieldInfo.model_validate(
+ auto_schema, by_alias=True
+ )
+
+ return result
+
+ @classmethod
+ def get_input_defaults(cls, data: BlockInput) -> BlockInput:
+ return data # Return as is, by default.
+
+ @classmethod
+ def get_missing_links(cls, data: BlockInput, links: list["Link"]) -> set[str]:
+ input_fields_from_nodes = {link.sink_name for link in links}
+ return input_fields_from_nodes - set(data)
+
+ @classmethod
+ def get_missing_input(cls, data: BlockInput) -> set[str]:
+ return cls.get_required_fields() - set(data)
+
+
+class BlockSchemaInput(BlockSchema):
+ """
+ Base schema class for block inputs.
+ All block input schemas should extend this class for consistency.
+ """
+
+ pass
+
+
+class BlockSchemaOutput(BlockSchema):
+ """
+ Base schema class for block outputs that includes a standard error field.
+ All block output schemas should extend this class to ensure consistent error handling.
+ """
+
+ error: str = SchemaField(
+ description="Error message if the operation failed", default=""
+ )
+
+
+BlockSchemaInputType = TypeVar("BlockSchemaInputType", bound=BlockSchemaInput)
+BlockSchemaOutputType = TypeVar("BlockSchemaOutputType", bound=BlockSchemaOutput)
+
+
+class EmptyInputSchema(BlockSchemaInput):
+ pass
+
+
+class EmptyOutputSchema(BlockSchemaOutput):
+ pass
+
+
+# For backward compatibility - will be deprecated
+EmptySchema = EmptyOutputSchema
+
+
+# --8<-- [start:BlockWebhookConfig]
+class BlockManualWebhookConfig(BaseModel):
+ """
+ Configuration model for webhook-triggered blocks on which
+ the user has to manually set up the webhook at the provider.
+ """
+
+ provider: ProviderName
+ """The service provider that the webhook connects to"""
+
+ webhook_type: str
+ """
+ Identifier for the webhook type. E.g. GitHub has repo and organization level hooks.
+
+ Only for use in the corresponding `WebhooksManager`.
+ """
+
+ event_filter_input: str = ""
+ """
+ Name of the block's event filter input.
+ Leave empty if the corresponding webhook doesn't have distinct event/payload types.
+ """
+
+ event_format: str = "{event}"
+ """
+ Template string for the event(s) that a block instance subscribes to.
+ Applied individually to each event selected in the event filter input.
+
+ Example: `"pull_request.{event}"` -> `"pull_request.opened"`
+ """
+
+
+class BlockWebhookConfig(BlockManualWebhookConfig):
+ """
+ Configuration model for webhook-triggered blocks for which
+ the webhook can be automatically set up through the provider's API.
+ """
+
+ resource_format: str
+ """
+ Template string for the resource that a block instance subscribes to.
+ Fields will be filled from the block's inputs (except `payload`).
+
+ Example: `f"{repo}/pull_requests"` (note: not how it's actually implemented)
+
+ Only for use in the corresponding `WebhooksManager`.
+ """
+ # --8<-- [end:BlockWebhookConfig]
+
+
+class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
+ def __init__(
+ self,
+ id: str = "",
+ description: str = "",
+ contributors: list["ContributorDetails"] = [],
+ categories: set[BlockCategory] | None = None,
+ input_schema: Type[BlockSchemaInputType] = EmptyInputSchema,
+ output_schema: Type[BlockSchemaOutputType] = EmptyOutputSchema,
+ test_input: BlockInput | list[BlockInput] | None = None,
+ test_output: BlockTestOutput | list[BlockTestOutput] | None = None,
+ test_mock: dict[str, Any] | None = None,
+ test_credentials: Optional[Credentials | dict[str, Credentials]] = None,
+ disabled: bool = False,
+ static_output: bool = False,
+ block_type: BlockType = BlockType.STANDARD,
+ webhook_config: Optional[BlockWebhookConfig | BlockManualWebhookConfig] = None,
+ is_sensitive_action: bool = False,
+ ):
+ """
+ Initialize the block with the given schema.
+
+ Args:
+            id: The unique identifier for the block; this value will be persisted in the
+                DB, so it should be unique and constant across application runs.
+ Use the UUID format for the ID.
+ description: The description of the block, explaining what the block does.
+ contributors: The list of contributors who contributed to the block.
+ input_schema: The schema, defined as a Pydantic model, for the input data.
+ output_schema: The schema, defined as a Pydantic model, for the output data.
+ test_input: The list or single sample input data for the block, for testing.
+ test_output: The list or single expected output if the test_input is run.
+            test_mock: Function names on the block implementation to mock during test runs.
+ disabled: If the block is disabled, it will not be available for execution.
+ static_output: Whether the output links of the block are static by default.
+ """
+ from backend.data.model import NodeExecutionStats
+
+ self.id = id
+ self.input_schema = input_schema
+ self.output_schema = output_schema
+ self.test_input = test_input
+ self.test_output = test_output
+ self.test_mock = test_mock
+ self.test_credentials = test_credentials
+ self.description = description
+ self.categories = categories or set()
+ self.contributors = contributors or set()
+ self.disabled = disabled
+ self.static_output = static_output
+ self.block_type = block_type
+ self.webhook_config = webhook_config
+ self.is_sensitive_action = is_sensitive_action
+ self.execution_stats: "NodeExecutionStats" = NodeExecutionStats()
+
+ if self.webhook_config:
+ if isinstance(self.webhook_config, BlockWebhookConfig):
+ # Enforce presence of credentials field on auto-setup webhook blocks
+ if not (cred_fields := self.input_schema.get_credentials_fields()):
+ raise TypeError(
+ "credentials field is required on auto-setup webhook blocks"
+ )
+ # Disallow multiple credentials inputs on webhook blocks
+ elif len(cred_fields) > 1:
+ raise ValueError(
+ "Multiple credentials inputs not supported on webhook blocks"
+ )
+
+ self.block_type = BlockType.WEBHOOK
+ else:
+ self.block_type = BlockType.WEBHOOK_MANUAL
+
+ # Enforce shape of webhook event filter, if present
+ if self.webhook_config.event_filter_input:
+ event_filter_field = self.input_schema.model_fields[
+ self.webhook_config.event_filter_input
+ ]
+ if not (
+ isinstance(event_filter_field.annotation, type)
+ and issubclass(event_filter_field.annotation, BaseModel)
+ and all(
+ field.annotation is bool
+ for field in event_filter_field.annotation.model_fields.values()
+ )
+ ):
+ raise NotImplementedError(
+ f"{self.name} has an invalid webhook event selector: "
+ "field must be a BaseModel and all its fields must be boolean"
+ )
+
+ # Enforce presence of 'payload' input
+ if "payload" not in self.input_schema.model_fields:
+ raise TypeError(
+ f"{self.name} is webhook-triggered but has no 'payload' input"
+ )
+
+ # Disable webhook-triggered block if webhook functionality not available
+ if not app_config.platform_base_url:
+ self.disabled = True
+
+ @abstractmethod
+ async def run(self, input_data: BlockSchemaInputType, **kwargs) -> BlockOutput:
+ """
+ Run the block with the given input data.
+ Args:
+ input_data: The input data with the structure of input_schema.
+
+        Kwargs: As of 14/02/2025, these include:
+ graph_id: The ID of the graph.
+ node_id: The ID of the node.
+ graph_exec_id: The ID of the graph execution.
+ node_exec_id: The ID of the node execution.
+ user_id: The ID of the user.
+
+ Returns:
+ A Generator that yields (output_name, output_data).
+              output_name: One of the output names defined in the Block's output_schema.
+ output_data: The data for the output_name, matching the defined schema.
+ """
+ # --- satisfy the type checker, never executed -------------
+ if False: # noqa: SIM115
+ yield "name", "value" # pyright: ignore[reportMissingYield]
+ raise NotImplementedError(f"{self.name} does not implement the run method.")
+
+ async def run_once(
+ self, input_data: BlockSchemaInputType, output: str, **kwargs
+ ) -> Any:
+ async for item in self.run(input_data, **kwargs):
+ name, data = item
+ if name == output:
+ return data
+ raise ValueError(f"{self.name} did not produce any output for {output}")
+
+ def merge_stats(self, stats: "NodeExecutionStats") -> "NodeExecutionStats":
+ self.execution_stats += stats
+ return self.execution_stats
+
+ @property
+ def name(self):
+ return self.__class__.__name__
+
+ def to_dict(self):
+ return {
+ "id": self.id,
+ "name": self.name,
+ "inputSchema": self.input_schema.jsonschema(),
+ "outputSchema": self.output_schema.jsonschema(),
+ "description": self.description,
+ "categories": [category.dict() for category in self.categories],
+ "contributors": [
+ contributor.model_dump() for contributor in self.contributors
+ ],
+ "staticOutput": self.static_output,
+ "uiType": self.block_type.value,
+ }
+
+ def get_info(self) -> BlockInfo:
+ from backend.data.credit import get_block_cost
+
+ return BlockInfo(
+ id=self.id,
+ name=self.name,
+ inputSchema=self.input_schema.jsonschema(),
+ outputSchema=self.output_schema.jsonschema(),
+ costs=get_block_cost(self),
+ description=self.description,
+ categories=[category.dict() for category in self.categories],
+ contributors=[
+ contributor.model_dump() for contributor in self.contributors
+ ],
+ staticOutput=self.static_output,
+ uiType=self.block_type.value,
+ )
+
+ async def execute(self, input_data: BlockInput, **kwargs) -> BlockOutput:
+ try:
+ async for output_name, output_data in self._execute(input_data, **kwargs):
+ yield output_name, output_data
+ except Exception as ex:
+ if isinstance(ex, BlockError):
+ raise ex
+ else:
+ raise (
+ BlockExecutionError
+ if isinstance(ex, ValueError)
+ else BlockUnknownError
+ )(
+ message=str(ex),
+ block_name=self.name,
+ block_id=self.id,
+ ) from ex
+
+ async def is_block_exec_need_review(
+ self,
+ input_data: BlockInput,
+ *,
+ user_id: str,
+ node_id: str,
+ node_exec_id: str,
+ graph_exec_id: str,
+ graph_id: str,
+ graph_version: int,
+ execution_context: "ExecutionContext",
+ **kwargs,
+ ) -> tuple[bool, BlockInput]:
+ """
+ Check if this block execution needs human review and handle the review process.
+
+ Returns:
+ Tuple of (should_pause, input_data_to_use)
+ - should_pause: True if execution should be paused for review
+ - input_data_to_use: The input data to use (may be modified by reviewer)
+ """
+ if not (
+ self.is_sensitive_action and execution_context.sensitive_action_safe_mode
+ ):
+ return False, input_data
+
+ from backend.blocks.helpers.review import HITLReviewHelper
+
+ # Handle the review request and get decision
+ decision = await HITLReviewHelper.handle_review_decision(
+ input_data=input_data,
+ user_id=user_id,
+ node_id=node_id,
+ node_exec_id=node_exec_id,
+ graph_exec_id=graph_exec_id,
+ graph_id=graph_id,
+ graph_version=graph_version,
+ block_name=self.name,
+ editable=True,
+ )
+
+ if decision is None:
+ # We're awaiting review - pause execution
+ return True, input_data
+
+ if not decision.should_proceed:
+ # Review was rejected, raise an error to stop execution
+ raise BlockExecutionError(
+ message=f"Block execution rejected by reviewer: {decision.message}",
+ block_name=self.name,
+ block_id=self.id,
+ )
+
+ # Review was approved - use the potentially modified data
+ # ReviewResult.data must be a dict for block inputs
+ reviewed_data = decision.review_result.data
+ if not isinstance(reviewed_data, dict):
+ raise BlockExecutionError(
+ message=f"Review data must be a dict for block input, got {type(reviewed_data).__name__}",
+ block_name=self.name,
+ block_id=self.id,
+ )
+ return False, reviewed_data
+
+ async def _execute(self, input_data: BlockInput, **kwargs) -> BlockOutput:
+ # Check for review requirement only if running within a graph execution context
+ # Direct block execution (e.g., from chat) skips the review process
+ has_graph_context = all(
+ key in kwargs
+ for key in (
+ "node_exec_id",
+ "graph_exec_id",
+ "graph_id",
+ "execution_context",
+ )
+ )
+ if has_graph_context:
+ should_pause, input_data = await self.is_block_exec_need_review(
+ input_data, **kwargs
+ )
+ if should_pause:
+ return
+
+ # Validate the input data (original or reviewer-modified) once
+ if error := self.input_schema.validate_data(input_data):
+ raise BlockInputError(
+ message=f"Unable to execute block with invalid input data: {error}",
+ block_name=self.name,
+ block_id=self.id,
+ )
+
+ # Use the validated input data
+ async for output_name, output_data in self.run(
+ self.input_schema(**{k: v for k, v in input_data.items() if v is not None}),
+ **kwargs,
+ ):
+ if output_name == "error":
+ raise BlockExecutionError(
+ message=output_data, block_name=self.name, block_id=self.id
+ )
+ if self.block_type == BlockType.STANDARD and (
+ error := self.output_schema.validate_field(output_name, output_data)
+ ):
+ raise BlockOutputError(
+ message=f"Block produced an invalid output data: {error}",
+ block_name=self.name,
+ block_id=self.id,
+ )
+ yield output_name, output_data
+
+ def is_triggered_by_event_type(
+ self, trigger_config: dict[str, Any], event_type: str
+ ) -> bool:
+ if not self.webhook_config:
+ raise TypeError("This method can't be used on non-trigger blocks")
+ if not self.webhook_config.event_filter_input:
+ return True
+ event_filter = trigger_config.get(self.webhook_config.event_filter_input)
+ if not event_filter:
+ raise ValueError("Event filter is not configured on trigger")
+ return event_type in [
+ self.webhook_config.event_format.format(event=k)
+ for k in event_filter
+ if event_filter[k] is True
+ ]
+
+
+# Type alias for any block with standard input/output schemas
+AnyBlockSchema: TypeAlias = Block[BlockSchemaInput, BlockSchemaOutput]
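
To show how block implementations consume the relocated base classes, here is a minimal sketch of a block written against `backend.blocks._base`. The block id is a placeholder UUID and the field names are invented for this example; real blocks follow the same shape as the ones updated later in this patch.

```python
from backend.blocks._base import (
    Block,
    BlockCategory,
    BlockOutput,
    BlockSchemaInput,
    BlockSchemaOutput,
)
from backend.data.model import SchemaField


class EchoBlock(Block):
    class Input(BlockSchemaInput):
        text: str = SchemaField(description="Text to echo back")

    class Output(BlockSchemaOutput):
        text: str = SchemaField(description="The same text, unchanged")

    def __init__(self):
        super().__init__(
            id="00000000-0000-0000-0000-000000000000",  # placeholder UUID for the sketch
            description="Echoes its input text (illustration only)",
            categories={BlockCategory.BASIC},
            input_schema=EchoBlock.Input,
            output_schema=EchoBlock.Output,
        )

    async def run(self, input_data: Input, **kwargs) -> BlockOutput:
        # Yield (output_name, output_data) pairs matching the output schema
        yield "text", input_data.text
```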
diff --git a/autogpt_platform/backend/backend/blocks/_utils.py b/autogpt_platform/backend/backend/blocks/_utils.py
new file mode 100644
index 0000000000..bec033bd2c
--- /dev/null
+++ b/autogpt_platform/backend/backend/blocks/_utils.py
@@ -0,0 +1,122 @@
+import logging
+import os
+
+from backend.integrations.providers import ProviderName
+
+from ._base import AnyBlockSchema
+
+logger = logging.getLogger(__name__)
+
+
+def is_block_auth_configured(
+ block_cls: type[AnyBlockSchema],
+) -> bool:
+ """
+ Check if a block has a valid authentication method configured at runtime.
+
+    For example, if a block is OAuth-only and the required env vars are not set,
+ do not show it in the UI.
+
+ """
+ from backend.sdk.registry import AutoRegistry
+
+ # Create an instance to access input_schema
+ try:
+ block = block_cls()
+ except Exception as e:
+ # If we can't create a block instance, assume it's not OAuth-only
+ logger.error(f"Error creating block instance for {block_cls.__name__}: {e}")
+ return True
+ logger.debug(
+ f"Checking if block {block_cls.__name__} has a valid provider configured"
+ )
+
+ # Get all credential inputs from input schema
+ credential_inputs = block.input_schema.get_credentials_fields_info()
+ required_inputs = block.input_schema.get_required_fields()
+ if not credential_inputs:
+ logger.debug(
+ f"Block {block_cls.__name__} has no credential inputs - Treating as valid"
+ )
+ return True
+
+ # Check credential inputs
+ if len(required_inputs.intersection(credential_inputs.keys())) == 0:
+ logger.debug(
+ f"Block {block_cls.__name__} has only optional credential inputs"
+ " - will work without credentials configured"
+ )
+
+ # Check if the credential inputs for this block are correctly configured
+ for field_name, field_info in credential_inputs.items():
+ provider_names = field_info.provider
+ if not provider_names:
+ logger.warning(
+ f"Block {block_cls.__name__} "
+ f"has credential input '{field_name}' with no provider options"
+ " - Disabling"
+ )
+ return False
+
+ # If a field has multiple possible providers, each one needs to be usable to
+ # prevent breaking the UX
+ for _provider_name in provider_names:
+ provider_name = _provider_name.value
+ if provider_name in ProviderName.__members__.values():
+ logger.debug(
+ f"Block {block_cls.__name__} credential input '{field_name}' "
+ f"provider '{provider_name}' is part of the legacy provider system"
+ " - Treating as valid"
+ )
+ break
+
+ provider = AutoRegistry.get_provider(provider_name)
+ if not provider:
+ logger.warning(
+ f"Block {block_cls.__name__} credential input '{field_name}' "
+ f"refers to unknown provider '{provider_name}' - Disabling"
+ )
+ return False
+
+ # Check the provider's supported auth types
+ if field_info.supported_types != provider.supported_auth_types:
+ logger.warning(
+ f"Block {block_cls.__name__} credential input '{field_name}' "
+ f"has mismatched supported auth types (field <> Provider): "
+ f"{field_info.supported_types} != {provider.supported_auth_types}"
+ )
+
+ if not (supported_auth_types := provider.supported_auth_types):
+            # No auth methods have been configured for this provider
+ logger.warning(
+ f"Block {block_cls.__name__} credential input '{field_name}' "
+ f"provider '{provider_name}' "
+ "has no authentication methods configured - Disabling"
+ )
+ return False
+
+ # Check if provider supports OAuth
+ if "oauth2" in supported_auth_types:
+ # Check if OAuth environment variables are set
+ if (oauth_config := provider.oauth_config) and bool(
+ os.getenv(oauth_config.client_id_env_var)
+ and os.getenv(oauth_config.client_secret_env_var)
+ ):
+ logger.debug(
+ f"Block {block_cls.__name__} credential input '{field_name}' "
+ f"provider '{provider_name}' is configured for OAuth"
+ )
+ else:
+ logger.error(
+ f"Block {block_cls.__name__} credential input '{field_name}' "
+ f"provider '{provider_name}' "
+ "is missing OAuth client ID or secret - Disabling"
+ )
+ return False
+
+ logger.debug(
+ f"Block {block_cls.__name__} credential input '{field_name}' is valid; "
+ f"supported credential types: {', '.join(field_info.supported_types)}"
+ )
+
+ return True
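
A sketch of how the loader applies this check, mirroring the filtering step added to `load_all_blocks()` earlier in the patch; the function and variable names here are illustrative, not part of the change.

```python
from backend.blocks._utils import is_block_auth_configured


def filter_auth_configured(available_blocks: dict) -> dict:
    # Drop blocks whose credential providers are not fully configured at runtime
    # (e.g. an OAuth-only provider with no client ID/secret in the environment),
    # so they never show up in the UI.
    return {
        block_id: block_cls
        for block_id, block_cls in available_blocks.items()
        if is_block_auth_configured(block_cls)
    }
```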
diff --git a/autogpt_platform/backend/backend/blocks/agent.py b/autogpt_platform/backend/backend/blocks/agent.py
index 0efc0a3369..574dbc2530 100644
--- a/autogpt_platform/backend/backend/blocks/agent.py
+++ b/autogpt_platform/backend/backend/blocks/agent.py
@@ -1,7 +1,7 @@
import logging
-from typing import Any, Optional
+from typing import TYPE_CHECKING, Any, Optional
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockInput,
@@ -9,13 +9,15 @@ from backend.data.block import (
BlockSchema,
BlockSchemaInput,
BlockType,
- get_block,
)
from backend.data.execution import ExecutionContext, ExecutionStatus, NodesInputMasks
from backend.data.model import NodeExecutionStats, SchemaField
from backend.util.json import validate_with_jsonschema
from backend.util.retry import func_retry
+if TYPE_CHECKING:
+ from backend.executor.utils import LogMetadata
+
_logger = logging.getLogger(__name__)
@@ -124,9 +126,10 @@ class AgentExecutorBlock(Block):
graph_version: int,
graph_exec_id: str,
user_id: str,
- logger,
+ logger: "LogMetadata",
) -> BlockOutput:
+ from backend.blocks import get_block
from backend.data.execution import ExecutionEventType
from backend.executor import utils as execution_utils
@@ -198,7 +201,7 @@ class AgentExecutorBlock(Block):
self,
graph_exec_id: str,
user_id: str,
- logger,
+ logger: "LogMetadata",
) -> None:
from backend.executor import utils as execution_utils
diff --git a/autogpt_platform/backend/backend/blocks/ai_condition.py b/autogpt_platform/backend/backend/blocks/ai_condition.py
index 2a5cdcdeec..c28c1e9f7d 100644
--- a/autogpt_platform/backend/backend/blocks/ai_condition.py
+++ b/autogpt_platform/backend/backend/blocks/ai_condition.py
@@ -1,5 +1,11 @@
from typing import Any
+from backend.blocks._base import (
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.llm import (
DEFAULT_LLM_MODEL,
TEST_CREDENTIALS,
@@ -11,12 +17,6 @@ from backend.blocks.llm import (
LLMResponse,
llm_call,
)
-from backend.data.block import (
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.model import APIKeyCredentials, NodeExecutionStats, SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/ai_image_customizer.py b/autogpt_platform/backend/backend/blocks/ai_image_customizer.py
index 91be33a60e..402e520ea0 100644
--- a/autogpt_platform/backend/backend/blocks/ai_image_customizer.py
+++ b/autogpt_platform/backend/backend/blocks/ai_image_customizer.py
@@ -6,7 +6,7 @@ from pydantic import SecretStr
from replicate.client import Client as ReplicateClient
from replicate.helpers import FileOutput
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/ai_image_generator_block.py b/autogpt_platform/backend/backend/blocks/ai_image_generator_block.py
index e40731cd97..fcea24fb01 100644
--- a/autogpt_platform/backend/backend/blocks/ai_image_generator_block.py
+++ b/autogpt_platform/backend/backend/blocks/ai_image_generator_block.py
@@ -5,7 +5,12 @@ from pydantic import SecretStr
from replicate.client import Client as ReplicateClient
from replicate.helpers import FileOutput
-from backend.data.block import Block, BlockCategory, BlockSchemaInput, BlockSchemaOutput
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.data.execution import ExecutionContext
from backend.data.model import (
APIKeyCredentials,
diff --git a/autogpt_platform/backend/backend/blocks/ai_music_generator.py b/autogpt_platform/backend/backend/blocks/ai_music_generator.py
index 1ecb78f95e..9a0639a9c0 100644
--- a/autogpt_platform/backend/backend/blocks/ai_music_generator.py
+++ b/autogpt_platform/backend/backend/blocks/ai_music_generator.py
@@ -6,7 +6,7 @@ from typing import Literal
from pydantic import SecretStr
from replicate.client import Client as ReplicateClient
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/ai_shortform_video_block.py b/autogpt_platform/backend/backend/blocks/ai_shortform_video_block.py
index eb60843185..2c53748fde 100644
--- a/autogpt_platform/backend/backend/blocks/ai_shortform_video_block.py
+++ b/autogpt_platform/backend/backend/blocks/ai_shortform_video_block.py
@@ -6,7 +6,7 @@ from typing import Literal
from pydantic import SecretStr
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/apollo/organization.py b/autogpt_platform/backend/backend/blocks/apollo/organization.py
index 93acbff0b8..6722de4a79 100644
--- a/autogpt_platform/backend/backend/blocks/apollo/organization.py
+++ b/autogpt_platform/backend/backend/blocks/apollo/organization.py
@@ -1,3 +1,10 @@
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.apollo._api import ApolloClient
from backend.blocks.apollo._auth import (
TEST_CREDENTIALS,
@@ -10,13 +17,6 @@ from backend.blocks.apollo.models import (
PrimaryPhone,
SearchOrganizationsRequest,
)
-from backend.data.block import (
- Block,
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.model import CredentialsField, SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/apollo/people.py b/autogpt_platform/backend/backend/blocks/apollo/people.py
index a58321ecfc..b5059a2a26 100644
--- a/autogpt_platform/backend/backend/blocks/apollo/people.py
+++ b/autogpt_platform/backend/backend/blocks/apollo/people.py
@@ -1,5 +1,12 @@
import asyncio
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.apollo._api import ApolloClient
from backend.blocks.apollo._auth import (
TEST_CREDENTIALS,
@@ -14,13 +21,6 @@ from backend.blocks.apollo.models import (
SearchPeopleRequest,
SenorityLevels,
)
-from backend.data.block import (
- Block,
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.model import CredentialsField, SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/apollo/person.py b/autogpt_platform/backend/backend/blocks/apollo/person.py
index 84b86d2bfd..4d586175e0 100644
--- a/autogpt_platform/backend/backend/blocks/apollo/person.py
+++ b/autogpt_platform/backend/backend/blocks/apollo/person.py
@@ -1,3 +1,10 @@
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.apollo._api import ApolloClient
from backend.blocks.apollo._auth import (
TEST_CREDENTIALS,
@@ -6,13 +13,6 @@ from backend.blocks.apollo._auth import (
ApolloCredentialsInput,
)
from backend.blocks.apollo.models import Contact, EnrichPersonRequest
-from backend.data.block import (
- Block,
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.model import CredentialsField, SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/ayrshare/_util.py b/autogpt_platform/backend/backend/blocks/ayrshare/_util.py
index 8d0b9914f9..231239310f 100644
--- a/autogpt_platform/backend/backend/blocks/ayrshare/_util.py
+++ b/autogpt_platform/backend/backend/blocks/ayrshare/_util.py
@@ -3,7 +3,7 @@ from typing import Optional
from pydantic import BaseModel, Field
-from backend.data.block import BlockSchemaInput
+from backend.blocks._base import BlockSchemaInput
from backend.data.model import SchemaField, UserIntegrations
from backend.integrations.ayrshare import AyrshareClient
from backend.util.clients import get_database_manager_async_client
diff --git a/autogpt_platform/backend/backend/blocks/basic.py b/autogpt_platform/backend/backend/blocks/basic.py
index 95193b3feb..f129d2707b 100644
--- a/autogpt_platform/backend/backend/blocks/basic.py
+++ b/autogpt_platform/backend/backend/blocks/basic.py
@@ -1,7 +1,7 @@
import enum
from typing import Any
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/block.py b/autogpt_platform/backend/backend/blocks/block.py
index 95c92a41ab..d3f482fc65 100644
--- a/autogpt_platform/backend/backend/blocks/block.py
+++ b/autogpt_platform/backend/backend/blocks/block.py
@@ -2,7 +2,7 @@ import os
import re
from typing import Type
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/branching.py b/autogpt_platform/backend/backend/blocks/branching.py
index e9177a8b65..fa4d8089ff 100644
--- a/autogpt_platform/backend/backend/blocks/branching.py
+++ b/autogpt_platform/backend/backend/blocks/branching.py
@@ -1,7 +1,7 @@
from enum import Enum
from typing import Any
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/claude_code.py b/autogpt_platform/backend/backend/blocks/claude_code.py
index 4ef44603b2..1919406c6f 100644
--- a/autogpt_platform/backend/backend/blocks/claude_code.py
+++ b/autogpt_platform/backend/backend/blocks/claude_code.py
@@ -6,7 +6,7 @@ from typing import Literal, Optional
from e2b import AsyncSandbox as BaseAsyncSandbox
from pydantic import BaseModel, SecretStr
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/code_executor.py b/autogpt_platform/backend/backend/blocks/code_executor.py
index be6f2bba55..766f44b7bb 100644
--- a/autogpt_platform/backend/backend/blocks/code_executor.py
+++ b/autogpt_platform/backend/backend/blocks/code_executor.py
@@ -6,7 +6,7 @@ from e2b_code_interpreter import Result as E2BExecutionResult
from e2b_code_interpreter.charts import Chart as E2BExecutionResultChart
from pydantic import BaseModel, Field, JsonValue, SecretStr
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/code_extraction_block.py b/autogpt_platform/backend/backend/blocks/code_extraction_block.py
index 98f40c7a8b..bde4bc9fc6 100644
--- a/autogpt_platform/backend/backend/blocks/code_extraction_block.py
+++ b/autogpt_platform/backend/backend/blocks/code_extraction_block.py
@@ -1,6 +1,6 @@
import re
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/codex.py b/autogpt_platform/backend/backend/blocks/codex.py
index 1b907cafce..07dffec39f 100644
--- a/autogpt_platform/backend/backend/blocks/codex.py
+++ b/autogpt_platform/backend/backend/blocks/codex.py
@@ -6,7 +6,7 @@ from openai import AsyncOpenAI
from openai.types.responses import Response as OpenAIResponse
from pydantic import SecretStr
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/compass/triggers.py b/autogpt_platform/backend/backend/blocks/compass/triggers.py
index f6ac8dfd81..2afd03852e 100644
--- a/autogpt_platform/backend/backend/blocks/compass/triggers.py
+++ b/autogpt_platform/backend/backend/blocks/compass/triggers.py
@@ -1,6 +1,6 @@
from pydantic import BaseModel
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockManualWebhookConfig,
diff --git a/autogpt_platform/backend/backend/blocks/count_words_and_char_block.py b/autogpt_platform/backend/backend/blocks/count_words_and_char_block.py
index 20a5077a2d..041f1bfaa1 100644
--- a/autogpt_platform/backend/backend/blocks/count_words_and_char_block.py
+++ b/autogpt_platform/backend/backend/blocks/count_words_and_char_block.py
@@ -1,4 +1,4 @@
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/data_manipulation.py b/autogpt_platform/backend/backend/blocks/data_manipulation.py
index 1014236b8c..a8f25ecb18 100644
--- a/autogpt_platform/backend/backend/blocks/data_manipulation.py
+++ b/autogpt_platform/backend/backend/blocks/data_manipulation.py
@@ -1,6 +1,6 @@
from typing import Any, List
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/decoder_block.py b/autogpt_platform/backend/backend/blocks/decoder_block.py
index 7a7406bd1a..b9eb56e48f 100644
--- a/autogpt_platform/backend/backend/blocks/decoder_block.py
+++ b/autogpt_platform/backend/backend/blocks/decoder_block.py
@@ -1,6 +1,6 @@
import codecs
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/discord/bot_blocks.py b/autogpt_platform/backend/backend/blocks/discord/bot_blocks.py
index 4438af1955..4ec3d0eec2 100644
--- a/autogpt_platform/backend/backend/blocks/discord/bot_blocks.py
+++ b/autogpt_platform/backend/backend/blocks/discord/bot_blocks.py
@@ -8,7 +8,7 @@ from typing import Any, Literal, cast
import discord
from pydantic import SecretStr
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/discord/oauth_blocks.py b/autogpt_platform/backend/backend/blocks/discord/oauth_blocks.py
index ca20eb6337..74e9229776 100644
--- a/autogpt_platform/backend/backend/blocks/discord/oauth_blocks.py
+++ b/autogpt_platform/backend/backend/blocks/discord/oauth_blocks.py
@@ -2,7 +2,7 @@
Discord OAuth-based blocks.
"""
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/email_block.py b/autogpt_platform/backend/backend/blocks/email_block.py
index fad2f411cb..626bb6cdac 100644
--- a/autogpt_platform/backend/backend/blocks/email_block.py
+++ b/autogpt_platform/backend/backend/blocks/email_block.py
@@ -7,7 +7,7 @@ from typing import Literal
from pydantic import BaseModel, ConfigDict, SecretStr
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/encoder_block.py b/autogpt_platform/backend/backend/blocks/encoder_block.py
index b60a4ae828..bfab8f4555 100644
--- a/autogpt_platform/backend/backend/blocks/encoder_block.py
+++ b/autogpt_platform/backend/backend/blocks/encoder_block.py
@@ -2,7 +2,7 @@
import codecs
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/enrichlayer/linkedin.py b/autogpt_platform/backend/backend/blocks/enrichlayer/linkedin.py
index 974ad28eed..de06230c00 100644
--- a/autogpt_platform/backend/backend/blocks/enrichlayer/linkedin.py
+++ b/autogpt_platform/backend/backend/blocks/enrichlayer/linkedin.py
@@ -8,7 +8,7 @@ which provides access to LinkedIn profile data and related information.
import logging
from typing import Optional
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/fal/ai_video_generator.py b/autogpt_platform/backend/backend/blocks/fal/ai_video_generator.py
index c2079ef159..945e53578c 100644
--- a/autogpt_platform/backend/backend/blocks/fal/ai_video_generator.py
+++ b/autogpt_platform/backend/backend/blocks/fal/ai_video_generator.py
@@ -3,6 +3,13 @@ import logging
from enum import Enum
from typing import Any
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.fal._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -10,13 +17,6 @@ from backend.blocks.fal._auth import (
FalCredentialsField,
FalCredentialsInput,
)
-from backend.data.block import (
- Block,
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.execution import ExecutionContext
from backend.data.model import SchemaField
from backend.util.file import store_media_file
diff --git a/autogpt_platform/backend/backend/blocks/flux_kontext.py b/autogpt_platform/backend/backend/blocks/flux_kontext.py
index d56baa6d92..f2b35aee40 100644
--- a/autogpt_platform/backend/backend/blocks/flux_kontext.py
+++ b/autogpt_platform/backend/backend/blocks/flux_kontext.py
@@ -5,7 +5,7 @@ from pydantic import SecretStr
from replicate.client import Client as ReplicateClient
from replicate.helpers import FileOutput
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/github/checks.py b/autogpt_platform/backend/backend/blocks/github/checks.py
index 02bc8d2400..99feefec88 100644
--- a/autogpt_platform/backend/backend/blocks/github/checks.py
+++ b/autogpt_platform/backend/backend/blocks/github/checks.py
@@ -3,7 +3,7 @@ from typing import Optional
from pydantic import BaseModel
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/github/ci.py b/autogpt_platform/backend/backend/blocks/github/ci.py
index 8ba58e389e..c717be96e7 100644
--- a/autogpt_platform/backend/backend/blocks/github/ci.py
+++ b/autogpt_platform/backend/backend/blocks/github/ci.py
@@ -5,7 +5,7 @@ from typing import Optional
from typing_extensions import TypedDict
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/github/issues.py b/autogpt_platform/backend/backend/blocks/github/issues.py
index 22b4149663..7269c44f73 100644
--- a/autogpt_platform/backend/backend/blocks/github/issues.py
+++ b/autogpt_platform/backend/backend/blocks/github/issues.py
@@ -3,7 +3,7 @@ from urllib.parse import urlparse
from typing_extensions import TypedDict
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/github/pull_requests.py b/autogpt_platform/backend/backend/blocks/github/pull_requests.py
index 9049037716..b336c7bfa3 100644
--- a/autogpt_platform/backend/backend/blocks/github/pull_requests.py
+++ b/autogpt_platform/backend/backend/blocks/github/pull_requests.py
@@ -2,7 +2,7 @@ import re
from typing_extensions import TypedDict
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/github/repo.py b/autogpt_platform/backend/backend/blocks/github/repo.py
index 78ce26bfad..9b1e60b00c 100644
--- a/autogpt_platform/backend/backend/blocks/github/repo.py
+++ b/autogpt_platform/backend/backend/blocks/github/repo.py
@@ -2,7 +2,7 @@ import base64
from typing_extensions import TypedDict
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/github/reviews.py b/autogpt_platform/backend/backend/blocks/github/reviews.py
index 11718d1402..932362c09a 100644
--- a/autogpt_platform/backend/backend/blocks/github/reviews.py
+++ b/autogpt_platform/backend/backend/blocks/github/reviews.py
@@ -4,7 +4,7 @@ from typing import Any, List, Optional
from typing_extensions import TypedDict
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/github/statuses.py b/autogpt_platform/backend/backend/blocks/github/statuses.py
index 42826a8a51..caa1282a9b 100644
--- a/autogpt_platform/backend/backend/blocks/github/statuses.py
+++ b/autogpt_platform/backend/backend/blocks/github/statuses.py
@@ -3,7 +3,7 @@ from typing import Optional
from pydantic import BaseModel
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/github/triggers.py b/autogpt_platform/backend/backend/blocks/github/triggers.py
index 2fc568a468..e35dbb4123 100644
--- a/autogpt_platform/backend/backend/blocks/github/triggers.py
+++ b/autogpt_platform/backend/backend/blocks/github/triggers.py
@@ -4,7 +4,7 @@ from pathlib import Path
from pydantic import BaseModel
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/google/calendar.py b/autogpt_platform/backend/backend/blocks/google/calendar.py
index 55c41f047c..b9fda2cf31 100644
--- a/autogpt_platform/backend/backend/blocks/google/calendar.py
+++ b/autogpt_platform/backend/backend/blocks/google/calendar.py
@@ -8,7 +8,7 @@ from google.oauth2.credentials import Credentials
from googleapiclient.discovery import build
from pydantic import BaseModel
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/google/docs.py b/autogpt_platform/backend/backend/blocks/google/docs.py
index 7840cbae73..33aab4638d 100644
--- a/autogpt_platform/backend/backend/blocks/google/docs.py
+++ b/autogpt_platform/backend/backend/blocks/google/docs.py
@@ -7,14 +7,14 @@ from google.oauth2.credentials import Credentials
from googleapiclient.discovery import build
from gravitas_md2gdocs import to_requests
-from backend.blocks.google._drive import GoogleDriveFile, GoogleDriveFileField
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
BlockSchemaInput,
BlockSchemaOutput,
)
+from backend.blocks.google._drive import GoogleDriveFile, GoogleDriveFileField
from backend.data.model import SchemaField
from backend.util.settings import Settings
diff --git a/autogpt_platform/backend/backend/blocks/google/gmail.py b/autogpt_platform/backend/backend/blocks/google/gmail.py
index 2040cabe3f..2051f86b9e 100644
--- a/autogpt_platform/backend/backend/blocks/google/gmail.py
+++ b/autogpt_platform/backend/backend/blocks/google/gmail.py
@@ -14,7 +14,7 @@ from google.oauth2.credentials import Credentials
from googleapiclient.discovery import build
from pydantic import BaseModel, Field
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/google/sheets.py b/autogpt_platform/backend/backend/blocks/google/sheets.py
index da541d3bf5..6e21008a23 100644
--- a/autogpt_platform/backend/backend/blocks/google/sheets.py
+++ b/autogpt_platform/backend/backend/blocks/google/sheets.py
@@ -7,14 +7,14 @@ from enum import Enum
from google.oauth2.credentials import Credentials
from googleapiclient.discovery import build
-from backend.blocks.google._drive import GoogleDriveFile, GoogleDriveFileField
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
BlockSchemaInput,
BlockSchemaOutput,
)
+from backend.blocks.google._drive import GoogleDriveFile, GoogleDriveFileField
from backend.data.model import SchemaField
from backend.util.settings import Settings
diff --git a/autogpt_platform/backend/backend/blocks/google_maps.py b/autogpt_platform/backend/backend/blocks/google_maps.py
index 2ee2959326..bab0841c5d 100644
--- a/autogpt_platform/backend/backend/blocks/google_maps.py
+++ b/autogpt_platform/backend/backend/blocks/google_maps.py
@@ -3,7 +3,7 @@ from typing import Literal
import googlemaps
from pydantic import BaseModel, SecretStr
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/helpers/review.py b/autogpt_platform/backend/backend/blocks/helpers/review.py
index 4bd85e424b..23d1af6db3 100644
--- a/autogpt_platform/backend/backend/blocks/helpers/review.py
+++ b/autogpt_platform/backend/backend/blocks/helpers/review.py
@@ -9,9 +9,7 @@ from typing import Any, Optional
from prisma.enums import ReviewStatus
from pydantic import BaseModel
-from backend.data.execution import ExecutionStatus
from backend.data.human_review import ReviewResult
-from backend.executor.manager import async_update_node_execution_status
from backend.util.clients import get_database_manager_async_client
logger = logging.getLogger(__name__)
@@ -43,6 +41,8 @@ class HITLReviewHelper:
@staticmethod
async def update_node_execution_status(**kwargs) -> None:
"""Update the execution status of a node."""
+ from backend.executor.manager import async_update_node_execution_status
+
await async_update_node_execution_status(
db_client=get_database_manager_async_client(), **kwargs
)
@@ -88,12 +88,13 @@ class HITLReviewHelper:
Raises:
Exception: If review creation or status update fails
"""
+ from backend.data.execution import ExecutionStatus
+
# Note: Safe mode checks (human_in_the_loop_safe_mode, sensitive_action_safe_mode)
# are handled by the caller:
# - HITL blocks check human_in_the_loop_safe_mode in their run() method
# - Sensitive action blocks check sensitive_action_safe_mode in is_block_exec_need_review()
# This function only handles checking for existing approvals.
-
# Check if this node has already been approved (normal or auto-approval)
if approval_result := await HITLReviewHelper.check_approval(
node_exec_id=node_exec_id,
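
The helpers/review.py hunks above move two imports (async_update_node_execution_status and ExecutionStatus) from module scope into the methods that use them. The diff only shows the relocation; the usual reason for this pattern, and the assumption made here, is to avoid an import cycle — the blocks package no longer pulls in the executor or the execution data layer at import time. A minimal sketch of the deferred-import pattern, with hypothetical module and function names:

    # Hypothetical module illustrating a function-local (deferred) import.
    async def update_status(**kwargs) -> None:
        # Imported here rather than at module scope so that loading this module
        # does not load executor_module (which may itself import this module).
        from executor_module import do_update  # hypothetical target

        await do_update(**kwargs)
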
diff --git a/autogpt_platform/backend/backend/blocks/http.py b/autogpt_platform/backend/backend/blocks/http.py
index 77e7fe243f..21c2964412 100644
--- a/autogpt_platform/backend/backend/blocks/http.py
+++ b/autogpt_platform/backend/backend/blocks/http.py
@@ -8,7 +8,7 @@ from typing import Literal
import aiofiles
from pydantic import SecretStr
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/hubspot/company.py b/autogpt_platform/backend/backend/blocks/hubspot/company.py
index dee9169e59..543d16db0c 100644
--- a/autogpt_platform/backend/backend/blocks/hubspot/company.py
+++ b/autogpt_platform/backend/backend/blocks/hubspot/company.py
@@ -1,15 +1,15 @@
-from backend.blocks.hubspot._auth import (
- HubSpotCredentials,
- HubSpotCredentialsField,
- HubSpotCredentialsInput,
-)
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
BlockSchemaInput,
BlockSchemaOutput,
)
+from backend.blocks.hubspot._auth import (
+ HubSpotCredentials,
+ HubSpotCredentialsField,
+ HubSpotCredentialsInput,
+)
from backend.data.model import SchemaField
from backend.util.request import Requests
diff --git a/autogpt_platform/backend/backend/blocks/hubspot/contact.py b/autogpt_platform/backend/backend/blocks/hubspot/contact.py
index b4451c3b8b..1cdbf99b39 100644
--- a/autogpt_platform/backend/backend/blocks/hubspot/contact.py
+++ b/autogpt_platform/backend/backend/blocks/hubspot/contact.py
@@ -1,15 +1,15 @@
-from backend.blocks.hubspot._auth import (
- HubSpotCredentials,
- HubSpotCredentialsField,
- HubSpotCredentialsInput,
-)
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
BlockSchemaInput,
BlockSchemaOutput,
)
+from backend.blocks.hubspot._auth import (
+ HubSpotCredentials,
+ HubSpotCredentialsField,
+ HubSpotCredentialsInput,
+)
from backend.data.model import SchemaField
from backend.util.request import Requests
diff --git a/autogpt_platform/backend/backend/blocks/hubspot/engagement.py b/autogpt_platform/backend/backend/blocks/hubspot/engagement.py
index 683607c5b3..9408a543b6 100644
--- a/autogpt_platform/backend/backend/blocks/hubspot/engagement.py
+++ b/autogpt_platform/backend/backend/blocks/hubspot/engagement.py
@@ -1,17 +1,17 @@
from datetime import datetime, timedelta
-from backend.blocks.hubspot._auth import (
- HubSpotCredentials,
- HubSpotCredentialsField,
- HubSpotCredentialsInput,
-)
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
BlockSchemaInput,
BlockSchemaOutput,
)
+from backend.blocks.hubspot._auth import (
+ HubSpotCredentials,
+ HubSpotCredentialsField,
+ HubSpotCredentialsInput,
+)
from backend.data.model import SchemaField
from backend.util.request import Requests
diff --git a/autogpt_platform/backend/backend/blocks/human_in_the_loop.py b/autogpt_platform/backend/backend/blocks/human_in_the_loop.py
index d31f90ec81..69c52081d8 100644
--- a/autogpt_platform/backend/backend/blocks/human_in_the_loop.py
+++ b/autogpt_platform/backend/backend/blocks/human_in_the_loop.py
@@ -3,8 +3,7 @@ from typing import Any
from prisma.enums import ReviewStatus
-from backend.blocks.helpers.review import HITLReviewHelper
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
@@ -12,6 +11,7 @@ from backend.data.block import (
BlockSchemaOutput,
BlockType,
)
+from backend.blocks.helpers.review import HITLReviewHelper
from backend.data.execution import ExecutionContext
from backend.data.human_review import ReviewResult
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/ideogram.py b/autogpt_platform/backend/backend/blocks/ideogram.py
index 09a384c74a..5aed4aa5a9 100644
--- a/autogpt_platform/backend/backend/blocks/ideogram.py
+++ b/autogpt_platform/backend/backend/blocks/ideogram.py
@@ -3,7 +3,7 @@ from typing import Any, Dict, Literal, Optional
from pydantic import SecretStr
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/io.py b/autogpt_platform/backend/backend/blocks/io.py
index a9c3859490..94542790ef 100644
--- a/autogpt_platform/backend/backend/blocks/io.py
+++ b/autogpt_platform/backend/backend/blocks/io.py
@@ -2,9 +2,7 @@ import copy
from datetime import date, time
from typing import Any, Optional
-# Import for Google Drive file input block
-from backend.blocks.google._drive import AttachmentView, GoogleDriveFile
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
@@ -12,6 +10,9 @@ from backend.data.block import (
BlockSchemaInput,
BlockType,
)
+
+# Import for Google Drive file input block
+from backend.blocks.google._drive import AttachmentView, GoogleDriveFile
from backend.data.execution import ExecutionContext
from backend.data.model import SchemaField
from backend.util.file import store_media_file
diff --git a/autogpt_platform/backend/backend/blocks/iteration.py b/autogpt_platform/backend/backend/blocks/iteration.py
index 441f73fc4a..a35bcac9c1 100644
--- a/autogpt_platform/backend/backend/blocks/iteration.py
+++ b/autogpt_platform/backend/backend/blocks/iteration.py
@@ -1,6 +1,6 @@
from typing import Any
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/jina/chunking.py b/autogpt_platform/backend/backend/blocks/jina/chunking.py
index 9a9b242aae..c248e3dd24 100644
--- a/autogpt_platform/backend/backend/blocks/jina/chunking.py
+++ b/autogpt_platform/backend/backend/blocks/jina/chunking.py
@@ -1,15 +1,15 @@
-from backend.blocks.jina._auth import (
- JinaCredentials,
- JinaCredentialsField,
- JinaCredentialsInput,
-)
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
BlockSchemaInput,
BlockSchemaOutput,
)
+from backend.blocks.jina._auth import (
+ JinaCredentials,
+ JinaCredentialsField,
+ JinaCredentialsInput,
+)
from backend.data.model import SchemaField
from backend.util.request import Requests
diff --git a/autogpt_platform/backend/backend/blocks/jina/embeddings.py b/autogpt_platform/backend/backend/blocks/jina/embeddings.py
index 0f6cf68c6c..f787de03b3 100644
--- a/autogpt_platform/backend/backend/blocks/jina/embeddings.py
+++ b/autogpt_platform/backend/backend/blocks/jina/embeddings.py
@@ -1,15 +1,15 @@
-from backend.blocks.jina._auth import (
- JinaCredentials,
- JinaCredentialsField,
- JinaCredentialsInput,
-)
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
BlockSchemaInput,
BlockSchemaOutput,
)
+from backend.blocks.jina._auth import (
+ JinaCredentials,
+ JinaCredentialsField,
+ JinaCredentialsInput,
+)
from backend.data.model import SchemaField
from backend.util.request import Requests
diff --git a/autogpt_platform/backend/backend/blocks/jina/fact_checker.py b/autogpt_platform/backend/backend/blocks/jina/fact_checker.py
index 3367ab99e6..df73ef94b1 100644
--- a/autogpt_platform/backend/backend/blocks/jina/fact_checker.py
+++ b/autogpt_platform/backend/backend/blocks/jina/fact_checker.py
@@ -3,18 +3,18 @@ from urllib.parse import quote
from typing_extensions import TypedDict
-from backend.blocks.jina._auth import (
- JinaCredentials,
- JinaCredentialsField,
- JinaCredentialsInput,
-)
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
BlockSchemaInput,
BlockSchemaOutput,
)
+from backend.blocks.jina._auth import (
+ JinaCredentials,
+ JinaCredentialsField,
+ JinaCredentialsInput,
+)
from backend.data.model import SchemaField
from backend.util.request import Requests
diff --git a/autogpt_platform/backend/backend/blocks/jina/search.py b/autogpt_platform/backend/backend/blocks/jina/search.py
index 05cddcc1df..22a883fa03 100644
--- a/autogpt_platform/backend/backend/blocks/jina/search.py
+++ b/autogpt_platform/backend/backend/blocks/jina/search.py
@@ -1,5 +1,12 @@
from urllib.parse import quote
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.jina._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -8,13 +15,6 @@ from backend.blocks.jina._auth import (
JinaCredentialsInput,
)
from backend.blocks.search import GetRequest
-from backend.data.block import (
- Block,
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.model import SchemaField
from backend.util.exceptions import BlockExecutionError
diff --git a/autogpt_platform/backend/backend/blocks/llm.py b/autogpt_platform/backend/backend/blocks/llm.py
index 7a020593d7..1272a9ec1b 100644
--- a/autogpt_platform/backend/backend/blocks/llm.py
+++ b/autogpt_platform/backend/backend/blocks/llm.py
@@ -15,7 +15,7 @@ from anthropic.types import ToolParam
from groq import AsyncGroq
from pydantic import BaseModel, SecretStr
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/maths.py b/autogpt_platform/backend/backend/blocks/maths.py
index ad6dc67bbe..0f94075277 100644
--- a/autogpt_platform/backend/backend/blocks/maths.py
+++ b/autogpt_platform/backend/backend/blocks/maths.py
@@ -2,7 +2,7 @@ import operator
from enum import Enum
from typing import Any
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/medium.py b/autogpt_platform/backend/backend/blocks/medium.py
index d54062d3ab..f511f19329 100644
--- a/autogpt_platform/backend/backend/blocks/medium.py
+++ b/autogpt_platform/backend/backend/blocks/medium.py
@@ -3,7 +3,7 @@ from typing import List, Literal
from pydantic import SecretStr
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/mem0.py b/autogpt_platform/backend/backend/blocks/mem0.py
index b8dc11064a..ba0bd24290 100644
--- a/autogpt_platform/backend/backend/blocks/mem0.py
+++ b/autogpt_platform/backend/backend/blocks/mem0.py
@@ -3,7 +3,7 @@ from typing import Any, Literal, Optional, Union
from mem0 import MemoryClient
from pydantic import BaseModel, SecretStr
-from backend.data.block import Block, BlockOutput, BlockSchemaInput, BlockSchemaOutput
+from backend.blocks._base import Block, BlockOutput, BlockSchemaInput, BlockSchemaOutput
from backend.data.model import (
APIKeyCredentials,
CredentialsField,
diff --git a/autogpt_platform/backend/backend/blocks/notion/create_page.py b/autogpt_platform/backend/backend/blocks/notion/create_page.py
index 5edef144e3..315730d37c 100644
--- a/autogpt_platform/backend/backend/blocks/notion/create_page.py
+++ b/autogpt_platform/backend/backend/blocks/notion/create_page.py
@@ -4,7 +4,7 @@ from typing import Any, Dict, List, Optional
from pydantic import model_validator
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/notion/read_database.py b/autogpt_platform/backend/backend/blocks/notion/read_database.py
index 5720bea2f8..7b1dcf7be4 100644
--- a/autogpt_platform/backend/backend/blocks/notion/read_database.py
+++ b/autogpt_platform/backend/backend/blocks/notion/read_database.py
@@ -2,7 +2,7 @@ from __future__ import annotations
from typing import Any, Dict, List, Optional
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/notion/read_page.py b/autogpt_platform/backend/backend/blocks/notion/read_page.py
index 400fd2a929..a2b5273ad9 100644
--- a/autogpt_platform/backend/backend/blocks/notion/read_page.py
+++ b/autogpt_platform/backend/backend/blocks/notion/read_page.py
@@ -1,6 +1,6 @@
from __future__ import annotations
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/notion/read_page_markdown.py b/autogpt_platform/backend/backend/blocks/notion/read_page_markdown.py
index 7ed87eaef9..cad3e85e79 100644
--- a/autogpt_platform/backend/backend/blocks/notion/read_page_markdown.py
+++ b/autogpt_platform/backend/backend/blocks/notion/read_page_markdown.py
@@ -1,6 +1,6 @@
from __future__ import annotations
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/notion/search.py b/autogpt_platform/backend/backend/blocks/notion/search.py
index 1983763537..71af844b64 100644
--- a/autogpt_platform/backend/backend/blocks/notion/search.py
+++ b/autogpt_platform/backend/backend/blocks/notion/search.py
@@ -4,7 +4,7 @@ from typing import List, Optional
from pydantic import BaseModel
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/nvidia/deepfake.py b/autogpt_platform/backend/backend/blocks/nvidia/deepfake.py
index f60b649839..06b05ebc50 100644
--- a/autogpt_platform/backend/backend/blocks/nvidia/deepfake.py
+++ b/autogpt_platform/backend/backend/blocks/nvidia/deepfake.py
@@ -1,15 +1,15 @@
-from backend.blocks.nvidia._auth import (
- NvidiaCredentials,
- NvidiaCredentialsField,
- NvidiaCredentialsInput,
-)
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
BlockSchemaInput,
BlockSchemaOutput,
)
+from backend.blocks.nvidia._auth import (
+ NvidiaCredentials,
+ NvidiaCredentialsField,
+ NvidiaCredentialsInput,
+)
from backend.data.model import SchemaField
from backend.util.request import Requests
from backend.util.type import MediaFileType
diff --git a/autogpt_platform/backend/backend/blocks/perplexity.py b/autogpt_platform/backend/backend/blocks/perplexity.py
index e2796718a9..270081a3a8 100644
--- a/autogpt_platform/backend/backend/blocks/perplexity.py
+++ b/autogpt_platform/backend/backend/blocks/perplexity.py
@@ -6,7 +6,7 @@ from typing import Any, Literal
import openai
from pydantic import SecretStr
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/persistence.py b/autogpt_platform/backend/backend/blocks/persistence.py
index a327fd22c7..7584993beb 100644
--- a/autogpt_platform/backend/backend/blocks/persistence.py
+++ b/autogpt_platform/backend/backend/blocks/persistence.py
@@ -1,7 +1,7 @@
import logging
from typing import Any, Literal
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/pinecone.py b/autogpt_platform/backend/backend/blocks/pinecone.py
index 878f6f72fb..f882212ab2 100644
--- a/autogpt_platform/backend/backend/blocks/pinecone.py
+++ b/autogpt_platform/backend/backend/blocks/pinecone.py
@@ -3,7 +3,7 @@ from typing import Any, Literal
from pinecone import Pinecone, ServerlessSpec
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/reddit.py b/autogpt_platform/backend/backend/blocks/reddit.py
index 1109d568db..6544c698a3 100644
--- a/autogpt_platform/backend/backend/blocks/reddit.py
+++ b/autogpt_platform/backend/backend/blocks/reddit.py
@@ -6,7 +6,7 @@ import praw
from praw.models import Comment, MoreComments, Submission
from pydantic import BaseModel, SecretStr
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/replicate/flux_advanced.py b/autogpt_platform/backend/backend/blocks/replicate/flux_advanced.py
index c112ce75c4..e7a0a82cce 100644
--- a/autogpt_platform/backend/backend/blocks/replicate/flux_advanced.py
+++ b/autogpt_platform/backend/backend/blocks/replicate/flux_advanced.py
@@ -4,19 +4,19 @@ from enum import Enum
from pydantic import SecretStr
from replicate.client import Client as ReplicateClient
-from backend.blocks.replicate._auth import (
- TEST_CREDENTIALS,
- TEST_CREDENTIALS_INPUT,
- ReplicateCredentialsInput,
-)
-from backend.blocks.replicate._helper import ReplicateOutputs, extract_result
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
BlockSchemaInput,
BlockSchemaOutput,
)
+from backend.blocks.replicate._auth import (
+ TEST_CREDENTIALS,
+ TEST_CREDENTIALS_INPUT,
+ ReplicateCredentialsInput,
+)
+from backend.blocks.replicate._helper import ReplicateOutputs, extract_result
from backend.data.model import APIKeyCredentials, CredentialsField, SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/replicate/replicate_block.py b/autogpt_platform/backend/backend/blocks/replicate/replicate_block.py
index 7ee054d02e..2758c7cd06 100644
--- a/autogpt_platform/backend/backend/blocks/replicate/replicate_block.py
+++ b/autogpt_platform/backend/backend/blocks/replicate/replicate_block.py
@@ -4,19 +4,19 @@ from typing import Optional
from pydantic import SecretStr
from replicate.client import Client as ReplicateClient
-from backend.blocks.replicate._auth import (
- TEST_CREDENTIALS,
- TEST_CREDENTIALS_INPUT,
- ReplicateCredentialsInput,
-)
-from backend.blocks.replicate._helper import ReplicateOutputs, extract_result
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
BlockSchemaInput,
BlockSchemaOutput,
)
+from backend.blocks.replicate._auth import (
+ TEST_CREDENTIALS,
+ TEST_CREDENTIALS_INPUT,
+ ReplicateCredentialsInput,
+)
+from backend.blocks.replicate._helper import ReplicateOutputs, extract_result
from backend.data.model import APIKeyCredentials, CredentialsField, SchemaField
from backend.util.exceptions import BlockExecutionError, BlockInputError
diff --git a/autogpt_platform/backend/backend/blocks/rss.py b/autogpt_platform/backend/backend/blocks/rss.py
index a23b3ee25c..5d26bc592c 100644
--- a/autogpt_platform/backend/backend/blocks/rss.py
+++ b/autogpt_platform/backend/backend/blocks/rss.py
@@ -6,7 +6,7 @@ from typing import Any
import feedparser
import pydantic
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/sampling.py b/autogpt_platform/backend/backend/blocks/sampling.py
index b4463947a7..eb5f47e80e 100644
--- a/autogpt_platform/backend/backend/blocks/sampling.py
+++ b/autogpt_platform/backend/backend/blocks/sampling.py
@@ -3,7 +3,7 @@ from collections import defaultdict
from enum import Enum
from typing import Any, Dict, List, Optional, Union
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/screenshotone.py b/autogpt_platform/backend/backend/blocks/screenshotone.py
index ee998f8da2..1ce133af83 100644
--- a/autogpt_platform/backend/backend/blocks/screenshotone.py
+++ b/autogpt_platform/backend/backend/blocks/screenshotone.py
@@ -4,7 +4,7 @@ from typing import Literal
from pydantic import SecretStr
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/search.py b/autogpt_platform/backend/backend/blocks/search.py
index 09e16034a3..61acb2108e 100644
--- a/autogpt_platform/backend/backend/blocks/search.py
+++ b/autogpt_platform/backend/backend/blocks/search.py
@@ -3,14 +3,14 @@ from urllib.parse import quote
from pydantic import SecretStr
-from backend.blocks.helpers.http import GetRequest
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
BlockSchemaInput,
BlockSchemaOutput,
)
+from backend.blocks.helpers.http import GetRequest
from backend.data.model import (
APIKeyCredentials,
CredentialsField,
diff --git a/autogpt_platform/backend/backend/blocks/slant3d/base.py b/autogpt_platform/backend/backend/blocks/slant3d/base.py
index e368a1b451..3ce24f8ddc 100644
--- a/autogpt_platform/backend/backend/blocks/slant3d/base.py
+++ b/autogpt_platform/backend/backend/blocks/slant3d/base.py
@@ -1,6 +1,6 @@
from typing import Any, Dict
-from backend.data.block import Block
+from backend.blocks._base import Block
from backend.util.request import Requests
from ._api import Color, CustomerDetails, OrderItem, Profile
diff --git a/autogpt_platform/backend/backend/blocks/slant3d/filament.py b/autogpt_platform/backend/backend/blocks/slant3d/filament.py
index f2b9eae38d..723ebff59e 100644
--- a/autogpt_platform/backend/backend/blocks/slant3d/filament.py
+++ b/autogpt_platform/backend/backend/blocks/slant3d/filament.py
@@ -1,6 +1,6 @@
from typing import List
-from backend.data.block import BlockOutput, BlockSchemaInput, BlockSchemaOutput
+from backend.blocks._base import BlockOutput, BlockSchemaInput, BlockSchemaOutput
from backend.data.model import APIKeyCredentials, SchemaField
from ._api import (
diff --git a/autogpt_platform/backend/backend/blocks/slant3d/order.py b/autogpt_platform/backend/backend/blocks/slant3d/order.py
index 4ece3fc51e..36d2705ea5 100644
--- a/autogpt_platform/backend/backend/blocks/slant3d/order.py
+++ b/autogpt_platform/backend/backend/blocks/slant3d/order.py
@@ -1,7 +1,7 @@
import uuid
from typing import List
-from backend.data.block import BlockOutput, BlockSchemaInput, BlockSchemaOutput
+from backend.blocks._base import BlockOutput, BlockSchemaInput, BlockSchemaOutput
from backend.data.model import APIKeyCredentials, SchemaField
from backend.util.settings import BehaveAs, Settings
diff --git a/autogpt_platform/backend/backend/blocks/slant3d/slicing.py b/autogpt_platform/backend/backend/blocks/slant3d/slicing.py
index 1952b162d2..8740f9504f 100644
--- a/autogpt_platform/backend/backend/blocks/slant3d/slicing.py
+++ b/autogpt_platform/backend/backend/blocks/slant3d/slicing.py
@@ -1,4 +1,4 @@
-from backend.data.block import BlockOutput, BlockSchemaInput, BlockSchemaOutput
+from backend.blocks._base import BlockOutput, BlockSchemaInput, BlockSchemaOutput
from backend.data.model import APIKeyCredentials, SchemaField
from ._api import (
diff --git a/autogpt_platform/backend/backend/blocks/slant3d/webhook.py b/autogpt_platform/backend/backend/blocks/slant3d/webhook.py
index e5a2d72568..f2cb86ec09 100644
--- a/autogpt_platform/backend/backend/blocks/slant3d/webhook.py
+++ b/autogpt_platform/backend/backend/blocks/slant3d/webhook.py
@@ -1,6 +1,6 @@
from pydantic import BaseModel
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/smart_decision_maker.py b/autogpt_platform/backend/backend/blocks/smart_decision_maker.py
index ff6042eaab..5e6b11eebd 100644
--- a/autogpt_platform/backend/backend/blocks/smart_decision_maker.py
+++ b/autogpt_platform/backend/backend/blocks/smart_decision_maker.py
@@ -7,8 +7,7 @@ from typing import TYPE_CHECKING, Any
from pydantic import BaseModel
import backend.blocks.llm as llm
-from backend.blocks.agent import AgentExecutorBlock
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockInput,
@@ -17,6 +16,7 @@ from backend.data.block import (
BlockSchemaOutput,
BlockType,
)
+from backend.blocks.agent import AgentExecutorBlock
from backend.data.dynamic_fields import (
extract_base_field_name,
get_dynamic_field_description,
diff --git a/autogpt_platform/backend/backend/blocks/smartlead/campaign.py b/autogpt_platform/backend/backend/blocks/smartlead/campaign.py
index c3bf930068..302a38f4db 100644
--- a/autogpt_platform/backend/backend/blocks/smartlead/campaign.py
+++ b/autogpt_platform/backend/backend/blocks/smartlead/campaign.py
@@ -1,3 +1,10 @@
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.smartlead._api import SmartLeadClient
from backend.blocks.smartlead._auth import (
TEST_CREDENTIALS,
@@ -16,13 +23,6 @@ from backend.blocks.smartlead.models import (
SaveSequencesResponse,
Sequence,
)
-from backend.data.block import (
- Block,
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.model import CredentialsField, SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/spreadsheet.py b/autogpt_platform/backend/backend/blocks/spreadsheet.py
index a13f9e2f6d..2bbfd6776f 100644
--- a/autogpt_platform/backend/backend/blocks/spreadsheet.py
+++ b/autogpt_platform/backend/backend/blocks/spreadsheet.py
@@ -1,6 +1,6 @@
from pathlib import Path
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/system/library_operations.py b/autogpt_platform/backend/backend/blocks/system/library_operations.py
index 116da64599..b2433ce220 100644
--- a/autogpt_platform/backend/backend/blocks/system/library_operations.py
+++ b/autogpt_platform/backend/backend/blocks/system/library_operations.py
@@ -3,7 +3,7 @@ from typing import Any
from pydantic import BaseModel
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/system/store_operations.py b/autogpt_platform/backend/backend/blocks/system/store_operations.py
index e9b7a01ebe..88958a5707 100644
--- a/autogpt_platform/backend/backend/blocks/system/store_operations.py
+++ b/autogpt_platform/backend/backend/blocks/system/store_operations.py
@@ -3,7 +3,7 @@ from typing import Literal
from pydantic import BaseModel
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/talking_head.py b/autogpt_platform/backend/backend/blocks/talking_head.py
index e01e3d4023..f199d030ff 100644
--- a/autogpt_platform/backend/backend/blocks/talking_head.py
+++ b/autogpt_platform/backend/backend/blocks/talking_head.py
@@ -3,7 +3,7 @@ from typing import Literal
from pydantic import SecretStr
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/test/test_block.py b/autogpt_platform/backend/backend/blocks/test/test_block.py
index 7a1fdbcc73..c7f3ca62f2 100644
--- a/autogpt_platform/backend/backend/blocks/test/test_block.py
+++ b/autogpt_platform/backend/backend/blocks/test/test_block.py
@@ -2,7 +2,8 @@ from typing import Any, Type
import pytest
-from backend.data.block import Block, BlockSchemaInput, get_blocks
+from backend.blocks import get_blocks
+from backend.blocks._base import Block, BlockSchemaInput
from backend.data.model import SchemaField
from backend.util.test import execute_block_test
diff --git a/autogpt_platform/backend/backend/blocks/text.py b/autogpt_platform/backend/backend/blocks/text.py
index 359e22a84f..4276ff3a45 100644
--- a/autogpt_platform/backend/backend/blocks/text.py
+++ b/autogpt_platform/backend/backend/blocks/text.py
@@ -4,7 +4,7 @@ from typing import Any
import regex # Has built-in timeout support
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/text_to_speech_block.py b/autogpt_platform/backend/backend/blocks/text_to_speech_block.py
index 8fe9e1cda7..a408c8772f 100644
--- a/autogpt_platform/backend/backend/blocks/text_to_speech_block.py
+++ b/autogpt_platform/backend/backend/blocks/text_to_speech_block.py
@@ -2,7 +2,7 @@ from typing import Any, Literal
from pydantic import SecretStr
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/time_blocks.py b/autogpt_platform/backend/backend/blocks/time_blocks.py
index 3a1f4c678e..5ee13db30b 100644
--- a/autogpt_platform/backend/backend/blocks/time_blocks.py
+++ b/autogpt_platform/backend/backend/blocks/time_blocks.py
@@ -7,7 +7,7 @@ from zoneinfo import ZoneInfo
from pydantic import BaseModel
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/todoist/comments.py b/autogpt_platform/backend/backend/blocks/todoist/comments.py
index f11534cbe3..dc8eef3919 100644
--- a/autogpt_platform/backend/backend/blocks/todoist/comments.py
+++ b/autogpt_platform/backend/backend/blocks/todoist/comments.py
@@ -4,6 +4,13 @@ from pydantic import BaseModel
from todoist_api_python.api import TodoistAPI
from typing_extensions import Optional
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.todoist._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -12,13 +19,6 @@ from backend.blocks.todoist._auth import (
TodoistCredentialsField,
TodoistCredentialsInput,
)
-from backend.data.block import (
- Block,
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/todoist/labels.py b/autogpt_platform/backend/backend/blocks/todoist/labels.py
index 8107459567..0b0f26cc77 100644
--- a/autogpt_platform/backend/backend/blocks/todoist/labels.py
+++ b/autogpt_platform/backend/backend/blocks/todoist/labels.py
@@ -1,6 +1,13 @@
from todoist_api_python.api import TodoistAPI
from typing_extensions import Optional
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.todoist._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -10,13 +17,6 @@ from backend.blocks.todoist._auth import (
TodoistCredentialsInput,
)
from backend.blocks.todoist._types import Colors
-from backend.data.block import (
- Block,
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/todoist/projects.py b/autogpt_platform/backend/backend/blocks/todoist/projects.py
index c6d345c116..a35bd3d41e 100644
--- a/autogpt_platform/backend/backend/blocks/todoist/projects.py
+++ b/autogpt_platform/backend/backend/blocks/todoist/projects.py
@@ -1,6 +1,13 @@
from todoist_api_python.api import TodoistAPI
from typing_extensions import Optional
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.todoist._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -10,13 +17,6 @@ from backend.blocks.todoist._auth import (
TodoistCredentialsInput,
)
from backend.blocks.todoist._types import Colors
-from backend.data.block import (
- Block,
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/todoist/sections.py b/autogpt_platform/backend/backend/blocks/todoist/sections.py
index 52dceb70b9..23cabdb661 100644
--- a/autogpt_platform/backend/backend/blocks/todoist/sections.py
+++ b/autogpt_platform/backend/backend/blocks/todoist/sections.py
@@ -1,6 +1,13 @@
from todoist_api_python.api import TodoistAPI
from typing_extensions import Optional
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.todoist._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -9,13 +16,6 @@ from backend.blocks.todoist._auth import (
TodoistCredentialsField,
TodoistCredentialsInput,
)
-from backend.data.block import (
- Block,
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/todoist/tasks.py b/autogpt_platform/backend/backend/blocks/todoist/tasks.py
index 183a3340b3..6aaf766114 100644
--- a/autogpt_platform/backend/backend/blocks/todoist/tasks.py
+++ b/autogpt_platform/backend/backend/blocks/todoist/tasks.py
@@ -4,6 +4,13 @@ from todoist_api_python.api import TodoistAPI
from todoist_api_python.models import Task
from typing_extensions import Optional
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.todoist._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -12,13 +19,6 @@ from backend.blocks.todoist._auth import (
TodoistCredentialsField,
TodoistCredentialsInput,
)
-from backend.data.block import (
- Block,
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/twitter/_types.py b/autogpt_platform/backend/backend/blocks/twitter/_types.py
index 88050ed545..ead54677be 100644
--- a/autogpt_platform/backend/backend/blocks/twitter/_types.py
+++ b/autogpt_platform/backend/backend/blocks/twitter/_types.py
@@ -3,7 +3,7 @@ from enum import Enum
from pydantic import BaseModel
-from backend.data.block import BlockSchemaInput
+from backend.blocks._base import BlockSchemaInput
from backend.data.model import SchemaField
# -------------- Tweets -----------------
diff --git a/autogpt_platform/backend/backend/blocks/twitter/direct_message/direct_message_lookup.py b/autogpt_platform/backend/backend/blocks/twitter/direct_message/direct_message_lookup.py
index 0ce8e08535..f4b07ca53e 100644
--- a/autogpt_platform/backend/backend/blocks/twitter/direct_message/direct_message_lookup.py
+++ b/autogpt_platform/backend/backend/blocks/twitter/direct_message/direct_message_lookup.py
@@ -4,8 +4,8 @@
# import tweepy
# from tweepy.client import Response
+# from backend.blocks._base import Block, BlockCategory, BlockOutput, BlockSchema, BlockSchemaInput, BlockSchemaOutput
# from backend.blocks.twitter._serializer import IncludesSerializer, ResponseDataSerializer
-# from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema, BlockSchemaInput, BlockSchemaOutput
# from backend.data.model import SchemaField
# from backend.blocks.twitter._builders import DMExpansionsBuilder
# from backend.blocks.twitter._types import DMEventExpansion, DMEventExpansionInputs, DMEventType, DMMediaField, DMTweetField, TweetUserFields
diff --git a/autogpt_platform/backend/backend/blocks/twitter/direct_message/manage_direct_message.py b/autogpt_platform/backend/backend/blocks/twitter/direct_message/manage_direct_message.py
index cbbe019f37..0104e3e9c5 100644
--- a/autogpt_platform/backend/backend/blocks/twitter/direct_message/manage_direct_message.py
+++ b/autogpt_platform/backend/backend/blocks/twitter/direct_message/manage_direct_message.py
@@ -5,7 +5,7 @@
# import tweepy
# from tweepy.client import Response
-# from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema, BlockSchemaInput, BlockSchemaOutput
+# from backend.blocks._base import Block, BlockCategory, BlockOutput, BlockSchema, BlockSchemaInput, BlockSchemaOutput
# from backend.data.model import SchemaField
# from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
# from backend.blocks.twitter._auth import (
diff --git a/autogpt_platform/backend/backend/blocks/twitter/lists/list_follows.py b/autogpt_platform/backend/backend/blocks/twitter/lists/list_follows.py
index 5616e0ce14..93dfaef919 100644
--- a/autogpt_platform/backend/backend/blocks/twitter/lists/list_follows.py
+++ b/autogpt_platform/backend/backend/blocks/twitter/lists/list_follows.py
@@ -1,6 +1,13 @@
# from typing import cast
import tweepy
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -13,13 +20,6 @@ from backend.blocks.twitter._auth import (
# from backend.blocks.twitter._builders import UserExpansionsBuilder
# from backend.blocks.twitter._types import TweetFields, TweetUserFields, UserExpansionInputs, UserExpansions
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
-from backend.data.block import (
- Block,
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.model import SchemaField
# from tweepy.client import Response
diff --git a/autogpt_platform/backend/backend/blocks/twitter/lists/list_lookup.py b/autogpt_platform/backend/backend/blocks/twitter/lists/list_lookup.py
index 6b46f00a37..a6a5607196 100644
--- a/autogpt_platform/backend/backend/blocks/twitter/lists/list_lookup.py
+++ b/autogpt_platform/backend/backend/blocks/twitter/lists/list_lookup.py
@@ -3,6 +3,7 @@ from typing import cast
import tweepy
from tweepy.client import Response
+from backend.blocks._base import Block, BlockCategory, BlockOutput, BlockSchemaOutput
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -23,7 +24,6 @@ from backend.blocks.twitter._types import (
TweetUserFieldsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
-from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchemaOutput
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/twitter/lists/list_members.py b/autogpt_platform/backend/backend/blocks/twitter/lists/list_members.py
index 32ffb9e5b6..5505f1457a 100644
--- a/autogpt_platform/backend/backend/blocks/twitter/lists/list_members.py
+++ b/autogpt_platform/backend/backend/blocks/twitter/lists/list_members.py
@@ -3,6 +3,13 @@ from typing import cast
import tweepy
from tweepy.client import Response
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -29,13 +36,6 @@ from backend.blocks.twitter._types import (
UserExpansionsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
-from backend.data.block import (
- Block,
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/twitter/lists/list_tweets_lookup.py b/autogpt_platform/backend/backend/blocks/twitter/lists/list_tweets_lookup.py
index e43980683e..57dc6579c9 100644
--- a/autogpt_platform/backend/backend/blocks/twitter/lists/list_tweets_lookup.py
+++ b/autogpt_platform/backend/backend/blocks/twitter/lists/list_tweets_lookup.py
@@ -3,6 +3,7 @@ from typing import cast
import tweepy
from tweepy.client import Response
+from backend.blocks._base import Block, BlockCategory, BlockOutput, BlockSchemaOutput
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -26,7 +27,6 @@ from backend.blocks.twitter._types import (
TweetUserFieldsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
-from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchemaOutput
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/twitter/lists/manage_lists.py b/autogpt_platform/backend/backend/blocks/twitter/lists/manage_lists.py
index 4092fbaa93..9bab05e98b 100644
--- a/autogpt_platform/backend/backend/blocks/twitter/lists/manage_lists.py
+++ b/autogpt_platform/backend/backend/blocks/twitter/lists/manage_lists.py
@@ -3,6 +3,13 @@ from typing import cast
import tweepy
from tweepy.client import Response
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -12,13 +19,6 @@ from backend.blocks.twitter._auth import (
TwitterCredentialsInput,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
-from backend.data.block import (
- Block,
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/twitter/lists/pinned_lists.py b/autogpt_platform/backend/backend/blocks/twitter/lists/pinned_lists.py
index 7bc5bb543f..0ebe9503b0 100644
--- a/autogpt_platform/backend/backend/blocks/twitter/lists/pinned_lists.py
+++ b/autogpt_platform/backend/backend/blocks/twitter/lists/pinned_lists.py
@@ -3,6 +3,13 @@ from typing import cast
import tweepy
from tweepy.client import Response
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -23,13 +30,6 @@ from backend.blocks.twitter._types import (
TweetUserFieldsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
-from backend.data.block import (
- Block,
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/twitter/spaces/search_spaces.py b/autogpt_platform/backend/backend/blocks/twitter/spaces/search_spaces.py
index bd013cecc1..a38dc5452e 100644
--- a/autogpt_platform/backend/backend/blocks/twitter/spaces/search_spaces.py
+++ b/autogpt_platform/backend/backend/blocks/twitter/spaces/search_spaces.py
@@ -3,6 +3,7 @@ from typing import cast
import tweepy
from tweepy.client import Response
+from backend.blocks._base import Block, BlockCategory, BlockOutput, BlockSchemaOutput
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -24,7 +25,6 @@ from backend.blocks.twitter._types import (
TweetUserFieldsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
-from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchemaOutput
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/twitter/spaces/spaces_lookup.py b/autogpt_platform/backend/backend/blocks/twitter/spaces/spaces_lookup.py
index 2c99d3ba3a..c31f0efd38 100644
--- a/autogpt_platform/backend/backend/blocks/twitter/spaces/spaces_lookup.py
+++ b/autogpt_platform/backend/backend/blocks/twitter/spaces/spaces_lookup.py
@@ -4,6 +4,7 @@ import tweepy
from pydantic import BaseModel
from tweepy.client import Response
+from backend.blocks._base import Block, BlockCategory, BlockOutput, BlockSchemaOutput
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -36,7 +37,6 @@ from backend.blocks.twitter._types import (
UserExpansionsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
-from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchemaOutput
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/twitter/tweets/bookmark.py b/autogpt_platform/backend/backend/blocks/twitter/tweets/bookmark.py
index b69002837e..9d8bfccad9 100644
--- a/autogpt_platform/backend/backend/blocks/twitter/tweets/bookmark.py
+++ b/autogpt_platform/backend/backend/blocks/twitter/tweets/bookmark.py
@@ -3,6 +3,13 @@ from typing import cast
import tweepy
from tweepy.client import Response
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -26,13 +33,6 @@ from backend.blocks.twitter._types import (
TweetUserFieldsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
-from backend.data.block import (
- Block,
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/twitter/tweets/hide.py b/autogpt_platform/backend/backend/blocks/twitter/tweets/hide.py
index f9992ea7c0..72ed2096a7 100644
--- a/autogpt_platform/backend/backend/blocks/twitter/tweets/hide.py
+++ b/autogpt_platform/backend/backend/blocks/twitter/tweets/hide.py
@@ -1,5 +1,12 @@
import tweepy
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -9,13 +16,6 @@ from backend.blocks.twitter._auth import (
TwitterCredentialsInput,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
-from backend.data.block import (
- Block,
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/twitter/tweets/like.py b/autogpt_platform/backend/backend/blocks/twitter/tweets/like.py
index 2d499257a9..c2a920276c 100644
--- a/autogpt_platform/backend/backend/blocks/twitter/tweets/like.py
+++ b/autogpt_platform/backend/backend/blocks/twitter/tweets/like.py
@@ -3,6 +3,13 @@ from typing import cast
import tweepy
from tweepy.client import Response
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -31,13 +38,6 @@ from backend.blocks.twitter._types import (
UserExpansionsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
-from backend.data.block import (
- Block,
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/twitter/tweets/manage.py b/autogpt_platform/backend/backend/blocks/twitter/tweets/manage.py
index 875e22738b..68e379b895 100644
--- a/autogpt_platform/backend/backend/blocks/twitter/tweets/manage.py
+++ b/autogpt_platform/backend/backend/blocks/twitter/tweets/manage.py
@@ -5,6 +5,13 @@ import tweepy
from pydantic import BaseModel
from tweepy.client import Response
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -35,13 +42,6 @@ from backend.blocks.twitter._types import (
TweetUserFieldsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
-from backend.data.block import (
- Block,
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/twitter/tweets/quote.py b/autogpt_platform/backend/backend/blocks/twitter/tweets/quote.py
index fc6c336e20..be8d5b3125 100644
--- a/autogpt_platform/backend/backend/blocks/twitter/tweets/quote.py
+++ b/autogpt_platform/backend/backend/blocks/twitter/tweets/quote.py
@@ -3,6 +3,7 @@ from typing import cast
import tweepy
from tweepy.client import Response
+from backend.blocks._base import Block, BlockCategory, BlockOutput, BlockSchemaOutput
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -27,7 +28,6 @@ from backend.blocks.twitter._types import (
TweetUserFieldsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
-from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchemaOutput
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/twitter/tweets/retweet.py b/autogpt_platform/backend/backend/blocks/twitter/tweets/retweet.py
index 1f65f90ea3..606e3b8a74 100644
--- a/autogpt_platform/backend/backend/blocks/twitter/tweets/retweet.py
+++ b/autogpt_platform/backend/backend/blocks/twitter/tweets/retweet.py
@@ -3,6 +3,13 @@ from typing import cast
import tweepy
from tweepy.client import Response
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -23,13 +30,6 @@ from backend.blocks.twitter._types import (
UserExpansionsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
-from backend.data.block import (
- Block,
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/twitter/tweets/timeline.py b/autogpt_platform/backend/backend/blocks/twitter/tweets/timeline.py
index 9f07beba66..347ff5aee1 100644
--- a/autogpt_platform/backend/backend/blocks/twitter/tweets/timeline.py
+++ b/autogpt_platform/backend/backend/blocks/twitter/tweets/timeline.py
@@ -4,6 +4,7 @@ from typing import cast
import tweepy
from tweepy.client import Response
+from backend.blocks._base import Block, BlockCategory, BlockOutput, BlockSchemaOutput
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -31,7 +32,6 @@ from backend.blocks.twitter._types import (
TweetUserFieldsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
-from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchemaOutput
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/twitter/tweets/tweet_lookup.py b/autogpt_platform/backend/backend/blocks/twitter/tweets/tweet_lookup.py
index 540aa1395f..f452848288 100644
--- a/autogpt_platform/backend/backend/blocks/twitter/tweets/tweet_lookup.py
+++ b/autogpt_platform/backend/backend/blocks/twitter/tweets/tweet_lookup.py
@@ -3,6 +3,7 @@ from typing import cast
import tweepy
from tweepy.client import Response
+from backend.blocks._base import Block, BlockCategory, BlockOutput, BlockSchemaOutput
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -26,7 +27,6 @@ from backend.blocks.twitter._types import (
TweetUserFieldsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
-from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchemaOutput
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/twitter/users/blocks.py b/autogpt_platform/backend/backend/blocks/twitter/users/blocks.py
index 1c192aa6b5..12df24cfe2 100644
--- a/autogpt_platform/backend/backend/blocks/twitter/users/blocks.py
+++ b/autogpt_platform/backend/backend/blocks/twitter/users/blocks.py
@@ -3,6 +3,7 @@ from typing import cast
import tweepy
from tweepy.client import Response
+from backend.blocks._base import Block, BlockCategory, BlockOutput, BlockSchemaOutput
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -20,7 +21,6 @@ from backend.blocks.twitter._types import (
UserExpansionsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
-from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchemaOutput
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/twitter/users/follows.py b/autogpt_platform/backend/backend/blocks/twitter/users/follows.py
index 537aea6031..20276b19b4 100644
--- a/autogpt_platform/backend/backend/blocks/twitter/users/follows.py
+++ b/autogpt_platform/backend/backend/blocks/twitter/users/follows.py
@@ -3,6 +3,13 @@ from typing import cast
import tweepy
from tweepy.client import Response
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -23,13 +30,6 @@ from backend.blocks.twitter._types import (
UserExpansionsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
-from backend.data.block import (
- Block,
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/twitter/users/mutes.py b/autogpt_platform/backend/backend/blocks/twitter/users/mutes.py
index e22aec94dc..31927e2b71 100644
--- a/autogpt_platform/backend/backend/blocks/twitter/users/mutes.py
+++ b/autogpt_platform/backend/backend/blocks/twitter/users/mutes.py
@@ -3,6 +3,13 @@ from typing import cast
import tweepy
from tweepy.client import Response
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -23,13 +30,6 @@ from backend.blocks.twitter._types import (
UserExpansionsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
-from backend.data.block import (
- Block,
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/twitter/users/user_lookup.py b/autogpt_platform/backend/backend/blocks/twitter/users/user_lookup.py
index 67c7d14c9b..8d01876955 100644
--- a/autogpt_platform/backend/backend/blocks/twitter/users/user_lookup.py
+++ b/autogpt_platform/backend/backend/blocks/twitter/users/user_lookup.py
@@ -4,6 +4,7 @@ import tweepy
from pydantic import BaseModel
from tweepy.client import Response
+from backend.blocks._base import Block, BlockCategory, BlockOutput, BlockSchemaOutput
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -24,7 +25,6 @@ from backend.blocks.twitter._types import (
UserExpansionsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
-from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchemaOutput
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/video/add_audio.py b/autogpt_platform/backend/backend/blocks/video/add_audio.py
index ebd4ab94f2..f91a82a758 100644
--- a/autogpt_platform/backend/backend/blocks/video/add_audio.py
+++ b/autogpt_platform/backend/backend/blocks/video/add_audio.py
@@ -3,14 +3,14 @@
from moviepy.audio.io.AudioFileClip import AudioFileClip
from moviepy.video.io.VideoFileClip import VideoFileClip
-from backend.blocks.video._utils import extract_source_name, strip_chapters_inplace
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
BlockSchemaInput,
BlockSchemaOutput,
)
+from backend.blocks.video._utils import extract_source_name, strip_chapters_inplace
from backend.data.execution import ExecutionContext
from backend.data.model import SchemaField
from backend.util.file import MediaFileType, get_exec_file_path, store_media_file
diff --git a/autogpt_platform/backend/backend/blocks/video/clip.py b/autogpt_platform/backend/backend/blocks/video/clip.py
index 05deea6530..990a8b2f31 100644
--- a/autogpt_platform/backend/backend/blocks/video/clip.py
+++ b/autogpt_platform/backend/backend/blocks/video/clip.py
@@ -4,18 +4,18 @@ from typing import Literal
from moviepy.video.io.VideoFileClip import VideoFileClip
-from backend.blocks.video._utils import (
- extract_source_name,
- get_video_codecs,
- strip_chapters_inplace,
-)
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
BlockSchemaInput,
BlockSchemaOutput,
)
+from backend.blocks.video._utils import (
+ extract_source_name,
+ get_video_codecs,
+ strip_chapters_inplace,
+)
from backend.data.execution import ExecutionContext
from backend.data.model import SchemaField
from backend.util.exceptions import BlockExecutionError
diff --git a/autogpt_platform/backend/backend/blocks/video/concat.py b/autogpt_platform/backend/backend/blocks/video/concat.py
index b49854fb40..3bf2b5142b 100644
--- a/autogpt_platform/backend/backend/blocks/video/concat.py
+++ b/autogpt_platform/backend/backend/blocks/video/concat.py
@@ -6,18 +6,18 @@ from moviepy import concatenate_videoclips
from moviepy.video.fx import CrossFadeIn, CrossFadeOut, FadeIn, FadeOut
from moviepy.video.io.VideoFileClip import VideoFileClip
-from backend.blocks.video._utils import (
- extract_source_name,
- get_video_codecs,
- strip_chapters_inplace,
-)
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
BlockSchemaInput,
BlockSchemaOutput,
)
+from backend.blocks.video._utils import (
+ extract_source_name,
+ get_video_codecs,
+ strip_chapters_inplace,
+)
from backend.data.execution import ExecutionContext
from backend.data.model import SchemaField
from backend.util.exceptions import BlockExecutionError
diff --git a/autogpt_platform/backend/backend/blocks/video/download.py b/autogpt_platform/backend/backend/blocks/video/download.py
index 4046d5df42..c6d2617f73 100644
--- a/autogpt_platform/backend/backend/blocks/video/download.py
+++ b/autogpt_platform/backend/backend/blocks/video/download.py
@@ -9,7 +9,7 @@ import yt_dlp
if typing.TYPE_CHECKING:
from yt_dlp import _Params
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/video/duration.py b/autogpt_platform/backend/backend/blocks/video/duration.py
index 9e05d35b00..ff904ad650 100644
--- a/autogpt_platform/backend/backend/blocks/video/duration.py
+++ b/autogpt_platform/backend/backend/blocks/video/duration.py
@@ -3,14 +3,14 @@
from moviepy.audio.io.AudioFileClip import AudioFileClip
from moviepy.video.io.VideoFileClip import VideoFileClip
-from backend.blocks.video._utils import strip_chapters_inplace
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
BlockSchemaInput,
BlockSchemaOutput,
)
+from backend.blocks.video._utils import strip_chapters_inplace
from backend.data.execution import ExecutionContext
from backend.data.model import SchemaField
from backend.util.file import MediaFileType, get_exec_file_path, store_media_file
diff --git a/autogpt_platform/backend/backend/blocks/video/loop.py b/autogpt_platform/backend/backend/blocks/video/loop.py
index 461610f713..0cb360a5b2 100644
--- a/autogpt_platform/backend/backend/blocks/video/loop.py
+++ b/autogpt_platform/backend/backend/blocks/video/loop.py
@@ -5,14 +5,14 @@ from typing import Optional
from moviepy.video.fx.Loop import Loop
from moviepy.video.io.VideoFileClip import VideoFileClip
-from backend.blocks.video._utils import extract_source_name, strip_chapters_inplace
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
BlockSchemaInput,
BlockSchemaOutput,
)
+from backend.blocks.video._utils import extract_source_name, strip_chapters_inplace
from backend.data.execution import ExecutionContext
from backend.data.model import SchemaField
from backend.util.file import MediaFileType, get_exec_file_path, store_media_file
diff --git a/autogpt_platform/backend/backend/blocks/video/narration.py b/autogpt_platform/backend/backend/blocks/video/narration.py
index adf41753c8..39b9c481b0 100644
--- a/autogpt_platform/backend/backend/blocks/video/narration.py
+++ b/autogpt_platform/backend/backend/blocks/video/narration.py
@@ -8,6 +8,13 @@ from moviepy import CompositeAudioClip
from moviepy.audio.io.AudioFileClip import AudioFileClip
from moviepy.video.io.VideoFileClip import VideoFileClip
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.elevenlabs._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
@@ -19,13 +26,6 @@ from backend.blocks.video._utils import (
get_video_codecs,
strip_chapters_inplace,
)
-from backend.data.block import (
- Block,
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.execution import ExecutionContext
from backend.data.model import CredentialsField, SchemaField
from backend.util.exceptions import BlockExecutionError
diff --git a/autogpt_platform/backend/backend/blocks/video/text_overlay.py b/autogpt_platform/backend/backend/blocks/video/text_overlay.py
index cb7cfe0420..86dd30318c 100644
--- a/autogpt_platform/backend/backend/blocks/video/text_overlay.py
+++ b/autogpt_platform/backend/backend/blocks/video/text_overlay.py
@@ -5,18 +5,18 @@ from typing import Literal
from moviepy import CompositeVideoClip, TextClip
from moviepy.video.io.VideoFileClip import VideoFileClip
-from backend.blocks.video._utils import (
- extract_source_name,
- get_video_codecs,
- strip_chapters_inplace,
-)
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
BlockSchemaInput,
BlockSchemaOutput,
)
+from backend.blocks.video._utils import (
+ extract_source_name,
+ get_video_codecs,
+ strip_chapters_inplace,
+)
from backend.data.execution import ExecutionContext
from backend.data.model import SchemaField
from backend.util.exceptions import BlockExecutionError
diff --git a/autogpt_platform/backend/backend/blocks/xml_parser.py b/autogpt_platform/backend/backend/blocks/xml_parser.py
index 223f8ea367..a1274fa562 100644
--- a/autogpt_platform/backend/backend/blocks/xml_parser.py
+++ b/autogpt_platform/backend/backend/blocks/xml_parser.py
@@ -1,7 +1,7 @@
from gravitasml.parser import Parser
from gravitasml.token import Token, tokenize
-from backend.data.block import Block, BlockOutput, BlockSchemaInput, BlockSchemaOutput
+from backend.blocks._base import Block, BlockOutput, BlockSchemaInput, BlockSchemaOutput
from backend.data.model import SchemaField
diff --git a/autogpt_platform/backend/backend/blocks/youtube.py b/autogpt_platform/backend/backend/blocks/youtube.py
index 6d81a86b4c..6ce705e4f5 100644
--- a/autogpt_platform/backend/backend/blocks/youtube.py
+++ b/autogpt_platform/backend/backend/blocks/youtube.py
@@ -9,7 +9,7 @@ from youtube_transcript_api._transcripts import FetchedTranscript
from youtube_transcript_api.formatters import TextFormatter
from youtube_transcript_api.proxies import WebshareProxyConfig
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockOutput,
diff --git a/autogpt_platform/backend/backend/blocks/zerobounce/validate_emails.py b/autogpt_platform/backend/backend/blocks/zerobounce/validate_emails.py
index fa5283f324..6a461b4aa8 100644
--- a/autogpt_platform/backend/backend/blocks/zerobounce/validate_emails.py
+++ b/autogpt_platform/backend/backend/blocks/zerobounce/validate_emails.py
@@ -7,6 +7,13 @@ from zerobouncesdk.zb_validate_response import (
ZBValidateSubStatus,
)
+from backend.blocks._base import (
+ Block,
+ BlockCategory,
+ BlockOutput,
+ BlockSchemaInput,
+ BlockSchemaOutput,
+)
from backend.blocks.zerobounce._api import ZeroBounceClient
from backend.blocks.zerobounce._auth import (
TEST_CREDENTIALS,
@@ -14,13 +21,6 @@ from backend.blocks.zerobounce._auth import (
ZeroBounceCredentials,
ZeroBounceCredentialsInput,
)
-from backend.data.block import (
- Block,
- BlockCategory,
- BlockOutput,
- BlockSchemaInput,
- BlockSchemaOutput,
-)
from backend.data.model import CredentialsField, SchemaField
diff --git a/autogpt_platform/backend/backend/data/__init__.py b/autogpt_platform/backend/backend/data/__init__.py
index c98667e362..8b13789179 100644
--- a/autogpt_platform/backend/backend/data/__init__.py
+++ b/autogpt_platform/backend/backend/data/__init__.py
@@ -1,8 +1 @@
-from backend.api.features.library.model import LibraryAgentPreset
-from .graph import NodeModel
-from .integrations import Webhook # noqa: F401
-
-# Resolve Webhook forward references
-NodeModel.model_rebuild()
-LibraryAgentPreset.model_rebuild()
diff --git a/autogpt_platform/backend/backend/data/block.py b/autogpt_platform/backend/backend/data/block.py
index f67134ceb3..a958011bc0 100644
--- a/autogpt_platform/backend/backend/data/block.py
+++ b/autogpt_platform/backend/backend/data/block.py
@@ -1,887 +1,32 @@
-import inspect
import logging
-import os
-from abc import ABC, abstractmethod
-from collections.abc import AsyncGenerator as AsyncGen
-from enum import Enum
-from typing import (
- TYPE_CHECKING,
- Any,
- Callable,
- ClassVar,
- Generic,
- Optional,
- Sequence,
- Type,
- TypeAlias,
- TypeVar,
- cast,
- get_origin,
-)
+from typing import TYPE_CHECKING, Any, AsyncGenerator
-import jsonref
-import jsonschema
from prisma.models import AgentBlock
from prisma.types import AgentBlockCreateInput
-from pydantic import BaseModel
-from backend.data.model import NodeExecutionStats
-from backend.integrations.providers import ProviderName
from backend.util import json
-from backend.util.cache import cached
-from backend.util.exceptions import (
- BlockError,
- BlockExecutionError,
- BlockInputError,
- BlockOutputError,
- BlockUnknownError,
-)
-from backend.util.settings import Config
-from .model import (
- ContributorDetails,
- Credentials,
- CredentialsFieldInfo,
- CredentialsMetaInput,
- SchemaField,
- is_credentials_field_name,
-)
+if TYPE_CHECKING:
+ from backend.blocks._base import AnyBlockSchema
logger = logging.getLogger(__name__)
-if TYPE_CHECKING:
- from backend.data.execution import ExecutionContext
- from .graph import Link
-
-app_config = Config()
-
-BlockInput = dict[str, Any] # Input: 1 input pin consumes 1 data.
+BlockInput = dict[str, Any] # Input: 1 input pin <- 1 data.
BlockOutputEntry = tuple[str, Any] # Output data should be a tuple of (name, value).
-BlockOutput = AsyncGen[BlockOutputEntry, None] # Output: 1 output pin produces n data.
-BlockTestOutput = BlockOutputEntry | tuple[str, Callable[[Any], bool]]
+BlockOutput = AsyncGenerator[BlockOutputEntry, None] # Output: 1 output pin -> N data.
CompletedBlockOutput = dict[str, list[Any]] # Completed stream, collected as a dict.
-class BlockType(Enum):
- STANDARD = "Standard"
- INPUT = "Input"
- OUTPUT = "Output"
- NOTE = "Note"
- WEBHOOK = "Webhook"
- WEBHOOK_MANUAL = "Webhook (manual)"
- AGENT = "Agent"
- AI = "AI"
- AYRSHARE = "Ayrshare"
- HUMAN_IN_THE_LOOP = "Human In The Loop"
-
-
-class BlockCategory(Enum):
- AI = "Block that leverages AI to perform a task."
- SOCIAL = "Block that interacts with social media platforms."
- TEXT = "Block that processes text data."
- SEARCH = "Block that searches or extracts information from the internet."
- BASIC = "Block that performs basic operations."
- INPUT = "Block that interacts with input of the graph."
- OUTPUT = "Block that interacts with output of the graph."
- LOGIC = "Programming logic to control the flow of your agent"
- COMMUNICATION = "Block that interacts with communication platforms."
- DEVELOPER_TOOLS = "Developer tools such as GitHub blocks."
- DATA = "Block that interacts with structured data."
- HARDWARE = "Block that interacts with hardware."
- AGENT = "Block that interacts with other agents."
- CRM = "Block that interacts with CRM services."
- SAFETY = (
- "Block that provides AI safety mechanisms such as detecting harmful content"
- )
- PRODUCTIVITY = "Block that helps with productivity"
- ISSUE_TRACKING = "Block that helps with issue tracking"
- MULTIMEDIA = "Block that interacts with multimedia content"
- MARKETING = "Block that helps with marketing"
-
- def dict(self) -> dict[str, str]:
- return {"category": self.name, "description": self.value}
-
-
-class BlockCostType(str, Enum):
- RUN = "run" # cost X credits per run
- BYTE = "byte" # cost X credits per byte
- SECOND = "second" # cost X credits per second
-
-
-class BlockCost(BaseModel):
- cost_amount: int
- cost_filter: BlockInput
- cost_type: BlockCostType
-
- def __init__(
- self,
- cost_amount: int,
- cost_type: BlockCostType = BlockCostType.RUN,
- cost_filter: Optional[BlockInput] = None,
- **data: Any,
- ) -> None:
- super().__init__(
- cost_amount=cost_amount,
- cost_filter=cost_filter or {},
- cost_type=cost_type,
- **data,
- )
-
-
-class BlockInfo(BaseModel):
- id: str
- name: str
- inputSchema: dict[str, Any]
- outputSchema: dict[str, Any]
- costs: list[BlockCost]
- description: str
- categories: list[dict[str, str]]
- contributors: list[dict[str, Any]]
- staticOutput: bool
- uiType: str
-
-
-class BlockSchema(BaseModel):
- cached_jsonschema: ClassVar[dict[str, Any]]
-
- @classmethod
- def jsonschema(cls) -> dict[str, Any]:
- if cls.cached_jsonschema:
- return cls.cached_jsonschema
-
- model = jsonref.replace_refs(cls.model_json_schema(), merge_props=True)
-
- def ref_to_dict(obj):
- if isinstance(obj, dict):
- # OpenAPI <3.1 does not support sibling fields that has a $ref key
- # So sometimes, the schema has an "allOf"/"anyOf"/"oneOf" with 1 item.
- keys = {"allOf", "anyOf", "oneOf"}
- one_key = next((k for k in keys if k in obj and len(obj[k]) == 1), None)
- if one_key:
- obj.update(obj[one_key][0])
-
- return {
- key: ref_to_dict(value)
- for key, value in obj.items()
- if not key.startswith("$") and key != one_key
- }
- elif isinstance(obj, list):
- return [ref_to_dict(item) for item in obj]
-
- return obj
-
- cls.cached_jsonschema = cast(dict[str, Any], ref_to_dict(model))
-
- return cls.cached_jsonschema
-
- @classmethod
- def validate_data(cls, data: BlockInput) -> str | None:
- return json.validate_with_jsonschema(
- schema=cls.jsonschema(),
- data={k: v for k, v in data.items() if v is not None},
- )
-
- @classmethod
- def get_mismatch_error(cls, data: BlockInput) -> str | None:
- return cls.validate_data(data)
-
- @classmethod
- def get_field_schema(cls, field_name: str) -> dict[str, Any]:
- model_schema = cls.jsonschema().get("properties", {})
- if not model_schema:
- raise ValueError(f"Invalid model schema {cls}")
-
- property_schema = model_schema.get(field_name)
- if not property_schema:
- raise ValueError(f"Invalid property name {field_name}")
-
- return property_schema
-
- @classmethod
- def validate_field(cls, field_name: str, data: BlockInput) -> str | None:
- """
- Validate the data against a specific property (one of the input/output name).
- Returns the validation error message if the data does not match the schema.
- """
- try:
- property_schema = cls.get_field_schema(field_name)
- jsonschema.validate(json.to_dict(data), property_schema)
- return None
- except jsonschema.ValidationError as e:
- return str(e)
-
- @classmethod
- def get_fields(cls) -> set[str]:
- return set(cls.model_fields.keys())
-
- @classmethod
- def get_required_fields(cls) -> set[str]:
- return {
- field
- for field, field_info in cls.model_fields.items()
- if field_info.is_required()
- }
-
- @classmethod
- def __pydantic_init_subclass__(cls, **kwargs):
- """Validates the schema definition. Rules:
- - Fields with annotation `CredentialsMetaInput` MUST be
- named `credentials` or `*_credentials`
- - Fields named `credentials` or `*_credentials` MUST be
- of type `CredentialsMetaInput`
- """
- super().__pydantic_init_subclass__(**kwargs)
-
- # Reset cached JSON schema to prevent inheriting it from parent class
- cls.cached_jsonschema = {}
-
- credentials_fields = cls.get_credentials_fields()
-
- for field_name in cls.get_fields():
- if is_credentials_field_name(field_name):
- if field_name not in credentials_fields:
- raise TypeError(
- f"Credentials field '{field_name}' on {cls.__qualname__} "
- f"is not of type {CredentialsMetaInput.__name__}"
- )
-
- CredentialsMetaInput.validate_credentials_field_schema(
- cls.get_field_schema(field_name), field_name
- )
-
- elif field_name in credentials_fields:
- raise KeyError(
- f"Credentials field '{field_name}' on {cls.__qualname__} "
- "has invalid name: must be 'credentials' or *_credentials"
- )
-
- @classmethod
- def get_credentials_fields(cls) -> dict[str, type[CredentialsMetaInput]]:
- return {
- field_name: info.annotation
- for field_name, info in cls.model_fields.items()
- if (
- inspect.isclass(info.annotation)
- and issubclass(
- get_origin(info.annotation) or info.annotation,
- CredentialsMetaInput,
- )
- )
- }
-
- @classmethod
- def get_auto_credentials_fields(cls) -> dict[str, dict[str, Any]]:
- """
- Get fields that have auto_credentials metadata (e.g., GoogleDriveFileInput).
-
- Returns a dict mapping kwarg_name -> {field_name, auto_credentials_config}
-
- Raises:
- ValueError: If multiple fields have the same kwarg_name, as this would
- cause silent overwriting and only the last field would be processed.
- """
- result: dict[str, dict[str, Any]] = {}
- schema = cls.jsonschema()
- properties = schema.get("properties", {})
-
- for field_name, field_schema in properties.items():
- auto_creds = field_schema.get("auto_credentials")
- if auto_creds:
- kwarg_name = auto_creds.get("kwarg_name", "credentials")
- if kwarg_name in result:
- raise ValueError(
- f"Duplicate auto_credentials kwarg_name '{kwarg_name}' "
- f"in fields '{result[kwarg_name]['field_name']}' and "
- f"'{field_name}' on {cls.__qualname__}"
- )
- result[kwarg_name] = {
- "field_name": field_name,
- "config": auto_creds,
- }
- return result
-
- @classmethod
- def get_credentials_fields_info(cls) -> dict[str, CredentialsFieldInfo]:
- result = {}
-
- # Regular credentials fields
- for field_name in cls.get_credentials_fields().keys():
- result[field_name] = CredentialsFieldInfo.model_validate(
- cls.get_field_schema(field_name), by_alias=True
- )
-
- # Auto-generated credentials fields (from GoogleDriveFileInput etc.)
- for kwarg_name, info in cls.get_auto_credentials_fields().items():
- config = info["config"]
- # Build a schema-like dict that CredentialsFieldInfo can parse
- auto_schema = {
- "credentials_provider": [config.get("provider", "google")],
- "credentials_types": [config.get("type", "oauth2")],
- "credentials_scopes": config.get("scopes"),
- }
- result[kwarg_name] = CredentialsFieldInfo.model_validate(
- auto_schema, by_alias=True
- )
-
- return result
-
- @classmethod
- def get_input_defaults(cls, data: BlockInput) -> BlockInput:
- return data # Return as is, by default.
-
- @classmethod
- def get_missing_links(cls, data: BlockInput, links: list["Link"]) -> set[str]:
- input_fields_from_nodes = {link.sink_name for link in links}
- return input_fields_from_nodes - set(data)
-
- @classmethod
- def get_missing_input(cls, data: BlockInput) -> set[str]:
- return cls.get_required_fields() - set(data)
-
-
-class BlockSchemaInput(BlockSchema):
- """
- Base schema class for block inputs.
- All block input schemas should extend this class for consistency.
- """
-
- pass
-
-
-class BlockSchemaOutput(BlockSchema):
- """
- Base schema class for block outputs that includes a standard error field.
- All block output schemas should extend this class to ensure consistent error handling.
- """
-
- error: str = SchemaField(
- description="Error message if the operation failed", default=""
- )
-
-
-BlockSchemaInputType = TypeVar("BlockSchemaInputType", bound=BlockSchemaInput)
-BlockSchemaOutputType = TypeVar("BlockSchemaOutputType", bound=BlockSchemaOutput)
-
-
-class EmptyInputSchema(BlockSchemaInput):
- pass
-
-
-class EmptyOutputSchema(BlockSchemaOutput):
- pass
-
-
-# For backward compatibility - will be deprecated
-EmptySchema = EmptyOutputSchema
-
-
-# --8<-- [start:BlockWebhookConfig]
-class BlockManualWebhookConfig(BaseModel):
- """
- Configuration model for webhook-triggered blocks on which
- the user has to manually set up the webhook at the provider.
- """
-
- provider: ProviderName
- """The service provider that the webhook connects to"""
-
- webhook_type: str
- """
- Identifier for the webhook type. E.g. GitHub has repo and organization level hooks.
-
- Only for use in the corresponding `WebhooksManager`.
- """
-
- event_filter_input: str = ""
- """
- Name of the block's event filter input.
- Leave empty if the corresponding webhook doesn't have distinct event/payload types.
- """
-
- event_format: str = "{event}"
- """
- Template string for the event(s) that a block instance subscribes to.
- Applied individually to each event selected in the event filter input.
-
- Example: `"pull_request.{event}"` -> `"pull_request.opened"`
- """
-
-
-class BlockWebhookConfig(BlockManualWebhookConfig):
- """
- Configuration model for webhook-triggered blocks for which
- the webhook can be automatically set up through the provider's API.
- """
-
- resource_format: str
- """
- Template string for the resource that a block instance subscribes to.
- Fields will be filled from the block's inputs (except `payload`).
-
- Example: `f"{repo}/pull_requests"` (note: not how it's actually implemented)
-
- Only for use in the corresponding `WebhooksManager`.
- """
- # --8<-- [end:BlockWebhookConfig]
-
-
-class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
- def __init__(
- self,
- id: str = "",
- description: str = "",
- contributors: list[ContributorDetails] = [],
- categories: set[BlockCategory] | None = None,
- input_schema: Type[BlockSchemaInputType] = EmptyInputSchema,
- output_schema: Type[BlockSchemaOutputType] = EmptyOutputSchema,
- test_input: BlockInput | list[BlockInput] | None = None,
- test_output: BlockTestOutput | list[BlockTestOutput] | None = None,
- test_mock: dict[str, Any] | None = None,
- test_credentials: Optional[Credentials | dict[str, Credentials]] = None,
- disabled: bool = False,
- static_output: bool = False,
- block_type: BlockType = BlockType.STANDARD,
- webhook_config: Optional[BlockWebhookConfig | BlockManualWebhookConfig] = None,
- is_sensitive_action: bool = False,
- ):
- """
- Initialize the block with the given schema.
-
- Args:
- id: The unique identifier for the block, this value will be persisted in the
- DB. So it should be a unique and constant across the application run.
- Use the UUID format for the ID.
- description: The description of the block, explaining what the block does.
- contributors: The list of contributors who contributed to the block.
- input_schema: The schema, defined as a Pydantic model, for the input data.
- output_schema: The schema, defined as a Pydantic model, for the output data.
- test_input: The list or single sample input data for the block, for testing.
- test_output: The list or single expected output if the test_input is run.
- test_mock: function names on the block implementation to mock on test run.
- disabled: If the block is disabled, it will not be available for execution.
- static_output: Whether the output links of the block are static by default.
- """
- self.id = id
- self.input_schema = input_schema
- self.output_schema = output_schema
- self.test_input = test_input
- self.test_output = test_output
- self.test_mock = test_mock
- self.test_credentials = test_credentials
- self.description = description
- self.categories = categories or set()
- self.contributors = contributors or set()
- self.disabled = disabled
- self.static_output = static_output
- self.block_type = block_type
- self.webhook_config = webhook_config
- self.is_sensitive_action = is_sensitive_action
- self.execution_stats: NodeExecutionStats = NodeExecutionStats()
-
- if self.webhook_config:
- if isinstance(self.webhook_config, BlockWebhookConfig):
- # Enforce presence of credentials field on auto-setup webhook blocks
- if not (cred_fields := self.input_schema.get_credentials_fields()):
- raise TypeError(
- "credentials field is required on auto-setup webhook blocks"
- )
- # Disallow multiple credentials inputs on webhook blocks
- elif len(cred_fields) > 1:
- raise ValueError(
- "Multiple credentials inputs not supported on webhook blocks"
- )
-
- self.block_type = BlockType.WEBHOOK
- else:
- self.block_type = BlockType.WEBHOOK_MANUAL
-
- # Enforce shape of webhook event filter, if present
- if self.webhook_config.event_filter_input:
- event_filter_field = self.input_schema.model_fields[
- self.webhook_config.event_filter_input
- ]
- if not (
- isinstance(event_filter_field.annotation, type)
- and issubclass(event_filter_field.annotation, BaseModel)
- and all(
- field.annotation is bool
- for field in event_filter_field.annotation.model_fields.values()
- )
- ):
- raise NotImplementedError(
- f"{self.name} has an invalid webhook event selector: "
- "field must be a BaseModel and all its fields must be boolean"
- )
-
- # Enforce presence of 'payload' input
- if "payload" not in self.input_schema.model_fields:
- raise TypeError(
- f"{self.name} is webhook-triggered but has no 'payload' input"
- )
-
- # Disable webhook-triggered block if webhook functionality not available
- if not app_config.platform_base_url:
- self.disabled = True
-
- @classmethod
- def create(cls: Type["Block"]) -> "Block":
- return cls()
-
- @abstractmethod
- async def run(self, input_data: BlockSchemaInputType, **kwargs) -> BlockOutput:
- """
- Run the block with the given input data.
- Args:
- input_data: The input data with the structure of input_schema.
-
- Kwargs: Currently 14/02/2025 these include
- graph_id: The ID of the graph.
- node_id: The ID of the node.
- graph_exec_id: The ID of the graph execution.
- node_exec_id: The ID of the node execution.
- user_id: The ID of the user.
-
- Returns:
- A Generator that yields (output_name, output_data).
- output_name: One of the output name defined in Block's output_schema.
- output_data: The data for the output_name, matching the defined schema.
- """
- # --- satisfy the type checker, never executed -------------
- if False: # noqa: SIM115
- yield "name", "value" # pyright: ignore[reportMissingYield]
- raise NotImplementedError(f"{self.name} does not implement the run method.")
-
- async def run_once(
- self, input_data: BlockSchemaInputType, output: str, **kwargs
- ) -> Any:
- async for item in self.run(input_data, **kwargs):
- name, data = item
- if name == output:
- return data
- raise ValueError(f"{self.name} did not produce any output for {output}")
-
- def merge_stats(self, stats: NodeExecutionStats) -> NodeExecutionStats:
- self.execution_stats += stats
- return self.execution_stats
-
- @property
- def name(self):
- return self.__class__.__name__
-
- def to_dict(self):
- return {
- "id": self.id,
- "name": self.name,
- "inputSchema": self.input_schema.jsonschema(),
- "outputSchema": self.output_schema.jsonschema(),
- "description": self.description,
- "categories": [category.dict() for category in self.categories],
- "contributors": [
- contributor.model_dump() for contributor in self.contributors
- ],
- "staticOutput": self.static_output,
- "uiType": self.block_type.value,
- }
-
- def get_info(self) -> BlockInfo:
- from backend.data.credit import get_block_cost
-
- return BlockInfo(
- id=self.id,
- name=self.name,
- inputSchema=self.input_schema.jsonschema(),
- outputSchema=self.output_schema.jsonschema(),
- costs=get_block_cost(self),
- description=self.description,
- categories=[category.dict() for category in self.categories],
- contributors=[
- contributor.model_dump() for contributor in self.contributors
- ],
- staticOutput=self.static_output,
- uiType=self.block_type.value,
- )
-
- async def execute(self, input_data: BlockInput, **kwargs) -> BlockOutput:
- try:
- async for output_name, output_data in self._execute(input_data, **kwargs):
- yield output_name, output_data
- except Exception as ex:
- if isinstance(ex, BlockError):
- raise ex
- else:
- raise (
- BlockExecutionError
- if isinstance(ex, ValueError)
- else BlockUnknownError
- )(
- message=str(ex),
- block_name=self.name,
- block_id=self.id,
- ) from ex
-
- async def is_block_exec_need_review(
- self,
- input_data: BlockInput,
- *,
- user_id: str,
- node_id: str,
- node_exec_id: str,
- graph_exec_id: str,
- graph_id: str,
- graph_version: int,
- execution_context: "ExecutionContext",
- **kwargs,
- ) -> tuple[bool, BlockInput]:
- """
- Check if this block execution needs human review and handle the review process.
-
- Returns:
- Tuple of (should_pause, input_data_to_use)
- - should_pause: True if execution should be paused for review
- - input_data_to_use: The input data to use (may be modified by reviewer)
- """
- if not (
- self.is_sensitive_action and execution_context.sensitive_action_safe_mode
- ):
- return False, input_data
-
- from backend.blocks.helpers.review import HITLReviewHelper
-
- # Handle the review request and get decision
- decision = await HITLReviewHelper.handle_review_decision(
- input_data=input_data,
- user_id=user_id,
- node_id=node_id,
- node_exec_id=node_exec_id,
- graph_exec_id=graph_exec_id,
- graph_id=graph_id,
- graph_version=graph_version,
- block_name=self.name,
- editable=True,
- )
-
- if decision is None:
- # We're awaiting review - pause execution
- return True, input_data
-
- if not decision.should_proceed:
- # Review was rejected, raise an error to stop execution
- raise BlockExecutionError(
- message=f"Block execution rejected by reviewer: {decision.message}",
- block_name=self.name,
- block_id=self.id,
- )
-
- # Review was approved - use the potentially modified data
- # ReviewResult.data must be a dict for block inputs
- reviewed_data = decision.review_result.data
- if not isinstance(reviewed_data, dict):
- raise BlockExecutionError(
- message=f"Review data must be a dict for block input, got {type(reviewed_data).__name__}",
- block_name=self.name,
- block_id=self.id,
- )
- return False, reviewed_data
-
- async def _execute(self, input_data: BlockInput, **kwargs) -> BlockOutput:
- # Check for review requirement only if running within a graph execution context
- # Direct block execution (e.g., from chat) skips the review process
- has_graph_context = all(
- key in kwargs
- for key in (
- "node_exec_id",
- "graph_exec_id",
- "graph_id",
- "execution_context",
- )
- )
- if has_graph_context:
- should_pause, input_data = await self.is_block_exec_need_review(
- input_data, **kwargs
- )
- if should_pause:
- return
-
- # Validate the input data (original or reviewer-modified) once
- if error := self.input_schema.validate_data(input_data):
- raise BlockInputError(
- message=f"Unable to execute block with invalid input data: {error}",
- block_name=self.name,
- block_id=self.id,
- )
-
- # Use the validated input data
- async for output_name, output_data in self.run(
- self.input_schema(**{k: v for k, v in input_data.items() if v is not None}),
- **kwargs,
- ):
- if output_name == "error":
- raise BlockExecutionError(
- message=output_data, block_name=self.name, block_id=self.id
- )
- if self.block_type == BlockType.STANDARD and (
- error := self.output_schema.validate_field(output_name, output_data)
- ):
- raise BlockOutputError(
- message=f"Block produced an invalid output data: {error}",
- block_name=self.name,
- block_id=self.id,
- )
- yield output_name, output_data
-
- def is_triggered_by_event_type(
- self, trigger_config: dict[str, Any], event_type: str
- ) -> bool:
- if not self.webhook_config:
- raise TypeError("This method can't be used on non-trigger blocks")
- if not self.webhook_config.event_filter_input:
- return True
- event_filter = trigger_config.get(self.webhook_config.event_filter_input)
- if not event_filter:
- raise ValueError("Event filter is not configured on trigger")
- return event_type in [
- self.webhook_config.event_format.format(event=k)
- for k in event_filter
- if event_filter[k] is True
- ]
-
-
-# Type alias for any block with standard input/output schemas
-AnyBlockSchema: TypeAlias = Block[BlockSchemaInput, BlockSchemaOutput]
-
-
-# ======================= Block Helper Functions ======================= #
-
-
-def get_blocks() -> dict[str, Type[Block]]:
- from backend.blocks import load_all_blocks
-
- return load_all_blocks()
-
-
-def is_block_auth_configured(
- block_cls: type[AnyBlockSchema],
-) -> bool:
- """
- Check if a block has a valid authentication method configured at runtime.
-
- For example if a block is an OAuth-only block and there env vars are not set,
- do not show it in the UI.
-
- """
- from backend.sdk.registry import AutoRegistry
-
- # Create an instance to access input_schema
- try:
- block = block_cls()
- except Exception as e:
- # If we can't create a block instance, assume it's not OAuth-only
- logger.error(f"Error creating block instance for {block_cls.__name__}: {e}")
- return True
- logger.debug(
- f"Checking if block {block_cls.__name__} has a valid provider configured"
- )
-
- # Get all credential inputs from input schema
- credential_inputs = block.input_schema.get_credentials_fields_info()
- required_inputs = block.input_schema.get_required_fields()
- if not credential_inputs:
- logger.debug(
- f"Block {block_cls.__name__} has no credential inputs - Treating as valid"
- )
- return True
-
- # Check credential inputs
- if len(required_inputs.intersection(credential_inputs.keys())) == 0:
- logger.debug(
- f"Block {block_cls.__name__} has only optional credential inputs"
- " - will work without credentials configured"
- )
-
- # Check if the credential inputs for this block are correctly configured
- for field_name, field_info in credential_inputs.items():
- provider_names = field_info.provider
- if not provider_names:
- logger.warning(
- f"Block {block_cls.__name__} "
- f"has credential input '{field_name}' with no provider options"
- " - Disabling"
- )
- return False
-
- # If a field has multiple possible providers, each one needs to be usable to
- # prevent breaking the UX
- for _provider_name in provider_names:
- provider_name = _provider_name.value
- if provider_name in ProviderName.__members__.values():
- logger.debug(
- f"Block {block_cls.__name__} credential input '{field_name}' "
- f"provider '{provider_name}' is part of the legacy provider system"
- " - Treating as valid"
- )
- break
-
- provider = AutoRegistry.get_provider(provider_name)
- if not provider:
- logger.warning(
- f"Block {block_cls.__name__} credential input '{field_name}' "
- f"refers to unknown provider '{provider_name}' - Disabling"
- )
- return False
-
- # Check the provider's supported auth types
- if field_info.supported_types != provider.supported_auth_types:
- logger.warning(
- f"Block {block_cls.__name__} credential input '{field_name}' "
- f"has mismatched supported auth types (field <> Provider): "
- f"{field_info.supported_types} != {provider.supported_auth_types}"
- )
-
- if not (supported_auth_types := provider.supported_auth_types):
- # No auth methods are been configured for this provider
- logger.warning(
- f"Block {block_cls.__name__} credential input '{field_name}' "
- f"provider '{provider_name}' "
- "has no authentication methods configured - Disabling"
- )
- return False
-
- # Check if provider supports OAuth
- if "oauth2" in supported_auth_types:
- # Check if OAuth environment variables are set
- if (oauth_config := provider.oauth_config) and bool(
- os.getenv(oauth_config.client_id_env_var)
- and os.getenv(oauth_config.client_secret_env_var)
- ):
- logger.debug(
- f"Block {block_cls.__name__} credential input '{field_name}' "
- f"provider '{provider_name}' is configured for OAuth"
- )
- else:
- logger.error(
- f"Block {block_cls.__name__} credential input '{field_name}' "
- f"provider '{provider_name}' "
- "is missing OAuth client ID or secret - Disabling"
- )
- return False
-
- logger.debug(
- f"Block {block_cls.__name__} credential input '{field_name}' is valid; "
- f"supported credential types: {', '.join(field_info.supported_types)}"
- )
-
- return True
-
-
async def initialize_blocks() -> None:
+ from backend.blocks import get_blocks
from backend.sdk.cost_integration import sync_all_provider_costs
from backend.util.retry import func_retry
sync_all_provider_costs()
@func_retry
- async def sync_block_to_db(block: Block) -> None:
+ async def sync_block_to_db(block: "AnyBlockSchema") -> None:
existing_block = await AgentBlock.prisma().find_first(
where={"OR": [{"id": block.id}, {"name": block.name}]}
)
@@ -932,36 +77,3 @@ async def initialize_blocks() -> None:
f"Failed to sync {len(failed_blocks)} block(s) to database: "
f"{', '.join(failed_blocks)}. These blocks are still available in memory."
)
-
-
-# Note on the return type annotation: https://github.com/microsoft/pyright/issues/10281
-def get_block(block_id: str) -> AnyBlockSchema | None:
- cls = get_blocks().get(block_id)
- return cls() if cls else None
-
-
-@cached(ttl_seconds=3600)
-def get_webhook_block_ids() -> Sequence[str]:
- return [
- id
- for id, B in get_blocks().items()
- if B().block_type in (BlockType.WEBHOOK, BlockType.WEBHOOK_MANUAL)
- ]
-
-
-@cached(ttl_seconds=3600)
-def get_io_block_ids() -> Sequence[str]:
- return [
- id
- for id, B in get_blocks().items()
- if B().block_type in (BlockType.INPUT, BlockType.OUTPUT)
- ]
-
-
-@cached(ttl_seconds=3600)
-def get_human_in_the_loop_block_ids() -> Sequence[str]:
- return [
- id
- for id, B in get_blocks().items()
- if B().block_type == BlockType.HUMAN_IN_THE_LOOP
- ]
diff --git a/autogpt_platform/backend/backend/data/block_cost_config.py b/autogpt_platform/backend/backend/data/block_cost_config.py
index ec35afa401..c7fb12deb6 100644
--- a/autogpt_platform/backend/backend/data/block_cost_config.py
+++ b/autogpt_platform/backend/backend/data/block_cost_config.py
@@ -1,5 +1,6 @@
from typing import Type
+from backend.blocks._base import Block, BlockCost, BlockCostType
from backend.blocks.ai_image_customizer import AIImageCustomizerBlock, GeminiImageModel
from backend.blocks.ai_image_generator_block import AIImageGeneratorBlock, ImageGenModel
from backend.blocks.ai_music_generator import AIMusicGeneratorBlock
@@ -37,7 +38,6 @@ from backend.blocks.smart_decision_maker import SmartDecisionMakerBlock
from backend.blocks.talking_head import CreateTalkingAvatarVideoBlock
from backend.blocks.text_to_speech_block import UnrealTextToSpeechBlock
from backend.blocks.video.narration import VideoNarrationBlock
-from backend.data.block import Block, BlockCost, BlockCostType
from backend.integrations.credentials_store import (
aiml_api_credentials,
anthropic_credentials,
diff --git a/autogpt_platform/backend/backend/data/credit.py b/autogpt_platform/backend/backend/data/credit.py
index f3c5365446..04f91d8d61 100644
--- a/autogpt_platform/backend/backend/data/credit.py
+++ b/autogpt_platform/backend/backend/data/credit.py
@@ -38,7 +38,7 @@ from backend.util.retry import func_retry
from backend.util.settings import Settings
if TYPE_CHECKING:
- from backend.data.block import Block, BlockCost
+ from backend.blocks._base import Block, BlockCost
settings = Settings()
stripe.api_key = settings.secrets.stripe_api_key
diff --git a/autogpt_platform/backend/backend/data/credit_test.py b/autogpt_platform/backend/backend/data/credit_test.py
index 2b10c62882..cb5973c74f 100644
--- a/autogpt_platform/backend/backend/data/credit_test.py
+++ b/autogpt_platform/backend/backend/data/credit_test.py
@@ -4,8 +4,8 @@ import pytest
from prisma.enums import CreditTransactionType
from prisma.models import CreditTransaction, UserBalance
+from backend.blocks import get_block
from backend.blocks.llm import AITextGeneratorBlock
-from backend.data.block import get_block
from backend.data.credit import BetaUserCredit, UsageTransactionMetadata
from backend.data.execution import ExecutionContext, NodeExecutionEntry
from backend.data.user import DEFAULT_USER_ID
diff --git a/autogpt_platform/backend/backend/data/execution.py b/autogpt_platform/backend/backend/data/execution.py
index def3d14fda..2f9258dc55 100644
--- a/autogpt_platform/backend/backend/data/execution.py
+++ b/autogpt_platform/backend/backend/data/execution.py
@@ -4,7 +4,6 @@ from collections import defaultdict
from datetime import datetime, timedelta, timezone
from enum import Enum
from typing import (
- TYPE_CHECKING,
Annotated,
Any,
AsyncGenerator,
@@ -39,6 +38,8 @@ from prisma.types import (
from pydantic import BaseModel, ConfigDict, JsonValue, ValidationError
from pydantic.fields import Field
+from backend.blocks import get_block, get_io_block_ids, get_webhook_block_ids
+from backend.blocks._base import BlockType
from backend.util import type as type_utils
from backend.util.exceptions import DatabaseError
from backend.util.json import SafeJson
@@ -47,14 +48,7 @@ from backend.util.retry import func_retry
from backend.util.settings import Config
from backend.util.truncate import truncate
-from .block import (
- BlockInput,
- BlockType,
- CompletedBlockOutput,
- get_block,
- get_io_block_ids,
- get_webhook_block_ids,
-)
+from .block import BlockInput, CompletedBlockOutput
from .db import BaseDbModel, query_raw_with_schema
from .event_bus import AsyncRedisEventBus, RedisEventBus
from .includes import (
@@ -63,10 +57,12 @@ from .includes import (
GRAPH_EXECUTION_INCLUDE_WITH_NODES,
graph_execution_include,
)
-from .model import CredentialsMetaInput, GraphExecutionStats, NodeExecutionStats
-
-if TYPE_CHECKING:
- pass
+from .model import (
+ CredentialsMetaInput,
+ GraphExecutionStats,
+ GraphInput,
+ NodeExecutionStats,
+)
T = TypeVar("T")
@@ -167,7 +163,7 @@ class GraphExecutionMeta(BaseDbModel):
user_id: str
graph_id: str
graph_version: int
- inputs: Optional[BlockInput] # no default -> required in the OpenAPI spec
+ inputs: Optional[GraphInput] # no default -> required in the OpenAPI spec
credential_inputs: Optional[dict[str, CredentialsMetaInput]]
nodes_input_masks: Optional[dict[str, BlockInput]]
preset_id: Optional[str]
@@ -272,7 +268,7 @@ class GraphExecutionMeta(BaseDbModel):
user_id=_graph_exec.userId,
graph_id=_graph_exec.agentGraphId,
graph_version=_graph_exec.agentGraphVersion,
- inputs=cast(BlockInput | None, _graph_exec.inputs),
+ inputs=cast(GraphInput | None, _graph_exec.inputs),
credential_inputs=(
{
name: CredentialsMetaInput.model_validate(cmi)
@@ -314,7 +310,7 @@ class GraphExecutionMeta(BaseDbModel):
class GraphExecution(GraphExecutionMeta):
- inputs: BlockInput # type: ignore - incompatible override is intentional
+ inputs: GraphInput # type: ignore - incompatible override is intentional
outputs: CompletedBlockOutput
@staticmethod
@@ -447,7 +443,7 @@ class NodeExecutionResult(BaseModel):
for name, messages in stats.cleared_inputs.items():
input_data[name] = messages[-1] if messages else ""
elif _node_exec.executionData:
- input_data = type_utils.convert(_node_exec.executionData, dict[str, Any])
+ input_data = type_utils.convert(_node_exec.executionData, BlockInput)
else:
input_data: BlockInput = defaultdict()
for data in _node_exec.Input or []:
@@ -867,7 +863,7 @@ async def upsert_execution_output(
async def get_execution_outputs_by_node_exec_id(
node_exec_id: str,
-) -> dict[str, Any]:
+) -> CompletedBlockOutput:
"""
Get all execution outputs for a specific node execution ID.
@@ -1498,7 +1494,7 @@ async def get_graph_execution_by_share_token(
# The executionData contains the structured input with 'name' and 'value' fields
if hasattr(node_exec, "executionData") and node_exec.executionData:
exec_data = type_utils.convert(
- node_exec.executionData, dict[str, Any]
+ node_exec.executionData, BlockInput
)
if "name" in exec_data:
name = exec_data["name"]
diff --git a/autogpt_platform/backend/backend/data/graph.py b/autogpt_platform/backend/backend/data/graph.py
index 2433a5d270..f39a0144e7 100644
--- a/autogpt_platform/backend/backend/data/graph.py
+++ b/autogpt_platform/backend/backend/data/graph.py
@@ -23,38 +23,29 @@ from prisma.types import (
from pydantic import BaseModel, BeforeValidator, Field
from pydantic.fields import computed_field
+from backend.blocks import get_block, get_blocks
+from backend.blocks._base import Block, BlockType, EmptySchema
from backend.blocks.agent import AgentExecutorBlock
from backend.blocks.io import AgentInputBlock, AgentOutputBlock
from backend.blocks.llm import LlmModel
-from backend.data.db import prisma as db
-from backend.data.dynamic_fields import is_tool_pin, sanitize_pin_name
-from backend.data.includes import MAX_GRAPH_VERSIONS_FETCH
-from backend.data.model import (
- CredentialsFieldInfo,
- CredentialsMetaInput,
- is_credentials_field_name,
-)
from backend.integrations.providers import ProviderName
from backend.util import type as type_utils
from backend.util.exceptions import GraphNotAccessibleError, GraphNotInLibraryError
from backend.util.json import SafeJson
from backend.util.models import Pagination
-from .block import (
- AnyBlockSchema,
- Block,
- BlockInput,
- BlockType,
- EmptySchema,
- get_block,
- get_blocks,
-)
-from .db import BaseDbModel, query_raw_with_schema, transaction
-from .includes import AGENT_GRAPH_INCLUDE, AGENT_NODE_INCLUDE
+from .block import BlockInput
+from .db import BaseDbModel
+from .db import prisma as db
+from .db import query_raw_with_schema, transaction
+from .dynamic_fields import is_tool_pin, sanitize_pin_name
+from .includes import AGENT_GRAPH_INCLUDE, AGENT_NODE_INCLUDE, MAX_GRAPH_VERSIONS_FETCH
+from .model import CredentialsFieldInfo, CredentialsMetaInput, is_credentials_field_name
if TYPE_CHECKING:
+ from backend.blocks._base import AnyBlockSchema
+
from .execution import NodesInputMasks
- from .integrations import Webhook
logger = logging.getLogger(__name__)
@@ -128,7 +119,7 @@ class Node(BaseDbModel):
return self.metadata.get("credentials_optional", False)
@property
- def block(self) -> AnyBlockSchema | "_UnknownBlockBase":
+ def block(self) -> "AnyBlockSchema | _UnknownBlockBase":
"""Get the block for this node. Returns UnknownBlock if block is deleted/missing."""
block = get_block(self.block_id)
if not block:
@@ -145,21 +136,18 @@ class NodeModel(Node):
graph_version: int
webhook_id: Optional[str] = None
- webhook: Optional["Webhook"] = None
+ # webhook: Optional["Webhook"] = None # deprecated
@staticmethod
def from_db(node: AgentNode, for_export: bool = False) -> "NodeModel":
- from .integrations import Webhook
-
obj = NodeModel(
id=node.id,
block_id=node.agentBlockId,
- input_default=type_utils.convert(node.constantInput, dict[str, Any]),
+ input_default=type_utils.convert(node.constantInput, BlockInput),
metadata=type_utils.convert(node.metadata, dict[str, Any]),
graph_id=node.agentGraphId,
graph_version=node.agentGraphVersion,
webhook_id=node.webhookId,
- webhook=Webhook.from_db(node.Webhook) if node.Webhook else None,
)
obj.input_links = [Link.from_db(link) for link in node.Input or []]
obj.output_links = [Link.from_db(link) for link in node.Output or []]
@@ -192,14 +180,13 @@ class NodeModel(Node):
# Remove webhook info
stripped_node.webhook_id = None
- stripped_node.webhook = None
return stripped_node
@staticmethod
def _filter_secrets_from_node_input(
- input_data: dict[str, Any], schema: dict[str, Any] | None
- ) -> dict[str, Any]:
+ input_data: BlockInput, schema: dict[str, Any] | None
+ ) -> BlockInput:
sensitive_keys = ["credentials", "api_key", "password", "token", "secret"]
field_schemas = schema.get("properties", {}) if schema else {}
result = {}
diff --git a/autogpt_platform/backend/backend/data/graph_test.py b/autogpt_platform/backend/backend/data/graph_test.py
index 8b7eadb887..442c8ed4be 100644
--- a/autogpt_platform/backend/backend/data/graph_test.py
+++ b/autogpt_platform/backend/backend/data/graph_test.py
@@ -9,9 +9,9 @@ from pytest_snapshot.plugin import Snapshot
import backend.api.features.store.model as store
from backend.api.model import CreateGraph
+from backend.blocks._base import BlockSchema, BlockSchemaInput
from backend.blocks.basic import StoreValueBlock
from backend.blocks.io import AgentInputBlock, AgentOutputBlock
-from backend.data.block import BlockSchema, BlockSchemaInput
from backend.data.graph import Graph, Link, Node
from backend.data.model import SchemaField
from backend.data.user import DEFAULT_USER_ID
@@ -323,7 +323,6 @@ async def test_clean_graph(server: SpinTestServer):
# Verify webhook info is removed (if any nodes had it)
for node in cleaned_graph.nodes:
assert node.webhook_id is None
- assert node.webhook is None
@pytest.mark.asyncio(loop_scope="session")
diff --git a/autogpt_platform/backend/backend/data/integrations.py b/autogpt_platform/backend/backend/data/integrations.py
index 5f44f928bd..a6f007ce99 100644
--- a/autogpt_platform/backend/backend/data/integrations.py
+++ b/autogpt_platform/backend/backend/data/integrations.py
@@ -1,5 +1,5 @@
import logging
-from typing import TYPE_CHECKING, AsyncGenerator, Literal, Optional, overload
+from typing import AsyncGenerator, Literal, Optional, overload
from prisma.models import AgentNode, AgentPreset, IntegrationWebhook
from prisma.types import (
@@ -22,9 +22,6 @@ from backend.integrations.webhooks.utils import webhook_ingress_url
from backend.util.exceptions import NotFoundError
from backend.util.json import SafeJson
-if TYPE_CHECKING:
- from backend.api.features.library.model import LibraryAgentPreset
-
from .db import BaseDbModel
from .graph import NodeModel
@@ -64,9 +61,18 @@ class Webhook(BaseDbModel):
)
+# LibraryAgentPreset must be imported after Webhook is defined to avoid a
+# circular import:
+# integrations.py → library/model.py → integrations.py (for Webhook)
+from backend.api.features.library.model import LibraryAgentPreset # noqa: E402
+
+# Resolve forward refs
+LibraryAgentPreset.model_rebuild()
+
+
class WebhookWithRelations(Webhook):
triggered_nodes: list[NodeModel]
- triggered_presets: list["LibraryAgentPreset"]
+ triggered_presets: list[LibraryAgentPreset]
@staticmethod
def from_db(webhook: IntegrationWebhook):
@@ -75,11 +81,6 @@ class WebhookWithRelations(Webhook):
"AgentNodes and AgentPresets must be included in "
"IntegrationWebhook query with relations"
)
- # LibraryAgentPreset import is moved to TYPE_CHECKING to avoid circular import:
- # integrations.py → library/model.py → integrations.py (for Webhook)
- # Runtime import is used in WebhookWithRelations.from_db() method instead
- # Import at runtime to avoid circular dependency
- from backend.api.features.library.model import LibraryAgentPreset
return WebhookWithRelations(
**Webhook.from_db(webhook).model_dump(),
diff --git a/autogpt_platform/backend/backend/data/model.py b/autogpt_platform/backend/backend/data/model.py
index 7bdfef059b..e61f7efbd0 100644
--- a/autogpt_platform/backend/backend/data/model.py
+++ b/autogpt_platform/backend/backend/data/model.py
@@ -168,6 +168,9 @@ T = TypeVar("T")
logger = logging.getLogger(__name__)
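+# Inputs for an entire graph execution, as opposed to BlockInput (the inputs of a single node).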
+GraphInput = dict[str, Any]
+
+
class BlockSecret:
def __init__(self, key: Optional[str] = None, value: Optional[str] = None):
if value is not None:
diff --git a/autogpt_platform/backend/backend/executor/activity_status_generator.py b/autogpt_platform/backend/backend/executor/activity_status_generator.py
index 3bc6bcb876..8cc1da8957 100644
--- a/autogpt_platform/backend/backend/executor/activity_status_generator.py
+++ b/autogpt_platform/backend/backend/executor/activity_status_generator.py
@@ -13,8 +13,8 @@ except ImportError:
from pydantic import SecretStr
+from backend.blocks import get_block
from backend.blocks.llm import AIStructuredResponseGeneratorBlock, LlmModel
-from backend.data.block import get_block
from backend.data.execution import ExecutionStatus, NodeExecutionResult
from backend.data.model import APIKeyCredentials, GraphExecutionStats
from backend.util.feature_flag import Flag, is_feature_enabled
diff --git a/autogpt_platform/backend/backend/executor/manager.py b/autogpt_platform/backend/backend/executor/manager.py
index 7304653811..1f76458947 100644
--- a/autogpt_platform/backend/backend/executor/manager.py
+++ b/autogpt_platform/backend/backend/executor/manager.py
@@ -16,16 +16,12 @@ from pika.spec import Basic, BasicProperties
from prometheus_client import Gauge, start_http_server
from redis.asyncio.lock import Lock as AsyncRedisLock
+from backend.blocks import get_block
+from backend.blocks._base import BlockSchema
from backend.blocks.agent import AgentExecutorBlock
from backend.blocks.io import AgentOutputBlock
from backend.data import redis_client as redis
-from backend.data.block import (
- BlockInput,
- BlockOutput,
- BlockOutputEntry,
- BlockSchema,
- get_block,
-)
+from backend.data.block import BlockInput, BlockOutput, BlockOutputEntry
from backend.data.credit import UsageTransactionMetadata
from backend.data.dynamic_fields import parse_execution_output
from backend.data.execution import (
diff --git a/autogpt_platform/backend/backend/executor/scheduler.py b/autogpt_platform/backend/backend/executor/scheduler.py
index cbdc441718..94829f9837 100644
--- a/autogpt_platform/backend/backend/executor/scheduler.py
+++ b/autogpt_platform/backend/backend/executor/scheduler.py
@@ -24,9 +24,8 @@ from dotenv import load_dotenv
from pydantic import BaseModel, Field, ValidationError
from sqlalchemy import MetaData, create_engine
-from backend.data.block import BlockInput
from backend.data.execution import GraphExecutionWithNodes
-from backend.data.model import CredentialsMetaInput
+from backend.data.model import CredentialsMetaInput, GraphInput
from backend.executor import utils as execution_utils
from backend.monitoring import (
NotificationJobArgs,
@@ -387,7 +386,7 @@ class GraphExecutionJobArgs(BaseModel):
graph_version: int
agent_name: str | None = None
cron: str
- input_data: BlockInput
+ input_data: GraphInput
input_credentials: dict[str, CredentialsMetaInput] = Field(default_factory=dict)
@@ -649,7 +648,7 @@ class Scheduler(AppService):
graph_id: str,
graph_version: int,
cron: str,
- input_data: BlockInput,
+ input_data: GraphInput,
input_credentials: dict[str, CredentialsMetaInput],
name: Optional[str] = None,
user_timezone: str | None = None,
diff --git a/autogpt_platform/backend/backend/executor/utils.py b/autogpt_platform/backend/backend/executor/utils.py
index d26424aefc..bb5da1e527 100644
--- a/autogpt_platform/backend/backend/executor/utils.py
+++ b/autogpt_platform/backend/backend/executor/utils.py
@@ -8,23 +8,18 @@ from typing import Mapping, Optional, cast
from pydantic import BaseModel, JsonValue, ValidationError
+from backend.blocks import get_block
+from backend.blocks._base import Block, BlockCostType, BlockType
from backend.data import execution as execution_db
from backend.data import graph as graph_db
from backend.data import human_review as human_review_db
from backend.data import onboarding as onboarding_db
from backend.data import user as user_db
-from backend.data.block import (
- Block,
- BlockCostType,
- BlockInput,
- BlockOutputEntry,
- BlockType,
- get_block,
-)
-from backend.data.block_cost_config import BLOCK_COSTS
-from backend.data.db import prisma
# Import dynamic field utilities from centralized location
+from backend.data.block import BlockInput, BlockOutputEntry
+from backend.data.block_cost_config import BLOCK_COSTS
+from backend.data.db import prisma
from backend.data.dynamic_fields import merge_execution_input
from backend.data.execution import (
ExecutionContext,
@@ -35,7 +30,7 @@ from backend.data.execution import (
NodesInputMasks,
)
from backend.data.graph import GraphModel, Node
-from backend.data.model import USER_TIMEZONE_NOT_SET, CredentialsMetaInput
+from backend.data.model import USER_TIMEZONE_NOT_SET, CredentialsMetaInput, GraphInput
from backend.data.rabbitmq import Exchange, ExchangeType, Queue, RabbitMQConfig
from backend.util.clients import (
get_async_execution_event_bus,
@@ -426,7 +421,7 @@ async def validate_graph_with_credentials(
async def _construct_starting_node_execution_input(
graph: GraphModel,
user_id: str,
- graph_inputs: BlockInput,
+ graph_inputs: GraphInput,
nodes_input_masks: Optional[NodesInputMasks] = None,
) -> tuple[list[tuple[str, BlockInput]], set[str]]:
"""
@@ -438,7 +433,7 @@ async def _construct_starting_node_execution_input(
Args:
graph (GraphModel): The graph model to execute.
user_id (str): The ID of the user executing the graph.
- data (BlockInput): The input data for the graph execution.
+ graph_inputs (GraphInput): The input data for the graph execution.
node_credentials_map: `dict[node_id, dict[input_name, CredentialsMetaInput]]`
Returns:
@@ -496,7 +491,7 @@ async def _construct_starting_node_execution_input(
async def validate_and_construct_node_execution_input(
graph_id: str,
user_id: str,
- graph_inputs: BlockInput,
+ graph_inputs: GraphInput,
graph_version: Optional[int] = None,
graph_credentials_inputs: Optional[Mapping[str, CredentialsMetaInput]] = None,
nodes_input_masks: Optional[NodesInputMasks] = None,
@@ -796,7 +791,7 @@ async def stop_graph_execution(
async def add_graph_execution(
graph_id: str,
user_id: str,
- inputs: Optional[BlockInput] = None,
+ inputs: Optional[GraphInput] = None,
preset_id: Optional[str] = None,
graph_version: Optional[int] = None,
graph_credentials_inputs: Optional[Mapping[str, CredentialsMetaInput]] = None,
diff --git a/autogpt_platform/backend/backend/integrations/webhooks/graph_lifecycle_hooks.py b/autogpt_platform/backend/backend/integrations/webhooks/graph_lifecycle_hooks.py
index 5fb9198c4d..99eee404b9 100644
--- a/autogpt_platform/backend/backend/integrations/webhooks/graph_lifecycle_hooks.py
+++ b/autogpt_platform/backend/backend/integrations/webhooks/graph_lifecycle_hooks.py
@@ -2,8 +2,9 @@ import asyncio
import logging
from typing import TYPE_CHECKING, Optional, cast, overload
-from backend.data.block import BlockSchema
+from backend.blocks._base import BlockSchema
from backend.data.graph import set_node_webhook
+from backend.data.integrations import get_webhook
from backend.integrations.creds_manager import IntegrationCredentialsManager
from . import get_webhook_manager, supports_webhooks
@@ -113,31 +114,32 @@ async def on_node_deactivate(
webhooks_manager = get_webhook_manager(provider)
- if node.webhook_id:
- logger.debug(f"Node #{node.id} has webhook_id {node.webhook_id}")
- if not node.webhook:
- logger.error(f"Node #{node.id} has webhook_id but no webhook object")
- raise ValueError("node.webhook not included")
+ if webhook_id := node.webhook_id:
+ logger.warning(
+ f"Node #{node.id} still attached to webhook #{webhook_id} - "
+ "did migration by `migrate_legacy_triggered_graphs` fail? "
+ "Triggered nodes are deprecated since Significant-Gravitas/AutoGPT#10418."
+ )
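+ # The Webhook relation is no longer loaded on NodeModel; fetch the webhook by ID instead.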
+ webhook = await get_webhook(webhook_id)
# Detach webhook from node
logger.debug(f"Detaching webhook from node #{node.id}")
updated_node = await set_node_webhook(node.id, None)
# Prune and deregister the webhook if it is no longer used anywhere
- webhook = node.webhook
logger.debug(
f"Pruning{' and deregistering' if credentials else ''} "
- f"webhook #{webhook.id}"
+ f"webhook #{webhook_id}"
)
await webhooks_manager.prune_webhook_if_dangling(
- user_id, webhook.id, credentials
+ user_id, webhook_id, credentials
)
if (
cast(BlockSchema, block.input_schema).get_credentials_fields()
and not credentials
):
logger.warning(
- f"Cannot deregister webhook #{webhook.id}: credentials "
+ f"Cannot deregister webhook #{webhook_id}: credentials "
f"#{webhook.credentials_id} not available "
f"({webhook.provider.value} webhook ID: {webhook.provider_webhook_id})"
)
diff --git a/autogpt_platform/backend/backend/integrations/webhooks/utils.py b/autogpt_platform/backend/backend/integrations/webhooks/utils.py
index 79316c4c0e..ffe910a2eb 100644
--- a/autogpt_platform/backend/backend/integrations/webhooks/utils.py
+++ b/autogpt_platform/backend/backend/integrations/webhooks/utils.py
@@ -9,7 +9,7 @@ from backend.util.settings import Config
from . import get_webhook_manager, supports_webhooks
if TYPE_CHECKING:
- from backend.data.block import AnyBlockSchema
+ from backend.blocks._base import AnyBlockSchema
from backend.data.integrations import Webhook
from backend.data.model import Credentials
from backend.integrations.providers import ProviderName
@@ -42,7 +42,7 @@ async def setup_webhook_for_block(
Webhook: The created or found webhook object, if successful.
str: A feedback message, if any required inputs are missing.
"""
- from backend.data.block import BlockWebhookConfig
+ from backend.blocks._base import BlockWebhookConfig
if not (trigger_base_config := trigger_block.webhook_config):
raise ValueError(f"Block #{trigger_block.id} does not have a webhook_config")
diff --git a/autogpt_platform/backend/backend/monitoring/block_error_monitor.py b/autogpt_platform/backend/backend/monitoring/block_error_monitor.py
index ffd2ffc888..07565a37e8 100644
--- a/autogpt_platform/backend/backend/monitoring/block_error_monitor.py
+++ b/autogpt_platform/backend/backend/monitoring/block_error_monitor.py
@@ -6,7 +6,7 @@ from datetime import datetime, timedelta, timezone
from pydantic import BaseModel
-from backend.data.block import get_block
+from backend.blocks import get_block
from backend.data.execution import ExecutionStatus, NodeExecutionResult
from backend.util.clients import (
get_database_manager_client,
diff --git a/autogpt_platform/backend/backend/sdk/__init__.py b/autogpt_platform/backend/backend/sdk/__init__.py
index b3a23dc735..dc7260d08f 100644
--- a/autogpt_platform/backend/backend/sdk/__init__.py
+++ b/autogpt_platform/backend/backend/sdk/__init__.py
@@ -17,7 +17,7 @@ This module provides:
from pydantic import BaseModel, Field, SecretStr
# === CORE BLOCK SYSTEM ===
-from backend.data.block import (
+from backend.blocks._base import (
Block,
BlockCategory,
BlockManualWebhookConfig,
@@ -65,7 +65,7 @@ except ImportError:
# Cost System
try:
- from backend.data.block import BlockCost, BlockCostType
+ from backend.blocks._base import BlockCost, BlockCostType
except ImportError:
from backend.data.block_cost_config import BlockCost, BlockCostType
diff --git a/autogpt_platform/backend/backend/sdk/builder.py b/autogpt_platform/backend/backend/sdk/builder.py
index 09949b256f..28dd4023f0 100644
--- a/autogpt_platform/backend/backend/sdk/builder.py
+++ b/autogpt_platform/backend/backend/sdk/builder.py
@@ -8,7 +8,7 @@ from typing import Callable, List, Optional, Type
from pydantic import SecretStr
-from backend.data.block import BlockCost, BlockCostType
+from backend.blocks._base import BlockCost, BlockCostType
from backend.data.model import (
APIKeyCredentials,
Credentials,
diff --git a/autogpt_platform/backend/backend/sdk/cost_integration.py b/autogpt_platform/backend/backend/sdk/cost_integration.py
index 04c027ffa3..2eec1aece0 100644
--- a/autogpt_platform/backend/backend/sdk/cost_integration.py
+++ b/autogpt_platform/backend/backend/sdk/cost_integration.py
@@ -8,7 +8,7 @@ BLOCK_COSTS configuration used by the execution system.
import logging
from typing import List, Type
-from backend.data.block import Block, BlockCost
+from backend.blocks._base import Block, BlockCost
from backend.data.block_cost_config import BLOCK_COSTS
from backend.sdk.registry import AutoRegistry
diff --git a/autogpt_platform/backend/backend/sdk/provider.py b/autogpt_platform/backend/backend/sdk/provider.py
index 98afbf05d5..2933121703 100644
--- a/autogpt_platform/backend/backend/sdk/provider.py
+++ b/autogpt_platform/backend/backend/sdk/provider.py
@@ -7,7 +7,7 @@ from typing import Any, Callable, List, Optional, Set, Type
from pydantic import BaseModel, SecretStr
-from backend.data.block import BlockCost
+from backend.blocks._base import BlockCost
from backend.data.model import (
APIKeyCredentials,
Credentials,
diff --git a/autogpt_platform/backend/backend/util/test.py b/autogpt_platform/backend/backend/util/test.py
index 23d7c24147..279b3142a4 100644
--- a/autogpt_platform/backend/backend/util/test.py
+++ b/autogpt_platform/backend/backend/util/test.py
@@ -8,8 +8,9 @@ from typing import Sequence, cast
from autogpt_libs.auth import get_user_id
from backend.api.rest_api import AgentServer
+from backend.blocks._base import Block, BlockSchema
from backend.data import db
-from backend.data.block import Block, BlockSchema, initialize_blocks
+from backend.data.block import initialize_blocks
from backend.data.execution import (
ExecutionContext,
ExecutionStatus,
diff --git a/autogpt_platform/backend/scripts/generate_block_docs.py b/autogpt_platform/backend/scripts/generate_block_docs.py
index bb60eddb5d..25ad0a3be7 100644
--- a/autogpt_platform/backend/scripts/generate_block_docs.py
+++ b/autogpt_platform/backend/scripts/generate_block_docs.py
@@ -24,7 +24,10 @@ import sys
from collections import defaultdict
from dataclasses import dataclass, field
from pathlib import Path
-from typing import Any
+from typing import TYPE_CHECKING, Any, Type
+
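+# Only used in type annotations below; not imported at runtime.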
+if TYPE_CHECKING:
+ from backend.blocks._base import AnyBlockSchema
# Add backend to path for imports
backend_dir = Path(__file__).parent.parent
@@ -242,9 +245,9 @@ def file_path_to_title(file_path: str) -> str:
return apply_fixes(name.replace("_", " ").title())
-def extract_block_doc(block_cls: type) -> BlockDoc:
+def extract_block_doc(block_cls: Type["AnyBlockSchema"]) -> BlockDoc:
"""Extract documentation data from a block class."""
- block = block_cls.create()
+ block = block_cls()
# Get source file
try:
@@ -520,7 +523,7 @@ def generate_overview_table(blocks: list[BlockDoc], block_dir_prefix: str = "")
lines.append("")
# Group blocks by category
- by_category = defaultdict(list)
+ by_category = defaultdict[str, list[BlockDoc]](list)
for block in blocks:
primary_cat = block.categories[0] if block.categories else "BASIC"
by_category[primary_cat].append(block)
diff --git a/autogpt_platform/backend/test/load_store_agents.py b/autogpt_platform/backend/test/load_store_agents.py
index b9d8e0478e..dfc5beb453 100644
--- a/autogpt_platform/backend/test/load_store_agents.py
+++ b/autogpt_platform/backend/test/load_store_agents.py
@@ -49,7 +49,7 @@ async def initialize_blocks(db: Prisma) -> set[str]:
Returns a set of block IDs that exist in the database.
"""
- from backend.data.block import get_blocks
+ from backend.blocks import get_blocks
print(" Initializing agent blocks...")
blocks = get_blocks()
diff --git a/autogpt_platform/backend/test/sdk/test_sdk_registry.py b/autogpt_platform/backend/test/sdk/test_sdk_registry.py
index f82abd57cb..ab384ca955 100644
--- a/autogpt_platform/backend/test/sdk/test_sdk_registry.py
+++ b/autogpt_platform/backend/test/sdk/test_sdk_registry.py
@@ -377,7 +377,7 @@ class TestProviderBuilder:
def test_provider_builder_with_base_cost(self):
"""Test building a provider with base costs."""
- from backend.data.block import BlockCostType
+ from backend.blocks._base import BlockCostType
provider = (
ProviderBuilder("cost_test")
@@ -418,7 +418,7 @@ class TestProviderBuilder:
def test_provider_builder_complete_example(self):
"""Test building a complete provider with all features."""
- from backend.data.block import BlockCostType
+ from backend.blocks._base import BlockCostType
class TestOAuth(BaseOAuthHandler):
PROVIDER_NAME = ProviderName.GITHUB
diff --git a/autogpt_platform/frontend/src/app/(platform)/build/components/legacy-builder/Flow/Flow.tsx b/autogpt_platform/frontend/src/app/(platform)/build/components/legacy-builder/Flow/Flow.tsx
index 67b3cad9af..babe10b912 100644
--- a/autogpt_platform/frontend/src/app/(platform)/build/components/legacy-builder/Flow/Flow.tsx
+++ b/autogpt_platform/frontend/src/app/(platform)/build/components/legacy-builder/Flow/Flow.tsx
@@ -1137,7 +1137,7 @@ const FlowEditor: React.FC<{
You are building a Trigger Agent
Your agent{" "}
- {savedAgent?.nodes.some((node) => node.webhook)
+ {savedAgent?.nodes.some((node) => node.webhook_id)
? "is listening"
: "will listen"}{" "}
for its trigger and will run when the time is right.
diff --git a/autogpt_platform/frontend/src/app/api/openapi.json b/autogpt_platform/frontend/src/app/api/openapi.json
index fff3fc7785..496a714ba5 100644
--- a/autogpt_platform/frontend/src/app/api/openapi.json
+++ b/autogpt_platform/frontend/src/app/api/openapi.json
@@ -9497,12 +9497,6 @@
"webhook_id": {
"anyOf": [{ "type": "string" }, { "type": "null" }],
"title": "Webhook Id"
- },
- "webhook": {
- "anyOf": [
- { "$ref": "#/components/schemas/Webhook" },
- { "type": "null" }
- ]
}
},
"type": "object",
diff --git a/autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts b/autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts
index 44fb25dbfc..65625f1cfb 100644
--- a/autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts
+++ b/autogpt_platform/frontend/src/lib/autogpt-server-api/types.ts
@@ -27,7 +27,7 @@ export type BlockCost = {
cost_filter: Record<string, any>;
};
-/* Mirror of backend/data/block.py:Block */
+/* Mirror of backend/blocks/_base.py:Block */
export type Block = {
id: string;
name: string;
@@ -292,7 +292,7 @@ export type NodeCreatable = {
export type Node = NodeCreatable & {
input_links: Link[];
output_links: Link[];
- webhook?: Webhook;
+ webhook_id?: string | null;
};
/* Mirror of backend/data/graph.py:Link */
diff --git a/docs/platform/new_blocks.md b/docs/platform/new_blocks.md
index 114ff8d9a4..c84f864684 100644
--- a/docs/platform/new_blocks.md
+++ b/docs/platform/new_blocks.md
@@ -20,13 +20,13 @@ Follow these steps to create and test a new block:
Every block should contain the following:
```python
- from backend.data.block import Block, BlockSchemaInput, BlockSchemaOutput, BlockOutput
+ from backend.blocks._base import Block, BlockSchemaInput, BlockSchemaOutput, BlockOutput
```
Example for the Wikipedia summary block:
```python
- from backend.data.block import Block, BlockSchemaInput, BlockSchemaOutput, BlockOutput
+ from backend.blocks._base import Block, BlockSchemaInput, BlockSchemaOutput, BlockOutput
from backend.utils.get_request import GetRequest
import requests
@@ -237,7 +237,7 @@ from backend.data.model import (
Credentials,
)
-from backend.data.block import Block, BlockOutput, BlockSchemaInput, BlockSchemaOutput
+from backend.blocks._base import Block, BlockOutput, BlockSchemaInput, BlockSchemaOutput
from backend.data.model import CredentialsField
from backend.integrations.providers import ProviderName
@@ -496,8 +496,8 @@ To create a webhook-triggered block, follow these additional steps on top of the
BlockWebhookConfig definition
- ```python title="backend/data/block.py"
- --8<-- "autogpt_platform/backend/backend/data/block.py:BlockWebhookConfig"
+ ```python title="backend/blocks/_base.py"
+ --8<-- "autogpt_platform/backend/backend/blocks/_base.py:BlockWebhookConfig"
```