Compare commits
39 Commits
ntindle-pa ... toggle-cor

| Author | SHA1 | Date |
|---|---|---|
|  | 50f39e55f7 |  |
|  | e70c970ab6 |  |
|  | 3bbce71678 |  |
|  | 34fbf4377f |  |
|  | f682ef885a |  |
|  | 2ffd249aac |  |
|  | 986245ec43 |  |
|  | f89717153f |  |
|  | 5da41e0753 |  |
|  | cddeb185a8 |  |
|  | 08a3fd6d26 |  |
|  | 39b30bc82c |  |
|  | 2df0e2b750 |  |
|  | 925f249ce1 |  |
|  | e8cf3edbf4 |  |
|  | dc03ea718c |  |
|  | dbee580d80 |  |
|  | 0325ec0a2c |  |
|  | 3952a1a226 |  |
|  | cfc975d39b |  |
|  | 46e0f6cc45 |  |
|  | c03af5c196 |  |
|  | 00cbfb8f80 |  |
|  | 3beafae955 |  |
|  | 9cd186a2f3 |  |
|  | dcf26bd3d4 |  |
|  | b97f097c9d |  |
|  | 5be6987d58 |  |
|  | 4bcc73f784 |  |
|  | c8240a4d6b |  |
|  | 82618aede0 |  |
|  | c4483fa6c7 |  |
|  | c2af8c1a6a |  |
|  | 483c399812 |  |
|  | 260dd526c9 |  |
|  | 75a159db01 |  |
|  | 62032e6584 |  |
|  | 105d5dc7e9 |  |
|  | 92515b3683 |  |
.github/workflows/claude.yml (vendored)

@@ -319,6 +319,7 @@ jobs:
        with:
          anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
          claude_args: |
            --allowedTools "Bash(npm:*),Bash(pnpm:*),Bash(poetry:*),Bash(git:*),Edit,Replace,NotebookEditCell,mcp__github_inline_comment__create_inline_comment,Bash(gh pr comment:*), Bash(gh pr diff:*), Bash(gh pr view:*)"
            --allowedTools "Bash(npm:*),Bash(pnpm:*),Bash(poetry:*),Bash(git:*),Edit,Replace,NotebookEditCell,mcp__github_inline_comment__create_inline_comment,Bash(gh pr comment:*), Bash(gh pr diff:*), Bash(gh pr view:*), Bash(gh pr edit:*)"
            --model opus
          additional_permissions: |
            actions: read
@@ -1,35 +0,0 @@
import hashlib
import secrets
from typing import NamedTuple


class APIKeyContainer(NamedTuple):
    """Container for API key parts."""

    raw: str
    prefix: str
    postfix: str
    hash: str


class APIKeyManager:
    PREFIX: str = "agpt_"
    PREFIX_LENGTH: int = 8
    POSTFIX_LENGTH: int = 8

    def generate_api_key(self) -> APIKeyContainer:
        """Generate a new API key with all its parts."""
        raw_key = f"{self.PREFIX}{secrets.token_urlsafe(32)}"
        return APIKeyContainer(
            raw=raw_key,
            prefix=raw_key[: self.PREFIX_LENGTH],
            postfix=raw_key[-self.POSTFIX_LENGTH :],
            hash=hashlib.sha256(raw_key.encode()).hexdigest(),
        )

    def verify_api_key(self, provided_key: str, stored_hash: str) -> bool:
        """Verify if a provided API key matches the stored hash."""
        if not provided_key.startswith(self.PREFIX):
            return False
        provided_hash = hashlib.sha256(provided_key.encode()).hexdigest()
        return secrets.compare_digest(provided_hash, stored_hash)
@@ -0,0 +1,78 @@
import hashlib
import secrets
from typing import NamedTuple

from cryptography.hazmat.primitives.kdf.scrypt import Scrypt


class APIKeyContainer(NamedTuple):
    """Container for API key parts."""

    key: str
    head: str
    tail: str
    hash: str
    salt: str


class APIKeySmith:
    PREFIX: str = "agpt_"
    HEAD_LENGTH: int = 8
    TAIL_LENGTH: int = 8

    def generate_key(self) -> APIKeyContainer:
        """Generate a new API key with secure hashing."""
        raw_key = f"{self.PREFIX}{secrets.token_urlsafe(32)}"
        hash, salt = self.hash_key(raw_key)

        return APIKeyContainer(
            key=raw_key,
            head=raw_key[: self.HEAD_LENGTH],
            tail=raw_key[-self.TAIL_LENGTH :],
            hash=hash,
            salt=salt,
        )

    def verify_key(
        self, provided_key: str, known_hash: str, known_salt: str | None = None
    ) -> bool:
        """
        Verify an API key against a known hash (+ salt).
        Supports verifying both legacy SHA256 and secure Scrypt hashes.
        """
        if not provided_key.startswith(self.PREFIX):
            return False

        # Handle legacy SHA256 hashes (migration support)
        if known_salt is None:
            legacy_hash = hashlib.sha256(provided_key.encode()).hexdigest()
            return secrets.compare_digest(legacy_hash, known_hash)

        try:
            salt_bytes = bytes.fromhex(known_salt)
            provided_hash = self._hash_key_with_salt(provided_key, salt_bytes)
            return secrets.compare_digest(provided_hash, known_hash)
        except (ValueError, TypeError):
            return False

    def hash_key(self, raw_key: str) -> tuple[str, str]:
        """Hash a raw API key with a freshly generated salt."""
        salt = self._generate_salt()
        hash = self._hash_key_with_salt(raw_key, salt)
        return hash, salt.hex()

    def _generate_salt(self) -> bytes:
        """Generate a random salt for hashing."""
        return secrets.token_bytes(32)

    def _hash_key_with_salt(self, raw_key: str, salt: bytes) -> str:
        """Hash API key using Scrypt with salt."""
        kdf = Scrypt(
            length=32,
            salt=salt,
            n=2**14,  # CPU/memory cost parameter
            r=8,  # Block size parameter
            p=1,  # Parallelization parameter
        )
        key_hash = kdf.derive(raw_key.encode())
        return key_hash.hex()
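For orientation, a minimal usage sketch of the new APIKeySmith (class, method, and field names come from the file above; the stored_hash / stored_salt variables are hypothetical stand-ins for whatever columns the database persists):

from autogpt_libs.api_key.keysmith import APIKeySmith

keysmith = APIKeySmith()

# Issue a key: persist head, tail, hash, and salt; show the raw key to the user only once.
issued = keysmith.generate_key()
stored_hash, stored_salt = issued.hash, issued.salt  # hypothetical storage columns

# Later, verify a presented key against the stored values (Scrypt path).
assert keysmith.verify_key(issued.key, stored_hash, stored_salt)

# Legacy rows have no salt; omitting it falls back to the SHA-256 comparison.
# With n=2**14, r=8, p=1, one Scrypt derivation touches roughly
# 128 * n * r bytes ≈ 16 MiB of memory, which is what makes offline brute force expensive.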
@@ -0,0 +1,79 @@
import hashlib

from autogpt_libs.api_key.keysmith import APIKeySmith


def test_generate_api_key():
    keysmith = APIKeySmith()
    key = keysmith.generate_key()

    assert key.key.startswith(keysmith.PREFIX)
    assert key.head == key.key[: keysmith.HEAD_LENGTH]
    assert key.tail == key.key[-keysmith.TAIL_LENGTH :]
    assert len(key.hash) == 64  # 32 bytes hex encoded
    assert len(key.salt) == 64  # 32 bytes hex encoded


def test_verify_new_secure_key():
    keysmith = APIKeySmith()
    key = keysmith.generate_key()

    # Test correct key validates
    assert keysmith.verify_key(key.key, key.hash, key.salt) is True

    # Test wrong key fails
    wrong_key = f"{keysmith.PREFIX}wrongkey123"
    assert keysmith.verify_key(wrong_key, key.hash, key.salt) is False


def test_verify_legacy_key():
    keysmith = APIKeySmith()
    legacy_key = f"{keysmith.PREFIX}legacykey123"
    legacy_hash = hashlib.sha256(legacy_key.encode()).hexdigest()

    # Test legacy key validates without salt
    assert keysmith.verify_key(legacy_key, legacy_hash) is True

    # Test wrong legacy key fails
    wrong_key = f"{keysmith.PREFIX}wronglegacy"
    assert keysmith.verify_key(wrong_key, legacy_hash) is False


def test_rehash_existing_key():
    keysmith = APIKeySmith()
    legacy_key = f"{keysmith.PREFIX}migratekey123"

    # Migrate the legacy key
    new_hash, new_salt = keysmith.hash_key(legacy_key)

    # Verify migrated key works
    assert keysmith.verify_key(legacy_key, new_hash, new_salt) is True

    # Verify different key fails with migrated hash
    wrong_key = f"{keysmith.PREFIX}wrongkey"
    assert keysmith.verify_key(wrong_key, new_hash, new_salt) is False


def test_invalid_key_prefix():
    keysmith = APIKeySmith()
    key = keysmith.generate_key()

    # Test key without proper prefix fails
    invalid_key = "invalid_prefix_key"
    assert keysmith.verify_key(invalid_key, key.hash, key.salt) is False


def test_secure_hash_requires_salt():
    keysmith = APIKeySmith()
    key = keysmith.generate_key()

    # Secure hash without salt should fail
    assert keysmith.verify_key(key.key, key.hash) is False


def test_invalid_salt_format():
    keysmith = APIKeySmith()
    key = keysmith.generate_key()

    # Invalid salt format should fail gracefully
    assert keysmith.verify_key(key.key, key.hash, "invalid_hex") is False
autogpt_platform/autogpt_libs/poetry.lock (generated)

@@ -1002,6 +1002,18 @@ dynamodb = ["boto3 (>=1.9.71)"]
redis = ["redis (>=2.10.5)"]
test-filesource = ["pyyaml (>=5.3.1)", "watchdog (>=3.0.0)"]

[[package]]
name = "nodeenv"
version = "1.9.1"
description = "Node.js virtual environment builder"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
groups = ["dev"]
files = [
    {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"},
    {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"},
]

[[package]]
name = "opentelemetry-api"
version = "1.35.0"

@@ -1347,6 +1359,27 @@ files = [
    {file = "pyrfc3339-2.0.1.tar.gz", hash = "sha256:e47843379ea35c1296c3b6c67a948a1a490ae0584edfcbdea0eaffb5dd29960b"},
]

[[package]]
name = "pyright"
version = "1.1.404"
description = "Command line wrapper for pyright"
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
    {file = "pyright-1.1.404-py3-none-any.whl", hash = "sha256:c7b7ff1fdb7219c643079e4c3e7d4125f0dafcc19d253b47e898d130ea426419"},
    {file = "pyright-1.1.404.tar.gz", hash = "sha256:455e881a558ca6be9ecca0b30ce08aa78343ecc031d37a198ffa9a7a1abeb63e"},
]

[package.dependencies]
nodeenv = ">=1.6.0"
typing-extensions = ">=4.1"

[package.extras]
all = ["nodejs-wheel-binaries", "twine (>=3.4.1)"]
dev = ["twine (>=3.4.1)"]
nodejs = ["nodejs-wheel-binaries"]

[[package]]
name = "pytest"
version = "8.4.1"

@@ -1740,7 +1773,6 @@ files = [
    {file = "typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76"},
    {file = "typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36"},
]
markers = {dev = "python_version < \"3.11\""}

[[package]]
name = "typing-inspection"

@@ -1897,4 +1929,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.10,<4.0"
content-hash = "d841f62f95180f6ad63ce82ed8e62aa201b9bf89242cc9299ae0f26ff1f72136"
content-hash = "0c40b63c3c921846cf05ccfb4e685d4959854b29c2c302245f9832e20aac6954"
@@ -9,6 +9,7 @@ packages = [{ include = "autogpt_libs" }]
[tool.poetry.dependencies]
python = ">=3.10,<4.0"
colorama = "^0.4.6"
cryptography = "^45.0"
expiringdict = "^1.2.2"
fastapi = "^0.116.1"
google-cloud-logging = "^3.12.1"

@@ -21,11 +22,12 @@ supabase = "^2.16.0"
uvicorn = "^0.35.0"

[tool.poetry.group.dev.dependencies]
ruff = "^0.12.11"
pyright = "^1.1.404"
pytest = "^8.4.1"
pytest-asyncio = "^1.1.0"
pytest-mock = "^3.14.1"
pytest-cov = "^6.2.1"
ruff = "^0.12.11"

[build-system]
requires = ["poetry-core"]
@@ -661,6 +661,167 @@ async def update_field(
#################################################################


async def get_table_schema(
    credentials: Credentials,
    base_id: str,
    table_id_or_name: str,
) -> dict:
    """
    Get the schema for a specific table, including all field definitions.

    Args:
        credentials: Airtable API credentials
        base_id: The base ID
        table_id_or_name: The table ID or name

    Returns:
        Dict containing table schema with fields information
    """
    # First get all tables to find the right one
    response = await Requests().get(
        f"https://api.airtable.com/v0/meta/bases/{base_id}/tables",
        headers={"Authorization": credentials.auth_header()},
    )

    data = response.json()
    tables = data.get("tables", [])

    # Find the matching table
    for table in tables:
        if table.get("id") == table_id_or_name or table.get("name") == table_id_or_name:
            return table

    raise ValueError(f"Table '{table_id_or_name}' not found in base '{base_id}'")


def get_empty_value_for_field(field_type: str) -> Any:
    """
    Return the appropriate empty value for a given Airtable field type.

    Args:
        field_type: The Airtable field type

    Returns:
        The appropriate empty value for that field type
    """
    # Fields that should be false when empty
    if field_type == "checkbox":
        return False

    # Fields that should be empty arrays
    if field_type in [
        "multipleSelects",
        "multipleRecordLinks",
        "multipleAttachments",
        "multipleLookupValues",
        "multipleCollaborators",
    ]:
        return []

    # Fields that should be 0 when empty (numeric types)
    if field_type in [
        "number",
        "percent",
        "currency",
        "rating",
        "duration",
        "count",
        "autoNumber",
    ]:
        return 0

    # Fields that should be empty strings
    if field_type in [
        "singleLineText",
        "multilineText",
        "email",
        "url",
        "phoneNumber",
        "richText",
        "barcode",
    ]:
        return ""

    # Everything else gets null (dates, single selects, formulas, etc.)
    return None


async def normalize_records(
    records: list[dict],
    table_schema: dict,
    include_field_metadata: bool = False,
) -> dict:
    """
    Normalize Airtable records to include all fields with proper empty values.

    Args:
        records: List of record objects from Airtable API
        table_schema: Table schema containing field definitions
        include_field_metadata: Whether to include field metadata in response

    Returns:
        Dict with normalized records and optionally field metadata
    """
    fields = table_schema.get("fields", [])

    # Normalize each record
    normalized_records = []
    for record in records:
        normalized = {
            "id": record.get("id"),
            "createdTime": record.get("createdTime"),
            "fields": {},
        }

        # Add existing fields
        existing_fields = record.get("fields", {})

        # Add all fields from schema, using empty values for missing ones
        for field in fields:
            field_name = field["name"]
            field_type = field["type"]

            if field_name in existing_fields:
                # Field exists, use its value
                normalized["fields"][field_name] = existing_fields[field_name]
            else:
                # Field is missing, add appropriate empty value
                normalized["fields"][field_name] = get_empty_value_for_field(field_type)

        normalized_records.append(normalized)

    # Build result dictionary
    if include_field_metadata:
        field_metadata = {}
        for field in fields:
            metadata = {"type": field["type"], "id": field["id"]}

            # Add type-specific metadata
            options = field.get("options", {})
            if field["type"] == "currency" and "symbol" in options:
                metadata["symbol"] = options["symbol"]
                metadata["precision"] = options.get("precision", 2)
            elif field["type"] == "duration" and "durationFormat" in options:
                metadata["format"] = options["durationFormat"]
            elif field["type"] == "percent" and "precision" in options:
                metadata["precision"] = options["precision"]
            elif (
                field["type"] in ["singleSelect", "multipleSelects"]
                and "choices" in options
            ):
                metadata["choices"] = [choice["name"] for choice in options["choices"]]
            elif field["type"] == "rating" and "max" in options:
                metadata["max"] = options["max"]
                metadata["icon"] = options.get("icon", "star")
                metadata["color"] = options.get("color", "yellowBright")

            field_metadata[field["name"]] = metadata

        return {"records": normalized_records, "field_metadata": field_metadata}
    else:
        return {"records": normalized_records}


async def list_records(
    credentials: Credentials,
    base_id: str,

@@ -1249,3 +1410,26 @@ async def list_bases(
    )

    return response.json()


async def get_base_tables(
    credentials: Credentials,
    base_id: str,
) -> list[dict]:
    """
    Get all tables for a specific base.

    Args:
        credentials: Airtable API credentials
        base_id: The ID of the base

    Returns:
        list[dict]: List of table objects with their schemas
    """
    response = await Requests().get(
        f"https://api.airtable.com/v0/meta/bases/{base_id}/tables",
        headers={"Authorization": credentials.auth_header()},
    )

    data = response.json()
    return data.get("tables", [])
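To make the normalization contract concrete, a small hand-made example (the schema and record below are hypothetical, not taken from this diff; it assumes the normalize_records and get_empty_value_for_field helpers above are importable):

import asyncio

schema = {
    "fields": [
        {"id": "fld1", "name": "Done", "type": "checkbox"},
        {"id": "fld2", "name": "Amount", "type": "currency"},
        {"id": "fld3", "name": "Notes", "type": "singleLineText"},
    ]
}
# Airtable omits empty fields entirely, so this record carries no field values.
records = [{"id": "rec1", "createdTime": "2024-01-01T00:00:00.000Z", "fields": {}}]

result = asyncio.run(normalize_records(records, schema))
# result["records"][0]["fields"] == {"Done": False, "Amount": 0, "Notes": ""}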
@@ -14,13 +14,13 @@ from backend.sdk import (
    SchemaField,
)

from ._api import create_base, list_bases
from ._api import create_base, get_base_tables, list_bases
from ._config import airtable


class AirtableCreateBaseBlock(Block):
    """
    Creates a new base in an Airtable workspace.
    Creates a new base in an Airtable workspace, or returns existing base if one with the same name exists.
    """

    class Input(BlockSchema):

@@ -31,6 +31,10 @@ class AirtableCreateBaseBlock(Block):
            description="The workspace ID where the base will be created"
        )
        name: str = SchemaField(description="The name of the new base")
        find_existing: bool = SchemaField(
            description="If true, return existing base with same name instead of creating duplicate",
            default=True,
        )
        tables: list[dict] = SchemaField(
            description="At least one table and field must be specified. Array of table objects to create in the base. Each table should have 'name' and 'fields' properties",
            default=[

@@ -50,14 +54,18 @@ class AirtableCreateBaseBlock(Block):
        )

    class Output(BlockSchema):
        base_id: str = SchemaField(description="The ID of the created base")
        base_id: str = SchemaField(description="The ID of the created or found base")
        tables: list[dict] = SchemaField(description="Array of table objects")
        table: dict = SchemaField(description="A single table object")
        was_created: bool = SchemaField(
            description="True if a new base was created, False if existing was found",
            default=True,
        )

    def __init__(self):
        super().__init__(
            id="f59b88a8-54ce-4676-a508-fd614b4e8dce",
            description="Create a new base in Airtable",
            description="Create or find a base in Airtable",
            categories={BlockCategory.DATA},
            input_schema=self.Input,
            output_schema=self.Output,

@@ -66,6 +74,31 @@ class AirtableCreateBaseBlock(Block):
    async def run(
        self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
    ) -> BlockOutput:
        # If find_existing is true, check if a base with this name already exists
        if input_data.find_existing:
            # List all bases to check for existing one with same name
            # Note: Airtable API doesn't have a direct search, so we need to list and filter
            existing_bases = await list_bases(credentials)

            for base in existing_bases.get("bases", []):
                if base.get("name") == input_data.name:
                    # Base already exists, return it
                    base_id = base.get("id")
                    yield "base_id", base_id
                    yield "was_created", False

                    # Get the tables for this base
                    try:
                        tables = await get_base_tables(credentials, base_id)
                        yield "tables", tables
                        for table in tables:
                            yield "table", table
                    except Exception:
                        # If we can't get tables, return empty list
                        yield "tables", []
                    return

        # No existing base found or find_existing is false, create new one
        data = await create_base(
            credentials,
            input_data.workspace_id,

@@ -74,6 +107,7 @@ class AirtableCreateBaseBlock(Block):
        )

        yield "base_id", data.get("id", None)
        yield "was_created", True
        yield "tables", data.get("tables", [])
        for table in data.get("tables", []):
            yield "table", table
@@ -2,7 +2,7 @@
Airtable record operation blocks.
"""

from typing import Optional
from typing import Optional, cast

from backend.sdk import (
    APIKeyCredentials,

@@ -18,7 +18,9 @@ from ._api import (
    create_record,
    delete_multiple_records,
    get_record,
    get_table_schema,
    list_records,
    normalize_records,
    update_multiple_records,
)
from ._config import airtable

@@ -54,12 +56,24 @@ class AirtableListRecordsBlock(Block):
        return_fields: list[str] = SchemaField(
            description="Specific fields to return (comma-separated)", default=[]
        )
        normalize_output: bool = SchemaField(
            description="Normalize output to include all fields with proper empty values (disable to skip schema fetch and get raw Airtable response)",
            default=True,
        )
        include_field_metadata: bool = SchemaField(
            description="Include field type and configuration metadata (requires normalize_output=true)",
            default=False,
        )

    class Output(BlockSchema):
        records: list[dict] = SchemaField(description="Array of record objects")
        offset: Optional[str] = SchemaField(
            description="Offset for next page (null if no more records)", default=None
        )
        field_metadata: Optional[dict] = SchemaField(
            description="Field type and configuration metadata (only when include_field_metadata=true)",
            default=None,
        )

    def __init__(self):
        super().__init__(

@@ -73,6 +87,7 @@ class AirtableListRecordsBlock(Block):
    async def run(
        self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
    ) -> BlockOutput:

        data = await list_records(
            credentials,
            input_data.base_id,

@@ -88,8 +103,33 @@ class AirtableListRecordsBlock(Block):
            fields=input_data.return_fields if input_data.return_fields else None,
        )

        yield "records", data.get("records", [])
        yield "offset", data.get("offset", None)
        records = data.get("records", [])

        # Normalize output if requested
        if input_data.normalize_output:
            # Fetch table schema
            table_schema = await get_table_schema(
                credentials, input_data.base_id, input_data.table_id_or_name
            )

            # Normalize the records
            normalized_data = await normalize_records(
                records,
                table_schema,
                include_field_metadata=input_data.include_field_metadata,
            )

            yield "records", normalized_data["records"]
            yield "offset", data.get("offset", None)

            if (
                input_data.include_field_metadata
                and "field_metadata" in normalized_data
            ):
                yield "field_metadata", normalized_data["field_metadata"]
        else:
            yield "records", records
            yield "offset", data.get("offset", None)


class AirtableGetRecordBlock(Block):

@@ -104,11 +144,23 @@ class AirtableGetRecordBlock(Block):
        base_id: str = SchemaField(description="The Airtable base ID")
        table_id_or_name: str = SchemaField(description="Table ID or name")
        record_id: str = SchemaField(description="The record ID to retrieve")
        normalize_output: bool = SchemaField(
            description="Normalize output to include all fields with proper empty values (disable to skip schema fetch and get raw Airtable response)",
            default=True,
        )
        include_field_metadata: bool = SchemaField(
            description="Include field type and configuration metadata (requires normalize_output=true)",
            default=False,
        )

    class Output(BlockSchema):
        id: str = SchemaField(description="The record ID")
        fields: dict = SchemaField(description="The record fields")
        created_time: str = SchemaField(description="The record created time")
        field_metadata: Optional[dict] = SchemaField(
            description="Field type and configuration metadata (only when include_field_metadata=true)",
            default=None,
        )

    def __init__(self):
        super().__init__(

@@ -122,6 +174,7 @@ class AirtableGetRecordBlock(Block):
    async def run(
        self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
    ) -> BlockOutput:

        record = await get_record(
            credentials,
            input_data.base_id,

@@ -129,9 +182,34 @@ class AirtableGetRecordBlock(Block):
            input_data.record_id,
        )

        yield "id", record.get("id", None)
        yield "fields", record.get("fields", None)
        yield "created_time", record.get("createdTime", None)
        # Normalize output if requested
        if input_data.normalize_output:
            # Fetch table schema
            table_schema = await get_table_schema(
                credentials, input_data.base_id, input_data.table_id_or_name
            )

            # Normalize the single record (wrap in list and unwrap result)
            normalized_data = await normalize_records(
                [record],
                table_schema,
                include_field_metadata=input_data.include_field_metadata,
            )

            normalized_record = normalized_data["records"][0]
            yield "id", normalized_record.get("id", None)
            yield "fields", normalized_record.get("fields", None)
            yield "created_time", normalized_record.get("createdTime", None)

            if (
                input_data.include_field_metadata
                and "field_metadata" in normalized_data
            ):
                yield "field_metadata", normalized_data["field_metadata"]
        else:
            yield "id", record.get("id", None)
            yield "fields", record.get("fields", None)
            yield "created_time", record.get("createdTime", None)


class AirtableCreateRecordsBlock(Block):

@@ -148,6 +226,10 @@ class AirtableCreateRecordsBlock(Block):
        records: list[dict] = SchemaField(
            description="Array of records to create (each with 'fields' object)"
        )
        skip_normalization: bool = SchemaField(
            description="Skip output normalization to get raw Airtable response (faster but may have missing fields)",
            default=False,
        )
        typecast: bool = SchemaField(
            description="Automatically convert string values to appropriate types",
            default=False,

@@ -159,7 +241,6 @@ class AirtableCreateRecordsBlock(Block):

    class Output(BlockSchema):
        records: list[dict] = SchemaField(description="Array of created record objects")
        details: dict = SchemaField(description="Details of the created records")

    def __init__(self):
        super().__init__(

@@ -173,7 +254,7 @@ class AirtableCreateRecordsBlock(Block):
    async def run(
        self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
    ) -> BlockOutput:
        # The create_record API expects records in a specific format

        data = await create_record(
            credentials,
            input_data.base_id,

@@ -182,11 +263,22 @@ class AirtableCreateRecordsBlock(Block):
            typecast=input_data.typecast if input_data.typecast else None,
            return_fields_by_field_id=input_data.return_fields_by_field_id,
        )
        result_records = cast(list[dict], data.get("records", []))

        yield "records", data.get("records", [])
        details = data.get("details", None)
        if details:
            yield "details", details
        # Normalize output unless explicitly disabled
        if not input_data.skip_normalization and result_records:
            # Fetch table schema
            table_schema = await get_table_schema(
                credentials, input_data.base_id, input_data.table_id_or_name
            )

            # Normalize the records
            normalized_data = await normalize_records(
                result_records, table_schema, include_field_metadata=False
            )
            result_records = normalized_data["records"]

        yield "records", result_records


class AirtableUpdateRecordsBlock(Block):
@@ -1094,6 +1094,117 @@ class GmailGetThreadBlock(GmailBase):
        return thread


async def _build_reply_message(
    service, input_data, graph_exec_id: str, user_id: str
) -> tuple[str, str]:
    """
    Builds a reply MIME message for Gmail threads.

    Returns:
        tuple: (base64-encoded raw message, threadId)
    """
    # Get parent message for reply context
    parent = await asyncio.to_thread(
        lambda: service.users()
        .messages()
        .get(
            userId="me",
            id=input_data.parentMessageId,
            format="metadata",
            metadataHeaders=[
                "Subject",
                "References",
                "Message-ID",
                "From",
                "To",
                "Cc",
                "Reply-To",
            ],
        )
        .execute()
    )

    # Build headers dictionary, preserving all values for duplicate headers
    headers = {}
    for h in parent.get("payload", {}).get("headers", []):
        name = h["name"].lower()
        value = h["value"]
        if name in headers:
            # For duplicate headers, keep the first occurrence (most relevant for reply context)
            continue
        headers[name] = value

    # Determine recipients if not specified
    if not (input_data.to or input_data.cc or input_data.bcc):
        if input_data.replyAll:
            recipients = [parseaddr(headers.get("from", ""))[1]]
            recipients += [addr for _, addr in getaddresses([headers.get("to", "")])]
            recipients += [addr for _, addr in getaddresses([headers.get("cc", "")])]
            # Use dict.fromkeys() for O(n) deduplication while preserving order
            input_data.to = list(dict.fromkeys(filter(None, recipients)))
        else:
            # Check Reply-To header first, fall back to From header
            reply_to = headers.get("reply-to", "")
            from_addr = headers.get("from", "")
            sender = parseaddr(reply_to if reply_to else from_addr)[1]
            input_data.to = [sender] if sender else []

    # Set subject with Re: prefix if not already present
    if input_data.subject:
        subject = input_data.subject
    else:
        parent_subject = headers.get("subject", "").strip()
        # Only add "Re:" if not already present (case-insensitive check)
        if parent_subject.lower().startswith("re:"):
            subject = parent_subject
        else:
            subject = f"Re: {parent_subject}" if parent_subject else "Re:"

    # Build references header for proper threading
    references = headers.get("references", "").split()
    if headers.get("message-id"):
        references.append(headers["message-id"])

    # Create MIME message
    msg = MIMEMultipart()
    if input_data.to:
        msg["To"] = ", ".join(input_data.to)
    if input_data.cc:
        msg["Cc"] = ", ".join(input_data.cc)
    if input_data.bcc:
        msg["Bcc"] = ", ".join(input_data.bcc)
    msg["Subject"] = subject
    if headers.get("message-id"):
        msg["In-Reply-To"] = headers["message-id"]
    if references:
        msg["References"] = " ".join(references)

    # Use the helper function for consistent content type handling
    msg.attach(_make_mime_text(input_data.body, input_data.content_type))

    # Handle attachments
    for attach in input_data.attachments:
        local_path = await store_media_file(
            user_id=user_id,
            graph_exec_id=graph_exec_id,
            file=attach,
            return_content=False,
        )
        abs_path = get_exec_file_path(graph_exec_id, local_path)
        part = MIMEBase("application", "octet-stream")
        with open(abs_path, "rb") as f:
            part.set_payload(f.read())
        encoders.encode_base64(part)
        part.add_header(
            "Content-Disposition", f"attachment; filename={Path(abs_path).name}"
        )
        msg.attach(part)

    # Encode message
    raw = base64.urlsafe_b64encode(msg.as_bytes()).decode("utf-8")
    return raw, input_data.threadId


class GmailReplyBlock(GmailBase):
    """
    Replies to Gmail threads with intelligent content type detection.

@@ -1230,93 +1341,146 @@ class GmailReplyBlock(GmailBase):
    async def _reply(
        self, service, input_data: Input, graph_exec_id: str, user_id: str
    ) -> dict:
        parent = await asyncio.to_thread(
            lambda: service.users()
            .messages()
            .get(
                userId="me",
                id=input_data.parentMessageId,
                format="metadata",
                metadataHeaders=[
                    "Subject",
                    "References",
                    "Message-ID",
                    "From",
                    "To",
                    "Cc",
                    "Reply-To",
                ],
            )
            .execute()
        # Build the reply message using the shared helper
        raw, thread_id = await _build_reply_message(
            service, input_data, graph_exec_id, user_id
        )

        headers = {
            h["name"].lower(): h["value"]
            for h in parent.get("payload", {}).get("headers", [])
        }
        if not (input_data.to or input_data.cc or input_data.bcc):
            if input_data.replyAll:
                recipients = [parseaddr(headers.get("from", ""))[1]]
                recipients += [
                    addr for _, addr in getaddresses([headers.get("to", "")])
                ]
                recipients += [
                    addr for _, addr in getaddresses([headers.get("cc", "")])
                ]
                dedup: list[str] = []
                for r in recipients:
                    if r and r not in dedup:
                        dedup.append(r)
                input_data.to = dedup
            else:
                sender = parseaddr(headers.get("reply-to", headers.get("from", "")))[1]
                input_data.to = [sender] if sender else []
        subject = input_data.subject or (f"Re: {headers.get('subject', '')}".strip())
        references = headers.get("references", "").split()
        if headers.get("message-id"):
            references.append(headers["message-id"])

        msg = MIMEMultipart()
        if input_data.to:
            msg["To"] = ", ".join(input_data.to)
        if input_data.cc:
            msg["Cc"] = ", ".join(input_data.cc)
        if input_data.bcc:
            msg["Bcc"] = ", ".join(input_data.bcc)
        msg["Subject"] = subject
        if headers.get("message-id"):
            msg["In-Reply-To"] = headers["message-id"]
        if references:
            msg["References"] = " ".join(references)
        # Use the new helper function for consistent content type handling
        msg.attach(_make_mime_text(input_data.body, input_data.content_type))

        for attach in input_data.attachments:
            local_path = await store_media_file(
                user_id=user_id,
                graph_exec_id=graph_exec_id,
                file=attach,
                return_content=False,
            )
            abs_path = get_exec_file_path(graph_exec_id, local_path)
            part = MIMEBase("application", "octet-stream")
            with open(abs_path, "rb") as f:
                part.set_payload(f.read())
            encoders.encode_base64(part)
            part.add_header(
                "Content-Disposition", f"attachment; filename={Path(abs_path).name}"
            )
            msg.attach(part)

        raw = base64.urlsafe_b64encode(msg.as_bytes()).decode("utf-8")
        # Send the message
        return await asyncio.to_thread(
            lambda: service.users()
            .messages()
            .send(userId="me", body={"threadId": input_data.threadId, "raw": raw})
            .send(userId="me", body={"threadId": thread_id, "raw": raw})
            .execute()
        )


class GmailDraftReplyBlock(GmailBase):
    """
    Creates draft replies to Gmail threads with intelligent content type detection.

    Features:
    - Automatic HTML detection: Draft replies containing HTML tags are formatted as text/html
    - No hard-wrap for plain text: Plain text draft replies preserve natural line flow
    - Manual content type override: Use content_type parameter to force specific format
    - Reply-all functionality: Option to reply to all original recipients
    - Thread preservation: Maintains proper email threading with headers
    - Full Unicode/emoji support with UTF-8 encoding
    """

    class Input(BlockSchema):
        credentials: GoogleCredentialsInput = GoogleCredentialsField(
            [
                "https://www.googleapis.com/auth/gmail.modify",
                "https://www.googleapis.com/auth/gmail.readonly",
            ]
        )
        threadId: str = SchemaField(description="Thread ID to reply in")
        parentMessageId: str = SchemaField(
            description="ID of the message being replied to"
        )
        to: list[str] = SchemaField(description="To recipients", default_factory=list)
        cc: list[str] = SchemaField(description="CC recipients", default_factory=list)
        bcc: list[str] = SchemaField(description="BCC recipients", default_factory=list)
        replyAll: bool = SchemaField(
            description="Reply to all original recipients", default=False
        )
        subject: str = SchemaField(description="Email subject", default="")
        body: str = SchemaField(description="Email body (plain text or HTML)")
        content_type: Optional[Literal["auto", "plain", "html"]] = SchemaField(
            description="Content type: 'auto' (default - detects HTML), 'plain', or 'html'",
            default=None,
            advanced=True,
        )
        attachments: list[MediaFileType] = SchemaField(
            description="Files to attach", default_factory=list, advanced=True
        )

    class Output(BlockSchema):
        draftId: str = SchemaField(description="Created draft ID")
        messageId: str = SchemaField(description="Draft message ID")
        threadId: str = SchemaField(description="Thread ID")
        status: str = SchemaField(description="Draft creation status")
        error: str = SchemaField(description="Error message if any")

    def __init__(self):
        super().__init__(
            id="d7a9f3e2-8b4c-4d6f-9e1a-3c5b7f8d2a6e",
            description="Create draft replies to Gmail threads with automatic HTML detection and proper text formatting. Plain text draft replies maintain natural paragraph flow without 78-character line wrapping. HTML content is automatically detected and formatted correctly.",
            categories={BlockCategory.COMMUNICATION},
            input_schema=GmailDraftReplyBlock.Input,
            output_schema=GmailDraftReplyBlock.Output,
            disabled=not GOOGLE_OAUTH_IS_CONFIGURED,
            test_input={
                "threadId": "t1",
                "parentMessageId": "m1",
                "body": "Thanks for your message. I'll review and get back to you.",
                "replyAll": False,
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("draftId", "draft1"),
                ("messageId", "m2"),
                ("threadId", "t1"),
                ("status", "draft_created"),
            ],
            test_mock={
                "_create_draft_reply": lambda *args, **kwargs: {
                    "id": "draft1",
                    "message": {"id": "m2", "threadId": "t1"},
                }
            },
        )

    async def run(
        self,
        input_data: Input,
        *,
        credentials: GoogleCredentials,
        graph_exec_id: str,
        user_id: str,
        **kwargs,
    ) -> BlockOutput:
        service = self._build_service(credentials, **kwargs)
        draft = await self._create_draft_reply(
            service,
            input_data,
            graph_exec_id,
            user_id,
        )
        yield "draftId", draft["id"]
        yield "messageId", draft["message"]["id"]
        yield "threadId", draft["message"].get("threadId", input_data.threadId)
        yield "status", "draft_created"

    async def _create_draft_reply(
        self, service, input_data: Input, graph_exec_id: str, user_id: str
    ) -> dict:
        # Build the reply message using the shared helper
        raw, thread_id = await _build_reply_message(
            service, input_data, graph_exec_id, user_id
        )

        # Create draft with proper thread association
        draft = await asyncio.to_thread(
            lambda: service.users()
            .drafts()
            .create(
                userId="me",
                body={
                    "message": {
                        "threadId": thread_id,
                        "raw": raw,
                    }
                },
            )
            .execute()
        )

        return draft


class GmailGetProfileBlock(GmailBase):
    class Input(BlockSchema):
        credentials: GoogleCredentialsInput = GoogleCredentialsField(
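As a quick illustration of the threading logic in _build_reply_message above, this is how the In-Reply-To / References chain for a reply is assembled (the message IDs and subject are hypothetical):

# Hypothetical headers of the parent message being replied to.
parent_headers = {
    "message-id": "<msg-3@example.com>",
    "references": "<msg-1@example.com> <msg-2@example.com>",
    "subject": "Re: Budget review",
}

references = parent_headers.get("references", "").split()
references.append(parent_headers["message-id"])

# The reply then carries:
#   In-Reply-To: <msg-3@example.com>
#   References:  <msg-1@example.com> <msg-2@example.com> <msg-3@example.com>
#   Subject:     Re: Budget review  (no second "Re:" thanks to the case-insensitive check)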
@@ -1,57 +1,31 @@
|
||||
import logging
|
||||
import uuid
|
||||
from datetime import datetime, timezone
|
||||
from typing import List, Optional
|
||||
from typing import Optional
|
||||
|
||||
from autogpt_libs.api_key.key_manager import APIKeyManager
|
||||
from autogpt_libs.api_key.keysmith import APIKeySmith
|
||||
from prisma.enums import APIKeyPermission, APIKeyStatus
|
||||
from prisma.errors import PrismaError
|
||||
from prisma.models import APIKey as PrismaAPIKey
|
||||
from prisma.types import (
|
||||
APIKeyCreateInput,
|
||||
APIKeyUpdateInput,
|
||||
APIKeyWhereInput,
|
||||
APIKeyWhereUniqueInput,
|
||||
)
|
||||
from pydantic import BaseModel
|
||||
from prisma.types import APIKeyWhereUniqueInput
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from backend.data.db import BaseDbModel
|
||||
from backend.util.exceptions import NotAuthorizedError, NotFoundError
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
keysmith = APIKeySmith()
|
||||
|
||||
|
||||
# Some basic exceptions
|
||||
class APIKeyError(Exception):
|
||||
"""Base exception for API key operations"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class APIKeyNotFoundError(APIKeyError):
|
||||
"""Raised when an API key is not found"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class APIKeyPermissionError(APIKeyError):
|
||||
"""Raised when there are permission issues with API key operations"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class APIKeyValidationError(APIKeyError):
|
||||
"""Raised when API key validation fails"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class APIKey(BaseDbModel):
|
||||
class APIKeyInfo(BaseModel):
|
||||
id: str
|
||||
name: str
|
||||
prefix: str
|
||||
key: str
|
||||
status: APIKeyStatus = APIKeyStatus.ACTIVE
|
||||
permissions: List[APIKeyPermission]
|
||||
postfix: str
|
||||
head: str = Field(
|
||||
description=f"The first {APIKeySmith.HEAD_LENGTH} characters of the key"
|
||||
)
|
||||
tail: str = Field(
|
||||
description=f"The last {APIKeySmith.TAIL_LENGTH} characters of the key"
|
||||
)
|
||||
status: APIKeyStatus
|
||||
permissions: list[APIKeyPermission]
|
||||
created_at: datetime
|
||||
last_used_at: Optional[datetime] = None
|
||||
revoked_at: Optional[datetime] = None
|
||||
@@ -60,266 +34,211 @@ class APIKey(BaseDbModel):
|
||||
|
||||
@staticmethod
|
||||
def from_db(api_key: PrismaAPIKey):
|
||||
try:
|
||||
return APIKey(
|
||||
id=api_key.id,
|
||||
name=api_key.name,
|
||||
prefix=api_key.prefix,
|
||||
postfix=api_key.postfix,
|
||||
key=api_key.key,
|
||||
status=APIKeyStatus(api_key.status),
|
||||
permissions=[APIKeyPermission(p) for p in api_key.permissions],
|
||||
created_at=api_key.createdAt,
|
||||
last_used_at=api_key.lastUsedAt,
|
||||
revoked_at=api_key.revokedAt,
|
||||
description=api_key.description,
|
||||
user_id=api_key.userId,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating APIKey from db: {str(e)}")
|
||||
raise APIKeyError(f"Failed to create API key object: {str(e)}")
|
||||
return APIKeyInfo(
|
||||
id=api_key.id,
|
||||
name=api_key.name,
|
||||
head=api_key.head,
|
||||
tail=api_key.tail,
|
||||
status=APIKeyStatus(api_key.status),
|
||||
permissions=[APIKeyPermission(p) for p in api_key.permissions],
|
||||
created_at=api_key.createdAt,
|
||||
last_used_at=api_key.lastUsedAt,
|
||||
revoked_at=api_key.revokedAt,
|
||||
description=api_key.description,
|
||||
user_id=api_key.userId,
|
||||
)
|
||||
|
||||
|
||||
class APIKeyWithoutHash(BaseModel):
|
||||
id: str
|
||||
name: str
|
||||
prefix: str
|
||||
postfix: str
|
||||
status: APIKeyStatus
|
||||
permissions: List[APIKeyPermission]
|
||||
created_at: datetime
|
||||
last_used_at: Optional[datetime]
|
||||
revoked_at: Optional[datetime]
|
||||
description: Optional[str]
|
||||
user_id: str
|
||||
class APIKeyInfoWithHash(APIKeyInfo):
|
||||
hash: str
|
||||
salt: str | None = None # None for legacy keys
|
||||
|
||||
def match(self, plaintext_key: str) -> bool:
|
||||
"""Returns whether the given key matches this API key object."""
|
||||
return keysmith.verify_key(plaintext_key, self.hash, self.salt)
|
||||
|
||||
@staticmethod
|
||||
def from_db(api_key: PrismaAPIKey):
|
||||
try:
|
||||
return APIKeyWithoutHash(
|
||||
id=api_key.id,
|
||||
name=api_key.name,
|
||||
prefix=api_key.prefix,
|
||||
postfix=api_key.postfix,
|
||||
status=APIKeyStatus(api_key.status),
|
||||
permissions=[APIKeyPermission(p) for p in api_key.permissions],
|
||||
created_at=api_key.createdAt,
|
||||
last_used_at=api_key.lastUsedAt,
|
||||
revoked_at=api_key.revokedAt,
|
||||
description=api_key.description,
|
||||
user_id=api_key.userId,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating APIKeyWithoutHash from db: {str(e)}")
|
||||
raise APIKeyError(f"Failed to create API key object: {str(e)}")
|
||||
return APIKeyInfoWithHash(
|
||||
**APIKeyInfo.from_db(api_key).model_dump(),
|
||||
hash=api_key.hash,
|
||||
salt=api_key.salt,
|
||||
)
|
||||
|
||||
def without_hash(self) -> APIKeyInfo:
|
||||
return APIKeyInfo(**self.model_dump(exclude={"hash", "salt"}))
|
||||
|
||||
|
||||
async def generate_api_key(
|
||||
async def create_api_key(
|
||||
name: str,
|
||||
user_id: str,
|
||||
permissions: List[APIKeyPermission],
|
||||
permissions: list[APIKeyPermission],
|
||||
description: Optional[str] = None,
|
||||
) -> tuple[APIKeyWithoutHash, str]:
|
||||
) -> tuple[APIKeyInfo, str]:
|
||||
"""
|
||||
Generate a new API key and store it in the database.
|
||||
Returns the API key object (without hash) and the plain text key.
|
||||
"""
|
||||
try:
|
||||
api_manager = APIKeyManager()
|
||||
key = api_manager.generate_api_key()
|
||||
generated_key = keysmith.generate_key()
|
||||
|
||||
api_key = await PrismaAPIKey.prisma().create(
|
||||
data=APIKeyCreateInput(
|
||||
id=str(uuid.uuid4()),
|
||||
name=name,
|
||||
prefix=key.prefix,
|
||||
postfix=key.postfix,
|
||||
key=key.hash,
|
||||
permissions=[p for p in permissions],
|
||||
description=description,
|
||||
userId=user_id,
|
||||
)
|
||||
)
|
||||
saved_key_obj = await PrismaAPIKey.prisma().create(
|
||||
data={
|
||||
"id": str(uuid.uuid4()),
|
||||
"name": name,
|
||||
"head": generated_key.head,
|
||||
"tail": generated_key.tail,
|
||||
"hash": generated_key.hash,
|
||||
"salt": generated_key.salt,
|
||||
"permissions": [p for p in permissions],
|
||||
"description": description,
|
||||
"userId": user_id,
|
||||
}
|
||||
)
|
||||
|
||||
api_key_without_hash = APIKeyWithoutHash.from_db(api_key)
|
||||
return api_key_without_hash, key.raw
|
||||
except PrismaError as e:
|
||||
logger.error(f"Database error while generating API key: {str(e)}")
|
||||
raise APIKeyError(f"Failed to generate API key: {str(e)}")
|
||||
except Exception as e:
|
||||
logger.error(f"Unexpected error while generating API key: {str(e)}")
|
||||
raise APIKeyError(f"Failed to generate API key: {str(e)}")
|
||||
return APIKeyInfo.from_db(saved_key_obj), generated_key.key
|
||||
|
||||
|
||||
async def validate_api_key(plain_text_key: str) -> Optional[APIKey]:
|
||||
async def get_active_api_keys_by_head(head: str) -> list[APIKeyInfoWithHash]:
|
||||
results = await PrismaAPIKey.prisma().find_many(
|
||||
where={"head": head, "status": APIKeyStatus.ACTIVE}
|
||||
)
|
||||
return [APIKeyInfoWithHash.from_db(key) for key in results]
|
||||
|
||||
|
||||
async def validate_api_key(plaintext_key: str) -> Optional[APIKeyInfo]:
|
||||
"""
|
||||
Validate an API key and return the API key object if valid.
|
||||
Validate an API key and return the API key object if valid and active.
|
||||
"""
|
||||
try:
|
||||
if not plain_text_key.startswith(APIKeyManager.PREFIX):
|
||||
if not plaintext_key.startswith(APIKeySmith.PREFIX):
|
||||
logger.warning("Invalid API key format")
|
||||
return None
|
||||
|
||||
prefix = plain_text_key[: APIKeyManager.PREFIX_LENGTH]
|
||||
api_manager = APIKeyManager()
|
||||
head = plaintext_key[: APIKeySmith.HEAD_LENGTH]
|
||||
potential_matches = await get_active_api_keys_by_head(head)
|
||||
|
||||
api_key = await PrismaAPIKey.prisma().find_first(
|
||||
where=APIKeyWhereInput(prefix=prefix, status=(APIKeyStatus.ACTIVE))
|
||||
matched_api_key = next(
|
||||
(pm for pm in potential_matches if pm.match(plaintext_key)),
|
||||
None,
|
||||
)
|
||||
|
||||
if not api_key:
|
||||
logger.warning(f"No active API key found with prefix {prefix}")
|
||||
if not matched_api_key:
|
||||
# API key not found or invalid
|
||||
return None
|
||||
|
||||
is_valid = api_manager.verify_api_key(plain_text_key, api_key.key)
|
||||
if not is_valid:
|
||||
logger.warning("API key verification failed")
|
||||
return None
|
||||
|
||||
return APIKey.from_db(api_key)
|
||||
except Exception as e:
|
||||
logger.error(f"Error validating API key: {str(e)}")
|
||||
raise APIKeyValidationError(f"Failed to validate API key: {str(e)}")
|
||||
|
||||
|
||||
async def revoke_api_key(key_id: str, user_id: str) -> Optional[APIKeyWithoutHash]:
|
||||
try:
|
||||
api_key = await PrismaAPIKey.prisma().find_unique(where={"id": key_id})
|
||||
|
||||
if not api_key:
|
||||
raise APIKeyNotFoundError(f"API key with id {key_id} not found")
|
||||
|
||||
if api_key.userId != user_id:
|
||||
raise APIKeyPermissionError(
|
||||
"You do not have permission to revoke this API key."
|
||||
# Migrate legacy keys to secure format on successful validation
|
||||
if matched_api_key.salt is None:
|
||||
matched_api_key = await _migrate_key_to_secure_hash(
|
||||
plaintext_key, matched_api_key
|
||||
)
|
||||
|
||||
where_clause: APIKeyWhereUniqueInput = {"id": key_id}
|
||||
updated_api_key = await PrismaAPIKey.prisma().update(
|
||||
where=where_clause,
|
||||
data=APIKeyUpdateInput(
|
||||
status=APIKeyStatus.REVOKED, revokedAt=datetime.now(timezone.utc)
|
||||
),
|
||||
)
|
||||
return matched_api_key.without_hash()
|
||||
except Exception as e:
|
||||
logger.error(f"Error while validating API key: {e}")
|
||||
raise RuntimeError("Failed to validate API key") from e
|
||||
|
||||
if updated_api_key:
|
||||
return APIKeyWithoutHash.from_db(updated_api_key)
|
||||
|
||||
async def _migrate_key_to_secure_hash(
|
||||
plaintext_key: str, key_obj: APIKeyInfoWithHash
|
||||
) -> APIKeyInfoWithHash:
|
||||
"""Replace the SHA256 hash of a legacy API key with a salted Scrypt hash."""
|
||||
try:
|
||||
new_hash, new_salt = keysmith.hash_key(plaintext_key)
|
||||
await PrismaAPIKey.prisma().update(
|
||||
where={"id": key_obj.id}, data={"hash": new_hash, "salt": new_salt}
|
||||
)
|
||||
logger.info(f"Migrated legacy API key #{key_obj.id} to secure format")
|
||||
# Update the API key object with new values for return
|
||||
key_obj.hash = new_hash
|
||||
key_obj.salt = new_salt
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to migrate legacy API key #{key_obj.id}: {e}")
|
||||
|
||||
return key_obj
|
||||
|
||||
|
||||
async def revoke_api_key(key_id: str, user_id: str) -> APIKeyInfo:
|
||||
api_key = await PrismaAPIKey.prisma().find_unique(where={"id": key_id})
|
||||
|
||||
if not api_key:
|
||||
raise NotFoundError(f"API key with id {key_id} not found")
|
||||
|
||||
if api_key.userId != user_id:
|
||||
raise NotAuthorizedError("You do not have permission to revoke this API key.")
|
||||
|
||||
updated_api_key = await PrismaAPIKey.prisma().update(
|
||||
where={"id": key_id},
|
||||
data={
|
||||
"status": APIKeyStatus.REVOKED,
|
||||
"revokedAt": datetime.now(timezone.utc),
|
||||
},
|
||||
)
|
||||
if not updated_api_key:
|
||||
raise NotFoundError(f"API key #{key_id} vanished while trying to revoke.")
|
||||
|
||||
return APIKeyInfo.from_db(updated_api_key)
|
||||
|
||||
|
||||
async def list_user_api_keys(user_id: str) -> list[APIKeyInfo]:
|
||||
api_keys = await PrismaAPIKey.prisma().find_many(
|
||||
where={"userId": user_id}, order={"createdAt": "desc"}
|
||||
)
|
||||
|
||||
return [APIKeyInfo.from_db(key) for key in api_keys]
|
||||
|
||||
|
||||
async def suspend_api_key(key_id: str, user_id: str) -> APIKeyInfo:
    selector: APIKeyWhereUniqueInput = {"id": key_id}
    api_key = await PrismaAPIKey.prisma().find_unique(where=selector)

    if not api_key:
        raise NotFoundError(f"API key with id {key_id} not found")

    if api_key.userId != user_id:
        raise NotAuthorizedError("You do not have permission to suspend this API key.")

    updated_api_key = await PrismaAPIKey.prisma().update(
        where=selector, data={"status": APIKeyStatus.SUSPENDED}
    )
    if not updated_api_key:
        raise NotFoundError(f"API key #{key_id} vanished while trying to suspend.")

    return APIKeyInfo.from_db(updated_api_key)


def has_permission(api_key: APIKeyInfo, required_permission: APIKeyPermission) -> bool:
    return required_permission in api_key.permissions


async def get_api_key_by_id(key_id: str, user_id: str) -> Optional[APIKeyInfo]:
    api_key = await PrismaAPIKey.prisma().find_first(
        where={"id": key_id, "userId": user_id}
    )

    if not api_key:
        return None
    except (APIKeyNotFoundError, APIKeyPermissionError) as e:
        raise e
    except PrismaError as e:
        logger.error(f"Database error while revoking API key: {str(e)}")
        raise APIKeyError(f"Failed to revoke API key: {str(e)}")
    except Exception as e:
        logger.error(f"Unexpected error while revoking API key: {str(e)}")
        raise APIKeyError(f"Failed to revoke API key: {str(e)}")


async def list_user_api_keys(user_id: str) -> List[APIKeyWithoutHash]:
    try:
        where_clause: APIKeyWhereInput = {"userId": user_id}

        api_keys = await PrismaAPIKey.prisma().find_many(
            where=where_clause, order={"createdAt": "desc"}
        )

        return [APIKeyWithoutHash.from_db(key) for key in api_keys]
    except PrismaError as e:
        logger.error(f"Database error while listing API keys: {str(e)}")
        raise APIKeyError(f"Failed to list API keys: {str(e)}")
    except Exception as e:
        logger.error(f"Unexpected error while listing API keys: {str(e)}")
        raise APIKeyError(f"Failed to list API keys: {str(e)}")


async def suspend_api_key(key_id: str, user_id: str) -> Optional[APIKeyWithoutHash]:
    try:
        api_key = await PrismaAPIKey.prisma().find_unique(where={"id": key_id})

        if not api_key:
            raise APIKeyNotFoundError(f"API key with id {key_id} not found")

        if api_key.userId != user_id:
            raise APIKeyPermissionError(
                "You do not have permission to suspend this API key."
            )

        where_clause: APIKeyWhereUniqueInput = {"id": key_id}
        updated_api_key = await PrismaAPIKey.prisma().update(
            where=where_clause,
            data=APIKeyUpdateInput(status=APIKeyStatus.SUSPENDED),
        )

        if updated_api_key:
            return APIKeyWithoutHash.from_db(updated_api_key)
        return None
    except (APIKeyNotFoundError, APIKeyPermissionError) as e:
        raise e
    except PrismaError as e:
        logger.error(f"Database error while suspending API key: {str(e)}")
        raise APIKeyError(f"Failed to suspend API key: {str(e)}")
    except Exception as e:
        logger.error(f"Unexpected error while suspending API key: {str(e)}")
        raise APIKeyError(f"Failed to suspend API key: {str(e)}")


def has_permission(api_key: APIKey, required_permission: APIKeyPermission) -> bool:
    try:
        return required_permission in api_key.permissions
    except Exception as e:
        logger.error(f"Error checking API key permissions: {str(e)}")
        return False


async def get_api_key_by_id(key_id: str, user_id: str) -> Optional[APIKeyWithoutHash]:
    try:
        api_key = await PrismaAPIKey.prisma().find_first(
            where=APIKeyWhereInput(id=key_id, userId=user_id)
        )

        if not api_key:
            return None

        return APIKeyWithoutHash.from_db(api_key)
    except PrismaError as e:
        logger.error(f"Database error while getting API key: {str(e)}")
        raise APIKeyError(f"Failed to get API key: {str(e)}")
    except Exception as e:
        logger.error(f"Unexpected error while getting API key: {str(e)}")
        raise APIKeyError(f"Failed to get API key: {str(e)}")
    return APIKeyInfo.from_db(api_key)


async def update_api_key_permissions(
    key_id: str, user_id: str, permissions: List[APIKeyPermission]
) -> Optional[APIKeyWithoutHash]:
    key_id: str, user_id: str, permissions: list[APIKeyPermission]
) -> APIKeyInfo:
    """
    Update the permissions of an API key.
    """
    try:
        api_key = await PrismaAPIKey.prisma().find_unique(where={"id": key_id})
    api_key = await PrismaAPIKey.prisma().find_unique(where={"id": key_id})

        if api_key is None:
            raise APIKeyNotFoundError("No such API key found.")
    if api_key is None:
        raise NotFoundError("No such API key found.")

        if api_key.userId != user_id:
            raise APIKeyPermissionError(
                "You do not have permission to update this API key."
            )
    if api_key.userId != user_id:
        raise NotAuthorizedError("You do not have permission to update this API key.")

        where_clause: APIKeyWhereUniqueInput = {"id": key_id}
        updated_api_key = await PrismaAPIKey.prisma().update(
            where=where_clause,
            data=APIKeyUpdateInput(permissions=permissions),
        )
    updated_api_key = await PrismaAPIKey.prisma().update(
        where={"id": key_id},
        data={"permissions": permissions},
    )
    if not updated_api_key:
        raise NotFoundError(f"API key #{key_id} vanished while trying to update.")

        if updated_api_key:
            return APIKeyWithoutHash.from_db(updated_api_key)
        return None
    except (APIKeyNotFoundError, APIKeyPermissionError) as e:
        raise e
    except PrismaError as e:
        logger.error(f"Database error while updating API key permissions: {str(e)}")
        raise APIKeyError(f"Failed to update API key permissions: {str(e)}")
    except Exception as e:
        logger.error(f"Unexpected error while updating API key permissions: {str(e)}")
        raise APIKeyError(f"Failed to update API key permissions: {str(e)}")
    return APIKeyInfo.from_db(updated_api_key)

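A rough sketch of how a caller might use the reworked helpers: the new functions return APIKeyInfo directly and raise NotFoundError / NotAuthorizedError instead of returning Optional or raising APIKeyError subclasses, so route handlers can stay thin. The route shape and the plain user_id parameter below are illustrative assumptions, not code from this PR.

# Illustrative sketch only; names resolve as in the api_key.py hunks above.
from fastapi import APIRouter

from backend.data.api_key import APIKeyInfo, suspend_api_key

router = APIRouter()


@router.post("/api-keys/{key_id}/suspend")
async def suspend_key_route(key_id: str, user_id: str) -> APIKeyInfo:
    # No try/except needed here: NotFoundError / NotAuthorizedError propagate and are
    # mapped to 404 / 403 by the app-level exception handlers added in rest_api.py below.
    return await suspend_api_key(key_id, user_id)
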
@@ -91,6 +91,45 @@ class BlockCategory(Enum):
        return {"category": self.name, "description": self.value}


class BlockCostType(str, Enum):
    RUN = "run"  # cost X credits per run
    BYTE = "byte"  # cost X credits per byte
    SECOND = "second"  # cost X credits per second


class BlockCost(BaseModel):
    cost_amount: int
    cost_filter: BlockInput
    cost_type: BlockCostType

    def __init__(
        self,
        cost_amount: int,
        cost_type: BlockCostType = BlockCostType.RUN,
        cost_filter: Optional[BlockInput] = None,
        **data: Any,
    ) -> None:
        super().__init__(
            cost_amount=cost_amount,
            cost_filter=cost_filter or {},
            cost_type=cost_type,
            **data,
        )


class BlockInfo(BaseModel):
    id: str
    name: str
    inputSchema: dict[str, Any]
    outputSchema: dict[str, Any]
    costs: list[BlockCost]
    description: str
    categories: list[dict[str, str]]
    contributors: list[dict[str, Any]]
    staticOutput: bool
    uiType: str


class BlockSchema(BaseModel):
    cached_jsonschema: ClassVar[dict[str, Any]]

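A minimal sketch of constructing the BlockCost model defined in the hunk above: cost_type defaults to RUN and cost_filter defaults to an empty dict. The "model" filter key in the second example is made up for illustration.

from backend.data.block import BlockCost, BlockCostType

# Positional cost_amount; defaults fill in the rest.
default_cost = BlockCost(5)

# Per-second cost that only applies when a particular input is present
# (the "model" filter key is a hypothetical example, not from this PR).
metered_cost = BlockCost(3, cost_type=BlockCostType.SECOND, cost_filter={"model": "gpt-4"})

assert default_cost.cost_type == BlockCostType.RUN
assert default_cost.cost_filter == {}
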
@@ -454,6 +493,24 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
            "uiType": self.block_type.value,
        }

    def get_info(self) -> BlockInfo:
        from backend.data.credit import get_block_cost

        return BlockInfo(
            id=self.id,
            name=self.name,
            inputSchema=self.input_schema.jsonschema(),
            outputSchema=self.output_schema.jsonschema(),
            costs=get_block_cost(self),
            description=self.description,
            categories=[category.dict() for category in self.categories],
            contributors=[
                contributor.model_dump() for contributor in self.contributors
            ],
            staticOutput=self.static_output,
            uiType=self.block_type.value,
        )

    async def execute(self, input_data: BlockInput, **kwargs) -> BlockOutput:
        if error := self.input_schema.validate_data(input_data):
            raise ValueError(

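A small sketch of how a caller could serialize a block through the new typed path instead of the old `to_dict()` plus separate cost lookup; `describe_block` is a hypothetical helper, not part of this PR.

from backend.data.block import BlockInfo


def describe_block(block) -> dict:
    """Serialize any concrete Block for an API response via the new BlockInfo model."""
    info: BlockInfo = block.get_info()  # costs are resolved internally via get_block_cost(self)
    return info.model_dump()
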
@@ -29,8 +29,7 @@ from backend.blocks.replicate.replicate_block import ReplicateModelBlock
|
||||
from backend.blocks.smart_decision_maker import SmartDecisionMakerBlock
|
||||
from backend.blocks.talking_head import CreateTalkingAvatarVideoBlock
|
||||
from backend.blocks.text_to_speech_block import UnrealTextToSpeechBlock
|
||||
from backend.data.block import Block
|
||||
from backend.data.cost import BlockCost, BlockCostType
|
||||
from backend.data.block import Block, BlockCost, BlockCostType
|
||||
from backend.integrations.credentials_store import (
|
||||
aiml_api_credentials,
|
||||
anthropic_credentials,
|
||||
|
||||
@@ -1,32 +0,0 @@
|
||||
from enum import Enum
|
||||
from typing import Any, Optional
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from backend.data.block import BlockInput
|
||||
|
||||
|
||||
class BlockCostType(str, Enum):
|
||||
RUN = "run" # cost X credits per run
|
||||
BYTE = "byte" # cost X credits per byte
|
||||
SECOND = "second" # cost X credits per second
|
||||
|
||||
|
||||
class BlockCost(BaseModel):
|
||||
cost_amount: int
|
||||
cost_filter: BlockInput
|
||||
cost_type: BlockCostType
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
cost_amount: int,
|
||||
cost_type: BlockCostType = BlockCostType.RUN,
|
||||
cost_filter: Optional[BlockInput] = None,
|
||||
**data: Any,
|
||||
) -> None:
|
||||
super().__init__(
|
||||
cost_amount=cost_amount,
|
||||
cost_filter=cost_filter or {},
|
||||
cost_type=cost_type,
|
||||
**data,
|
||||
)
|
||||
@@ -2,7 +2,7 @@ import logging
|
||||
from abc import ABC, abstractmethod
|
||||
from collections import defaultdict
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any, cast
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
|
||||
import stripe
|
||||
from prisma import Json
|
||||
@@ -23,7 +23,6 @@ from pydantic import BaseModel
|
||||
|
||||
from backend.data import db
|
||||
from backend.data.block_cost_config import BLOCK_COSTS
|
||||
from backend.data.cost import BlockCost
|
||||
from backend.data.model import (
|
||||
AutoTopUpConfig,
|
||||
RefundRequest,
|
||||
@@ -41,6 +40,9 @@ from backend.util.models import Pagination
|
||||
from backend.util.retry import func_retry
|
||||
from backend.util.settings import Settings
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from backend.data.block import Block, BlockCost
|
||||
|
||||
settings = Settings()
|
||||
stripe.api_key = settings.secrets.stripe_api_key
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -997,10 +999,14 @@ def get_user_credit_model() -> UserCreditBase:
    return UserCredit()


def get_block_costs() -> dict[str, list[BlockCost]]:
def get_block_costs() -> dict[str, list["BlockCost"]]:
    return {block().id: costs for block, costs in BLOCK_COSTS.items()}


def get_block_cost(block: "Block") -> list["BlockCost"]:
    return BLOCK_COSTS.get(block.__class__, [])


async def get_stripe_customer_id(user_id: str) -> str:
    user = await get_user_by_id(user_id)

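The quoted "Block" / "BlockCost" annotations work together with the `if TYPE_CHECKING:` import added earlier in this file: the types are visible to static checkers without importing backend.data.block at runtime, which would now be circular since BlockCost moved into block.py. A generic illustration of the pattern, with made-up module names:

from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Only evaluated by type checkers, never at runtime, so no import cycle.
    from myapp.block import Block, BlockCost


def get_cost(block: "Block") -> list["BlockCost"]:
    # Any runtime dependency is imported lazily inside the function instead.
    from myapp.block_cost_config import BLOCK_COSTS

    return BLOCK_COSTS.get(block.__class__, [])
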
@@ -160,6 +160,7 @@ class BaseGraph(BaseDbModel):
|
||||
is_active: bool = True
|
||||
name: str
|
||||
description: str
|
||||
recommended_schedule_cron: str | None = None
|
||||
nodes: list[Node] = []
|
||||
links: list[Link] = []
|
||||
forked_from_id: str | None = None
|
||||
@@ -696,6 +697,7 @@ class GraphModel(Graph):
|
||||
is_active=graph.isActive,
|
||||
name=graph.name or "",
|
||||
description=graph.description or "",
|
||||
recommended_schedule_cron=graph.recommendedScheduleCron,
|
||||
nodes=[NodeModel.from_db(node, for_export) for node in graph.Nodes or []],
|
||||
links=list(
|
||||
{
|
||||
@@ -1083,6 +1085,7 @@ async def __create_graph(tx, graph: Graph, user_id: str):
|
||||
version=graph.version,
|
||||
name=graph.name,
|
||||
description=graph.description,
|
||||
recommendedScheduleCron=graph.recommended_schedule_cron,
|
||||
isActive=graph.is_active,
|
||||
userId=user_id,
|
||||
forkedFromId=graph.forked_from_id,
|
||||
|
||||
@@ -10,9 +10,15 @@ from pydantic import BaseModel, JsonValue, ValidationError
|
||||
|
||||
from backend.data import execution as execution_db
|
||||
from backend.data import graph as graph_db
|
||||
from backend.data.block import Block, BlockInput, BlockOutputEntry, BlockType, get_block
|
||||
from backend.data.block import (
|
||||
Block,
|
||||
BlockCostType,
|
||||
BlockInput,
|
||||
BlockOutputEntry,
|
||||
BlockType,
|
||||
get_block,
|
||||
)
|
||||
from backend.data.block_cost_config import BLOCK_COSTS
|
||||
from backend.data.cost import BlockCostType
|
||||
from backend.data.db import prisma
|
||||
from backend.data.execution import (
|
||||
ExecutionStatus,
|
||||
|
||||
@@ -63,7 +63,7 @@ except ImportError:
|
||||
|
||||
# Cost System
|
||||
try:
|
||||
from backend.data.cost import BlockCost, BlockCostType
|
||||
from backend.data.block import BlockCost, BlockCostType
|
||||
except ImportError:
|
||||
from backend.data.block_cost_config import BlockCost, BlockCostType
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@ from typing import Callable, List, Optional, Type
|
||||
|
||||
from pydantic import SecretStr
|
||||
|
||||
from backend.data.cost import BlockCost, BlockCostType
|
||||
from backend.data.block import BlockCost, BlockCostType
|
||||
from backend.data.model import (
|
||||
APIKeyCredentials,
|
||||
Credentials,
|
||||
|
||||
@@ -8,9 +8,8 @@ BLOCK_COSTS configuration used by the execution system.
|
||||
import logging
|
||||
from typing import List, Type
|
||||
|
||||
from backend.data.block import Block
|
||||
from backend.data.block import Block, BlockCost
|
||||
from backend.data.block_cost_config import BLOCK_COSTS
|
||||
from backend.data.cost import BlockCost
|
||||
from backend.sdk.registry import AutoRegistry
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -7,7 +7,7 @@ from typing import Any, Callable, List, Optional, Set, Type
|
||||
|
||||
from pydantic import BaseModel, SecretStr
|
||||
|
||||
from backend.data.cost import BlockCost
|
||||
from backend.data.block import BlockCost
|
||||
from backend.data.model import (
|
||||
APIKeyCredentials,
|
||||
Credentials,
|
||||
|
||||
@@ -2,12 +2,12 @@ from fastapi import HTTPException, Security
from fastapi.security import APIKeyHeader
from prisma.enums import APIKeyPermission

from backend.data.api_key import APIKey, has_permission, validate_api_key
from backend.data.api_key import APIKeyInfo, has_permission, validate_api_key

api_key_header = APIKeyHeader(name="X-API-Key", auto_error=False)


async def require_api_key(api_key: str | None = Security(api_key_header)) -> APIKey:
async def require_api_key(api_key: str | None = Security(api_key_header)) -> APIKeyInfo:
    """Base middleware for API key authentication"""
    if api_key is None:
        raise HTTPException(status_code=401, detail="Missing API key")
@@ -23,7 +23,9 @@ async def require_api_key(api_key: str | None = Security(api_key_header)) -> API
def require_permission(permission: APIKeyPermission):
    """Dependency function for checking specific permissions"""

    async def check_permission(api_key: APIKey = Security(require_api_key)):
    async def check_permission(
        api_key: APIKeyInfo = Security(require_api_key),
    ) -> APIKeyInfo:
        if not has_permission(api_key, permission):
            raise HTTPException(
                status_code=403,
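A short sketch of wiring the permission check into a route, mirroring the external API routes further down in this diff; the "/example" path itself is made up.

from fastapi import APIRouter, Security
from prisma.enums import APIKeyPermission

from backend.data.api_key import APIKeyInfo
from backend.server.external.middleware import require_permission

router = APIRouter()


@router.post("/example")
async def example_route(
    api_key: APIKeyInfo = Security(require_permission(APIKeyPermission.EXECUTE_BLOCK)),
) -> dict:
    # api_key is the validated APIKeyInfo; the owning user is api_key.user_id.
    return {"user_id": api_key.user_id}
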
|
||||
@@ -9,7 +9,7 @@ from typing_extensions import TypedDict
|
||||
import backend.data.block
|
||||
from backend.data import execution as execution_db
|
||||
from backend.data import graph as graph_db
|
||||
from backend.data.api_key import APIKey
|
||||
from backend.data.api_key import APIKeyInfo
|
||||
from backend.data.block import BlockInput, CompletedBlockOutput
|
||||
from backend.executor.utils import add_graph_execution
|
||||
from backend.server.external.middleware import require_permission
|
||||
@@ -62,7 +62,7 @@ def get_graph_blocks() -> Sequence[dict[Any, Any]]:
|
||||
async def execute_graph_block(
|
||||
block_id: str,
|
||||
data: BlockInput,
|
||||
api_key: APIKey = Security(require_permission(APIKeyPermission.EXECUTE_BLOCK)),
|
||||
api_key: APIKeyInfo = Security(require_permission(APIKeyPermission.EXECUTE_BLOCK)),
|
||||
) -> CompletedBlockOutput:
|
||||
obj = backend.data.block.get_block(block_id)
|
||||
if not obj:
|
||||
@@ -82,7 +82,7 @@ async def execute_graph(
|
||||
graph_id: str,
|
||||
graph_version: int,
|
||||
node_input: Annotated[dict[str, Any], Body(..., embed=True, default_factory=dict)],
|
||||
api_key: APIKey = Security(require_permission(APIKeyPermission.EXECUTE_GRAPH)),
|
||||
api_key: APIKeyInfo = Security(require_permission(APIKeyPermission.EXECUTE_GRAPH)),
|
||||
) -> dict[str, Any]:
|
||||
try:
|
||||
graph_exec = await add_graph_execution(
|
||||
@@ -104,7 +104,7 @@ async def execute_graph(
|
||||
async def get_graph_execution_results(
|
||||
graph_id: str,
|
||||
graph_exec_id: str,
|
||||
api_key: APIKey = Security(require_permission(APIKeyPermission.READ_GRAPH)),
|
||||
api_key: APIKeyInfo = Security(require_permission(APIKeyPermission.READ_GRAPH)),
|
||||
) -> GraphExecutionResult:
|
||||
graph = await graph_db.get_graph(graph_id, user_id=api_key.user_id)
|
||||
if not graph:
|
||||
|
||||
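A hypothetical client call against the external endpoints above. Only the X-API-Key header name comes from this diff; the base URL and the exact path are assumptions for illustration.

import httpx

API_KEY = "your-api-key"  # a key created via POST /api-keys

resp = httpx.post(
    "http://localhost:8000/external-api/blocks/some-block-id/execute",  # path is assumed
    headers={"X-API-Key": API_KEY},
    json={"input_name": "value"},
)
resp.raise_for_status()
print(resp.json())
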
@@ -3,7 +3,7 @@ from typing import Any, Optional
|
||||
|
||||
import pydantic
|
||||
|
||||
from backend.data.api_key import APIKeyPermission, APIKeyWithoutHash
|
||||
from backend.data.api_key import APIKeyInfo, APIKeyPermission
|
||||
from backend.data.graph import Graph
|
||||
from backend.util.timezone_name import TimeZoneName
|
||||
|
||||
@@ -45,7 +45,7 @@ class CreateAPIKeyRequest(pydantic.BaseModel):
|
||||
|
||||
|
||||
class CreateAPIKeyResponse(pydantic.BaseModel):
|
||||
api_key: APIKeyWithoutHash
|
||||
api_key: APIKeyInfo
|
||||
plain_text_key: str
|
||||
|
||||
|
||||
|
||||
@@ -12,6 +12,7 @@ from autogpt_libs.auth import add_auth_responses_to_openapi
|
||||
from autogpt_libs.auth import verify_settings as verify_auth_settings
|
||||
from fastapi.exceptions import RequestValidationError
|
||||
from fastapi.routing import APIRoute
|
||||
from prisma.errors import PrismaError
|
||||
|
||||
import backend.data.block
|
||||
import backend.data.db
|
||||
@@ -39,6 +40,7 @@ from backend.server.external.api import external_app
|
||||
from backend.server.middleware.security import SecurityHeadersMiddleware
|
||||
from backend.util import json
|
||||
from backend.util.cloud_storage import shutdown_cloud_storage_handler
|
||||
from backend.util.exceptions import NotAuthorizedError, NotFoundError
|
||||
from backend.util.feature_flag import initialize_launchdarkly, shutdown_launchdarkly
|
||||
from backend.util.service import UnhealthyServiceError
|
||||
|
||||
@@ -195,10 +197,14 @@ async def validation_error_handler(
|
||||
)
|
||||
|
||||
|
||||
app.add_exception_handler(PrismaError, handle_internal_http_error(500))
|
||||
app.add_exception_handler(NotFoundError, handle_internal_http_error(404, False))
|
||||
app.add_exception_handler(NotAuthorizedError, handle_internal_http_error(403, False))
|
||||
app.add_exception_handler(RequestValidationError, validation_error_handler)
|
||||
app.add_exception_handler(pydantic.ValidationError, validation_error_handler)
|
||||
app.add_exception_handler(ValueError, handle_internal_http_error(400))
|
||||
app.add_exception_handler(Exception, handle_internal_http_error(500))
|
||||
|
||||
app.include_router(backend.server.routers.v1.v1_router, tags=["v1"], prefix="/api")
|
||||
app.include_router(
|
||||
backend.server.v2.store.routes.router, tags=["v2"], prefix="/api/store"
|
||||
@@ -246,13 +252,19 @@ async def health():

class AgentServer(backend.util.service.AppProcess):
    def run(self):
        server_app = starlette.middleware.cors.CORSMiddleware(
            app=app,
            allow_origins=settings.config.backend_cors_allow_origins,
            allow_credentials=True,
            allow_methods=["*"],  # Allows all methods
            allow_headers=["*"],  # Allows all headers
        )

        if settings.config.enable_cors_all_origins:
            server_app = starlette.middleware.cors.CORSMiddleware(
                app=app,
                allow_origins=settings.config.backend_cors_allow_origins,
                allow_credentials=True,
                allow_methods=["*"],  # Allows all methods
                allow_headers=["*"],  # Allows all headers
            )
        else:
            logger.info("CORS is disabled")
            server_app = app

        uvicorn.run(
            server_app,
            host=backend.util.settings.Config().agent_api_host,

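The new enable_cors_all_origins flag is what the "toggle-cor" branch is about: when it is false, the REST server is started without CORSMiddleware at all. The Config class lives in backend.util.settings and is not shown in this diff, so the following is only a guess at what the field might look like:

# Sketch only; the real Config definition is an assumption, not part of this diff.
from pydantic import Field
from pydantic_settings import BaseSettings


class Config(BaseSettings):
    enable_cors_all_origins: bool = Field(
        default=True,  # default is assumed
        description="If False, start the REST server without CORSMiddleware.",
    )
    backend_cors_allow_origins: list[str] = Field(default_factory=list)
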
@@ -27,20 +27,9 @@ from typing_extensions import Optional, TypedDict
|
||||
import backend.server.integrations.router
|
||||
import backend.server.routers.analytics
|
||||
import backend.server.v2.library.db as library_db
|
||||
from backend.data import api_key as api_key_db
|
||||
from backend.data import execution as execution_db
|
||||
from backend.data import graph as graph_db
|
||||
from backend.data.api_key import (
|
||||
APIKeyError,
|
||||
APIKeyNotFoundError,
|
||||
APIKeyPermissionError,
|
||||
APIKeyWithoutHash,
|
||||
generate_api_key,
|
||||
get_api_key_by_id,
|
||||
list_user_api_keys,
|
||||
revoke_api_key,
|
||||
suspend_api_key,
|
||||
update_api_key_permissions,
|
||||
)
|
||||
from backend.data.block import BlockInput, CompletedBlockOutput, get_block, get_blocks
|
||||
from backend.data.credit import (
|
||||
AutoTopUpConfig,
|
||||
@@ -176,7 +165,6 @@ async def get_user_timezone_route(
|
||||
summary="Update user timezone",
|
||||
tags=["auth"],
|
||||
dependencies=[Security(requires_user)],
|
||||
response_model=TimezoneResponse,
|
||||
)
|
||||
async def update_user_timezone_route(
|
||||
user_id: Annotated[str, Security(get_user_id)], request: UpdateTimezoneRequest
|
||||
@@ -1082,7 +1070,6 @@ async def delete_graph_execution_schedule(
|
||||
@v1_router.post(
|
||||
"/api-keys",
|
||||
summary="Create new API key",
|
||||
response_model=CreateAPIKeyResponse,
|
||||
tags=["api-keys"],
|
||||
dependencies=[Security(requires_user)],
|
||||
)
|
||||
@@ -1090,128 +1077,73 @@ async def create_api_key(
|
||||
request: CreateAPIKeyRequest, user_id: Annotated[str, Security(get_user_id)]
|
||||
) -> CreateAPIKeyResponse:
|
||||
"""Create a new API key"""
|
||||
try:
|
||||
api_key, plain_text = await generate_api_key(
|
||||
name=request.name,
|
||||
user_id=user_id,
|
||||
permissions=request.permissions,
|
||||
description=request.description,
|
||||
)
|
||||
return CreateAPIKeyResponse(api_key=api_key, plain_text_key=plain_text)
|
||||
except APIKeyError as e:
|
||||
logger.error(
|
||||
"Could not create API key for user %s: %s. Review input and permissions.",
|
||||
user_id,
|
||||
e,
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail={"message": str(e), "hint": "Verify request payload and try again."},
|
||||
)
|
||||
api_key_info, plain_text_key = await api_key_db.create_api_key(
|
||||
name=request.name,
|
||||
user_id=user_id,
|
||||
permissions=request.permissions,
|
||||
description=request.description,
|
||||
)
|
||||
return CreateAPIKeyResponse(api_key=api_key_info, plain_text_key=plain_text_key)
|
||||
|
||||
|
||||
@v1_router.get(
|
||||
"/api-keys",
|
||||
summary="List user API keys",
|
||||
response_model=list[APIKeyWithoutHash] | dict[str, str],
|
||||
tags=["api-keys"],
|
||||
dependencies=[Security(requires_user)],
|
||||
)
|
||||
async def get_api_keys(
|
||||
user_id: Annotated[str, Security(get_user_id)],
|
||||
) -> list[APIKeyWithoutHash]:
|
||||
) -> list[api_key_db.APIKeyInfo]:
|
||||
"""List all API keys for the user"""
|
||||
try:
|
||||
return await list_user_api_keys(user_id)
|
||||
except APIKeyError as e:
|
||||
logger.error("Failed to list API keys for user %s: %s", user_id, e)
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail={"message": str(e), "hint": "Check API key service availability."},
|
||||
)
|
||||
return await api_key_db.list_user_api_keys(user_id)
|
||||
|
||||
|
||||
@v1_router.get(
|
||||
"/api-keys/{key_id}",
|
||||
summary="Get specific API key",
|
||||
response_model=APIKeyWithoutHash,
|
||||
tags=["api-keys"],
|
||||
dependencies=[Security(requires_user)],
|
||||
)
|
||||
async def get_api_key(
|
||||
key_id: str, user_id: Annotated[str, Security(get_user_id)]
|
||||
) -> APIKeyWithoutHash:
|
||||
) -> api_key_db.APIKeyInfo:
|
||||
"""Get a specific API key"""
|
||||
try:
|
||||
api_key = await get_api_key_by_id(key_id, user_id)
|
||||
if not api_key:
|
||||
raise HTTPException(status_code=404, detail="API key not found")
|
||||
return api_key
|
||||
except APIKeyError as e:
|
||||
logger.error("Error retrieving API key %s for user %s: %s", key_id, user_id, e)
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail={"message": str(e), "hint": "Ensure the key ID is correct."},
|
||||
)
|
||||
api_key = await api_key_db.get_api_key_by_id(key_id, user_id)
|
||||
if not api_key:
|
||||
raise HTTPException(status_code=404, detail="API key not found")
|
||||
return api_key
|
||||
|
||||
|
||||
@v1_router.delete(
|
||||
"/api-keys/{key_id}",
|
||||
summary="Revoke API key",
|
||||
response_model=APIKeyWithoutHash,
|
||||
tags=["api-keys"],
|
||||
dependencies=[Security(requires_user)],
|
||||
)
|
||||
async def delete_api_key(
|
||||
key_id: str, user_id: Annotated[str, Security(get_user_id)]
|
||||
) -> Optional[APIKeyWithoutHash]:
|
||||
) -> api_key_db.APIKeyInfo:
|
||||
"""Revoke an API key"""
|
||||
try:
|
||||
return await revoke_api_key(key_id, user_id)
|
||||
except APIKeyNotFoundError:
|
||||
raise HTTPException(status_code=404, detail="API key not found")
|
||||
except APIKeyPermissionError:
|
||||
raise HTTPException(status_code=403, detail="Permission denied")
|
||||
except APIKeyError as e:
|
||||
logger.error("Failed to revoke API key %s for user %s: %s", key_id, user_id, e)
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail={
|
||||
"message": str(e),
|
||||
"hint": "Verify permissions or try again later.",
|
||||
},
|
||||
)
|
||||
return await api_key_db.revoke_api_key(key_id, user_id)
|
||||
|
||||
|
||||
@v1_router.post(
|
||||
"/api-keys/{key_id}/suspend",
|
||||
summary="Suspend API key",
|
||||
response_model=APIKeyWithoutHash,
|
||||
tags=["api-keys"],
|
||||
dependencies=[Security(requires_user)],
|
||||
)
|
||||
async def suspend_key(
|
||||
key_id: str, user_id: Annotated[str, Security(get_user_id)]
|
||||
) -> Optional[APIKeyWithoutHash]:
|
||||
) -> api_key_db.APIKeyInfo:
|
||||
"""Suspend an API key"""
|
||||
try:
|
||||
return await suspend_api_key(key_id, user_id)
|
||||
except APIKeyNotFoundError:
|
||||
raise HTTPException(status_code=404, detail="API key not found")
|
||||
except APIKeyPermissionError:
|
||||
raise HTTPException(status_code=403, detail="Permission denied")
|
||||
except APIKeyError as e:
|
||||
logger.error("Failed to suspend API key %s for user %s: %s", key_id, user_id, e)
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail={"message": str(e), "hint": "Check user permissions and retry."},
|
||||
)
|
||||
return await api_key_db.suspend_api_key(key_id, user_id)
|
||||
|
||||
|
||||
@v1_router.put(
|
||||
"/api-keys/{key_id}/permissions",
|
||||
summary="Update key permissions",
|
||||
response_model=APIKeyWithoutHash,
|
||||
tags=["api-keys"],
|
||||
dependencies=[Security(requires_user)],
|
||||
)
|
||||
@@ -1219,22 +1151,8 @@ async def update_permissions(
|
||||
key_id: str,
|
||||
request: UpdatePermissionsRequest,
|
||||
user_id: Annotated[str, Security(get_user_id)],
|
||||
) -> Optional[APIKeyWithoutHash]:
|
||||
) -> api_key_db.APIKeyInfo:
|
||||
"""Update API key permissions"""
|
||||
try:
|
||||
return await update_api_key_permissions(key_id, user_id, request.permissions)
|
||||
except APIKeyNotFoundError:
|
||||
raise HTTPException(status_code=404, detail="API key not found")
|
||||
except APIKeyPermissionError:
|
||||
raise HTTPException(status_code=403, detail="Permission denied")
|
||||
except APIKeyError as e:
|
||||
logger.error(
|
||||
"Failed to update permissions for API key %s of user %s: %s",
|
||||
key_id,
|
||||
user_id,
|
||||
e,
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail={"message": str(e), "hint": "Ensure permissions list is valid."},
|
||||
)
|
||||
return await api_key_db.update_api_key_permissions(
|
||||
key_id, user_id, request.permissions
|
||||
)
|
||||
|
||||
@@ -3,8 +3,9 @@ API Key authentication utilities for FastAPI applications.
"""

import inspect
import logging
import secrets
from typing import Any, Callable, Optional
from typing import Any, Awaitable, Callable, Optional

from fastapi import HTTPException, Request
from fastapi.security import APIKeyHeader
@@ -12,6 +13,8 @@ from starlette.status import HTTP_401_UNAUTHORIZED

from backend.util.exceptions import MissingConfigError

logger = logging.getLogger(__name__)


class APIKeyAuthenticator(APIKeyHeader):
    """
@@ -51,7 +54,8 @@ class APIKeyAuthenticator(APIKeyHeader):
        header_name (str): The name of the header containing the API key
        expected_token (Optional[str]): The expected API key value for simple token matching
        validator (Optional[Callable]): Custom validation function that takes an API key
            string and returns a boolean or object. Can be async.
            string and returns a truthy value if and only if the passed string is a
            valid API key. Can be async.
        status_if_missing (int): HTTP status code to use for validation errors
        message_if_invalid (str): Error message to return when validation fails
    """
@@ -60,7 +64,9 @@ class APIKeyAuthenticator(APIKeyHeader):
        self,
        header_name: str,
        expected_token: Optional[str] = None,
        validator: Optional[Callable[[str], bool]] = None,
        validator: Optional[
            Callable[[str], Any] | Callable[[str], Awaitable[Any]]
        ] = None,
        status_if_missing: int = HTTP_401_UNAUTHORIZED,
        message_if_invalid: str = "Invalid API key",
    ):
@@ -75,7 +81,7 @@ class APIKeyAuthenticator(APIKeyHeader):
        self.message_if_invalid = message_if_invalid

    async def __call__(self, request: Request) -> Any:
        api_key = await super()(request)
        api_key = await super().__call__(request)
        if api_key is None:
            raise HTTPException(
                status_code=self.status_if_missing, detail="No API key in request"
@@ -106,4 +112,9 @@ class APIKeyAuthenticator(APIKeyHeader):
                f"{self.__class__.__name__}.expected_token is not set; "
                "either specify it or provide a custom validator"
            )
        return secrets.compare_digest(api_key, self.expected_token)
        try:
            return secrets.compare_digest(api_key, self.expected_token)
        except TypeError as e:
            # If value is not an ASCII string, compare_digest raises a TypeError
            logger.warning(f"{self.model.name} API key check failed: {e}")
            return False

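A sketch of using the widened validator signature: an async validator may return an arbitrary object, which becomes the dependency's return value (and, per the tests below, is also stored on request.state.api_key). The lookup function and route are stand-ins, not part of this PR.

from fastapi import Depends, FastAPI

from backend.server.utils.api_key_auth import APIKeyAuthenticator


async def lookup_key(api_key: str):
    # Pretend database lookup; return a record (truthy) on success, None (falsy -> 401) otherwise.
    return {"user_id": "123"} if api_key == "valid" else None


auth = APIKeyAuthenticator(header_name="X-API-Key", validator=lookup_key)

app = FastAPI()


@app.get("/whoami")
async def whoami(key_record: dict = Depends(auth)):
    return key_record
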
@@ -0,0 +1,537 @@
|
||||
"""
|
||||
Unit tests for APIKeyAuthenticator class.
|
||||
"""
|
||||
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import pytest
|
||||
from fastapi import HTTPException, Request
|
||||
from starlette.status import HTTP_401_UNAUTHORIZED, HTTP_403_FORBIDDEN
|
||||
|
||||
from backend.server.utils.api_key_auth import APIKeyAuthenticator
|
||||
from backend.util.exceptions import MissingConfigError
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_request():
|
||||
"""Create a mock request object."""
|
||||
request = Mock(spec=Request)
|
||||
request.state = Mock()
|
||||
request.headers = {}
|
||||
return request
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def api_key_auth():
|
||||
"""Create a basic APIKeyAuthenticator instance."""
|
||||
return APIKeyAuthenticator(
|
||||
header_name="X-API-Key", expected_token="test-secret-token"
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def api_key_auth_custom_validator():
|
||||
"""Create APIKeyAuthenticator with custom validator."""
|
||||
|
||||
def custom_validator(api_key: str) -> bool:
|
||||
return api_key == "custom-valid-key"
|
||||
|
||||
return APIKeyAuthenticator(header_name="X-API-Key", validator=custom_validator)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def api_key_auth_async_validator():
|
||||
"""Create APIKeyAuthenticator with async custom validator."""
|
||||
|
||||
async def async_validator(api_key: str) -> bool:
|
||||
return api_key == "async-valid-key"
|
||||
|
||||
return APIKeyAuthenticator(header_name="X-API-Key", validator=async_validator)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def api_key_auth_object_validator():
|
||||
"""Create APIKeyAuthenticator that returns objects from validator."""
|
||||
|
||||
async def object_validator(api_key: str):
|
||||
if api_key == "user-key":
|
||||
return {"user_id": "123", "permissions": ["read", "write"]}
|
||||
return None
|
||||
|
||||
return APIKeyAuthenticator(header_name="X-API-Key", validator=object_validator)
|
||||
|
||||
|
||||
# ========== Basic Initialization Tests ========== #
|
||||
|
||||
|
||||
def test_init_with_expected_token():
|
||||
"""Test initialization with expected token."""
|
||||
auth = APIKeyAuthenticator(header_name="X-API-Key", expected_token="test-token")
|
||||
|
||||
assert auth.model.name == "X-API-Key"
|
||||
assert auth.expected_token == "test-token"
|
||||
assert auth.custom_validator is None
|
||||
assert auth.status_if_missing == HTTP_401_UNAUTHORIZED
|
||||
assert auth.message_if_invalid == "Invalid API key"
|
||||
|
||||
|
||||
def test_init_with_custom_validator():
|
||||
"""Test initialization with custom validator."""
|
||||
|
||||
def validator(key: str) -> bool:
|
||||
return True
|
||||
|
||||
auth = APIKeyAuthenticator(header_name="Authorization", validator=validator)
|
||||
|
||||
assert auth.model.name == "Authorization"
|
||||
assert auth.expected_token is None
|
||||
assert auth.custom_validator == validator
|
||||
assert auth.status_if_missing == HTTP_401_UNAUTHORIZED
|
||||
assert auth.message_if_invalid == "Invalid API key"
|
||||
|
||||
|
||||
def test_init_with_custom_parameters():
|
||||
"""Test initialization with custom status and message."""
|
||||
auth = APIKeyAuthenticator(
|
||||
header_name="X-Custom-Key",
|
||||
expected_token="token",
|
||||
status_if_missing=HTTP_403_FORBIDDEN,
|
||||
message_if_invalid="Access denied",
|
||||
)
|
||||
|
||||
assert auth.model.name == "X-Custom-Key"
|
||||
assert auth.status_if_missing == HTTP_403_FORBIDDEN
|
||||
assert auth.message_if_invalid == "Access denied"
|
||||
|
||||
|
||||
def test_scheme_name_generation():
|
||||
"""Test that scheme_name is generated correctly."""
|
||||
auth = APIKeyAuthenticator(header_name="X-Custom-Header", expected_token="token")
|
||||
|
||||
assert auth.scheme_name == "APIKeyAuthenticator-X-Custom-Header"
|
||||
|
||||
|
||||
# ========== Authentication Flow Tests ========== #
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_api_key_missing(api_key_auth, mock_request):
|
||||
"""Test behavior when API key is missing from request."""
|
||||
# Mock the parent class method to return None (no API key)
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await api_key_auth(mock_request)
|
||||
|
||||
assert exc_info.value.status_code == HTTP_401_UNAUTHORIZED
|
||||
assert exc_info.value.detail == "No API key in request"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_api_key_valid(api_key_auth, mock_request):
|
||||
"""Test behavior with valid API key."""
|
||||
# Mock the parent class to return the API key
|
||||
with patch.object(
|
||||
api_key_auth.__class__.__bases__[0],
|
||||
"__call__",
|
||||
return_value="test-secret-token",
|
||||
):
|
||||
result = await api_key_auth(mock_request)
|
||||
|
||||
assert result is True
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_api_key_invalid(api_key_auth, mock_request):
|
||||
"""Test behavior with invalid API key."""
|
||||
# Mock the parent class to return an invalid API key
|
||||
with patch.object(
|
||||
api_key_auth.__class__.__bases__[0], "__call__", return_value="invalid-token"
|
||||
):
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await api_key_auth(mock_request)
|
||||
|
||||
assert exc_info.value.status_code == HTTP_401_UNAUTHORIZED
|
||||
assert exc_info.value.detail == "Invalid API key"
|
||||
|
||||
|
||||
# ========== Custom Validator Tests ========== #
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_custom_status_and_message(mock_request):
|
||||
"""Test custom status code and message."""
|
||||
auth = APIKeyAuthenticator(
|
||||
header_name="X-API-Key",
|
||||
expected_token="valid-token",
|
||||
status_if_missing=HTTP_403_FORBIDDEN,
|
||||
message_if_invalid="Access forbidden",
|
||||
)
|
||||
|
||||
# Test missing key
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await auth(mock_request)
|
||||
|
||||
assert exc_info.value.status_code == HTTP_403_FORBIDDEN
|
||||
assert exc_info.value.detail == "No API key in request"
|
||||
|
||||
# Test invalid key
|
||||
with patch.object(
|
||||
auth.__class__.__bases__[0], "__call__", return_value="invalid-token"
|
||||
):
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await auth(mock_request)
|
||||
|
||||
assert exc_info.value.status_code == HTTP_403_FORBIDDEN
|
||||
assert exc_info.value.detail == "Access forbidden"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_custom_sync_validator(api_key_auth_custom_validator, mock_request):
|
||||
"""Test with custom synchronous validator."""
|
||||
# Mock the parent class to return the API key
|
||||
with patch.object(
|
||||
api_key_auth_custom_validator.__class__.__bases__[0],
|
||||
"__call__",
|
||||
return_value="custom-valid-key",
|
||||
):
|
||||
result = await api_key_auth_custom_validator(mock_request)
|
||||
|
||||
assert result is True
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_custom_sync_validator_invalid(
|
||||
api_key_auth_custom_validator, mock_request
|
||||
):
|
||||
"""Test custom synchronous validator with invalid key."""
|
||||
with patch.object(
|
||||
api_key_auth_custom_validator.__class__.__bases__[0],
|
||||
"__call__",
|
||||
return_value="invalid-key",
|
||||
):
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await api_key_auth_custom_validator(mock_request)
|
||||
|
||||
assert exc_info.value.status_code == HTTP_401_UNAUTHORIZED
|
||||
assert exc_info.value.detail == "Invalid API key"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_custom_async_validator(api_key_auth_async_validator, mock_request):
|
||||
"""Test with custom async validator."""
|
||||
with patch.object(
|
||||
api_key_auth_async_validator.__class__.__bases__[0],
|
||||
"__call__",
|
||||
return_value="async-valid-key",
|
||||
):
|
||||
result = await api_key_auth_async_validator(mock_request)
|
||||
|
||||
assert result is True
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_custom_async_validator_invalid(
|
||||
api_key_auth_async_validator, mock_request
|
||||
):
|
||||
"""Test custom async validator with invalid key."""
|
||||
with patch.object(
|
||||
api_key_auth_async_validator.__class__.__bases__[0],
|
||||
"__call__",
|
||||
return_value="invalid-key",
|
||||
):
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await api_key_auth_async_validator(mock_request)
|
||||
|
||||
assert exc_info.value.status_code == HTTP_401_UNAUTHORIZED
|
||||
assert exc_info.value.detail == "Invalid API key"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_validator_returns_object(api_key_auth_object_validator, mock_request):
|
||||
"""Test validator that returns an object instead of boolean."""
|
||||
with patch.object(
|
||||
api_key_auth_object_validator.__class__.__bases__[0],
|
||||
"__call__",
|
||||
return_value="user-key",
|
||||
):
|
||||
result = await api_key_auth_object_validator(mock_request)
|
||||
|
||||
expected_result = {"user_id": "123", "permissions": ["read", "write"]}
|
||||
assert result == expected_result
|
||||
# Verify the object is stored in request state
|
||||
assert mock_request.state.api_key == expected_result
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_validator_returns_none(api_key_auth_object_validator, mock_request):
|
||||
"""Test validator that returns None (falsy)."""
|
||||
with patch.object(
|
||||
api_key_auth_object_validator.__class__.__bases__[0],
|
||||
"__call__",
|
||||
return_value="invalid-key",
|
||||
):
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await api_key_auth_object_validator(mock_request)
|
||||
|
||||
assert exc_info.value.status_code == HTTP_401_UNAUTHORIZED
|
||||
assert exc_info.value.detail == "Invalid API key"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_validator_database_lookup_simulation(mock_request):
|
||||
"""Test simulation of database lookup validator."""
|
||||
# Simulate database records
|
||||
valid_api_keys = {
|
||||
"key123": {"user_id": "user1", "active": True},
|
||||
"key456": {"user_id": "user2", "active": False},
|
||||
}
|
||||
|
||||
async def db_validator(api_key: str):
|
||||
record = valid_api_keys.get(api_key)
|
||||
return record if record and record["active"] else None
|
||||
|
||||
auth = APIKeyAuthenticator(header_name="X-API-Key", validator=db_validator)
|
||||
|
||||
# Test valid active key
|
||||
with patch.object(auth.__class__.__bases__[0], "__call__", return_value="key123"):
|
||||
result = await auth(mock_request)
|
||||
assert result == {"user_id": "user1", "active": True}
|
||||
assert mock_request.state.api_key == {"user_id": "user1", "active": True}
|
||||
|
||||
# Test inactive key
|
||||
mock_request.state = Mock() # Reset state
|
||||
with patch.object(auth.__class__.__bases__[0], "__call__", return_value="key456"):
|
||||
with pytest.raises(HTTPException):
|
||||
await auth(mock_request)
|
||||
|
||||
# Test non-existent key
|
||||
with patch.object(
|
||||
auth.__class__.__bases__[0], "__call__", return_value="nonexistent"
|
||||
):
|
||||
with pytest.raises(HTTPException):
|
||||
await auth(mock_request)
|
||||
|
||||
|
||||
# ========== Default Validator Tests ========== #
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_default_validator_key_valid(api_key_auth):
|
||||
"""Test default validator with valid token."""
|
||||
result = await api_key_auth.default_validator("test-secret-token")
|
||||
assert result is True
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_default_validator_key_invalid(api_key_auth):
|
||||
"""Test default validator with invalid token."""
|
||||
result = await api_key_auth.default_validator("wrong-token")
|
||||
assert result is False
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_default_validator_missing_expected_token():
|
||||
"""Test default validator when expected_token is not set."""
|
||||
auth = APIKeyAuthenticator(header_name="X-API-Key")
|
||||
|
||||
with pytest.raises(MissingConfigError) as exc_info:
|
||||
await auth.default_validator("any-token")
|
||||
|
||||
assert "expected_token is not set" in str(exc_info.value)
|
||||
assert "either specify it or provide a custom validator" in str(exc_info.value)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_default_validator_uses_constant_time_comparison(api_key_auth):
|
||||
"""
|
||||
Test that default validator uses secrets.compare_digest for timing attack protection
|
||||
"""
|
||||
with patch("secrets.compare_digest") as mock_compare:
|
||||
mock_compare.return_value = True
|
||||
|
||||
await api_key_auth.default_validator("test-token")
|
||||
|
||||
mock_compare.assert_called_once_with("test-token", "test-secret-token")
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_api_key_empty(mock_request):
|
||||
"""Test behavior with empty string API key."""
|
||||
auth = APIKeyAuthenticator(header_name="X-API-Key", expected_token="valid-token")
|
||||
|
||||
with patch.object(auth.__class__.__bases__[0], "__call__", return_value=""):
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await auth(mock_request)
|
||||
|
||||
assert exc_info.value.status_code == HTTP_401_UNAUTHORIZED
|
||||
assert exc_info.value.detail == "Invalid API key"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_api_key_whitespace_only(mock_request):
|
||||
"""Test behavior with whitespace-only API key."""
|
||||
auth = APIKeyAuthenticator(header_name="X-API-Key", expected_token="valid-token")
|
||||
|
||||
with patch.object(
|
||||
auth.__class__.__bases__[0], "__call__", return_value=" \t\n "
|
||||
):
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await auth(mock_request)
|
||||
|
||||
assert exc_info.value.status_code == HTTP_401_UNAUTHORIZED
|
||||
assert exc_info.value.detail == "Invalid API key"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_api_key_very_long(mock_request):
|
||||
"""Test behavior with extremely long API key (potential DoS protection)."""
|
||||
auth = APIKeyAuthenticator(header_name="X-API-Key", expected_token="valid-token")
|
||||
|
||||
# Create a very long API key (10MB)
|
||||
long_api_key = "a" * (10 * 1024 * 1024)
|
||||
|
||||
with patch.object(
|
||||
auth.__class__.__bases__[0], "__call__", return_value=long_api_key
|
||||
):
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await auth(mock_request)
|
||||
|
||||
assert exc_info.value.status_code == HTTP_401_UNAUTHORIZED
|
||||
assert exc_info.value.detail == "Invalid API key"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_api_key_with_null_bytes(mock_request):
|
||||
"""Test behavior with API key containing null bytes."""
|
||||
auth = APIKeyAuthenticator(header_name="X-API-Key", expected_token="valid-token")
|
||||
|
||||
api_key_with_null = "valid\x00token"
|
||||
|
||||
with patch.object(
|
||||
auth.__class__.__bases__[0], "__call__", return_value=api_key_with_null
|
||||
):
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await auth(mock_request)
|
||||
|
||||
assert exc_info.value.status_code == HTTP_401_UNAUTHORIZED
|
||||
assert exc_info.value.detail == "Invalid API key"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_api_key_with_control_characters(mock_request):
|
||||
"""Test behavior with API key containing control characters."""
|
||||
auth = APIKeyAuthenticator(header_name="X-API-Key", expected_token="valid-token")
|
||||
|
||||
# API key with various control characters
|
||||
api_key_with_control = "valid\r\n\t\x1b[31mtoken"
|
||||
|
||||
with patch.object(
|
||||
auth.__class__.__bases__[0], "__call__", return_value=api_key_with_control
|
||||
):
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await auth(mock_request)
|
||||
|
||||
assert exc_info.value.status_code == HTTP_401_UNAUTHORIZED
|
||||
assert exc_info.value.detail == "Invalid API key"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_api_key_with_unicode_characters(mock_request):
|
||||
"""Test behavior with Unicode characters in API key."""
|
||||
auth = APIKeyAuthenticator(header_name="X-API-Key", expected_token="valid-token")
|
||||
|
||||
# API key with Unicode characters
|
||||
unicode_api_key = "validтокен🔑"
|
||||
|
||||
with patch.object(
|
||||
auth.__class__.__bases__[0], "__call__", return_value=unicode_api_key
|
||||
):
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await auth(mock_request)
|
||||
|
||||
assert exc_info.value.status_code == HTTP_401_UNAUTHORIZED
|
||||
assert exc_info.value.detail == "Invalid API key"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_api_key_with_unicode_characters_normalization_attack(mock_request):
|
||||
"""Test that Unicode normalization doesn't bypass validation."""
|
||||
# Create auth with composed Unicode character
|
||||
auth = APIKeyAuthenticator(
|
||||
header_name="X-API-Key", expected_token="café" # é is composed
|
||||
)
|
||||
|
||||
# Try with decomposed version (c + a + f + e + ´)
|
||||
decomposed_key = "cafe\u0301" # é as combining character
|
||||
|
||||
with patch.object(
|
||||
auth.__class__.__bases__[0], "__call__", return_value=decomposed_key
|
||||
):
|
||||
# Should fail because secrets.compare_digest doesn't normalize
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await auth(mock_request)
|
||||
|
||||
assert exc_info.value.status_code == HTTP_401_UNAUTHORIZED
|
||||
assert exc_info.value.detail == "Invalid API key"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_api_key_with_binary_data(mock_request):
|
||||
"""Test behavior with binary data in API key."""
|
||||
auth = APIKeyAuthenticator(header_name="X-API-Key", expected_token="valid-token")
|
||||
|
||||
# Binary data that might cause encoding issues
|
||||
binary_api_key = bytes([0xFF, 0xFE, 0xFD, 0xFC, 0x80, 0x81]).decode(
|
||||
"latin1", errors="ignore"
|
||||
)
|
||||
|
||||
with patch.object(
|
||||
auth.__class__.__bases__[0], "__call__", return_value=binary_api_key
|
||||
):
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await auth(mock_request)
|
||||
|
||||
assert exc_info.value.status_code == HTTP_401_UNAUTHORIZED
|
||||
assert exc_info.value.detail == "Invalid API key"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_api_key_with_regex_dos_attack_pattern(mock_request):
|
||||
"""Test behavior with API key of repeated characters (pattern attack)."""
|
||||
auth = APIKeyAuthenticator(header_name="X-API-Key", expected_token="valid-token")
|
||||
|
||||
# Pattern that might cause regex DoS in poorly implemented validators
|
||||
repeated_key = "a" * 1000 + "b" * 1000 + "c" * 1000
|
||||
|
||||
with patch.object(
|
||||
auth.__class__.__bases__[0], "__call__", return_value=repeated_key
|
||||
):
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await auth(mock_request)
|
||||
|
||||
assert exc_info.value.status_code == HTTP_401_UNAUTHORIZED
|
||||
assert exc_info.value.detail == "Invalid API key"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_api_keys_with_newline_variations(mock_request):
|
||||
"""Test different newline characters in API key."""
|
||||
auth = APIKeyAuthenticator(header_name="X-API-Key", expected_token="valid-token")
|
||||
|
||||
newline_variations = [
|
||||
"valid\ntoken", # Unix newline
|
||||
"valid\r\ntoken", # Windows newline
|
||||
"valid\rtoken", # Mac newline
|
||||
"valid\x85token", # NEL (Next Line)
|
||||
"valid\x0Btoken", # Vertical Tab
|
||||
"valid\x0Ctoken", # Form Feed
|
||||
]
|
||||
|
||||
for api_key in newline_variations:
|
||||
with patch.object(
|
||||
auth.__class__.__bases__[0], "__call__", return_value=api_key
|
||||
):
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await auth(mock_request)
|
||||
|
||||
assert exc_info.value.status_code == HTTP_401_UNAUTHORIZED
|
||||
assert exc_info.value.detail == "Invalid API key"
|
||||
@@ -7,12 +7,10 @@ import prisma
|
||||
import backend.data.block
|
||||
from backend.blocks import load_all_blocks
|
||||
from backend.blocks.llm import LlmModel
|
||||
from backend.data.block import Block, BlockCategory, BlockSchema
|
||||
from backend.data.credit import get_block_costs
|
||||
from backend.data.block import Block, BlockCategory, BlockInfo, BlockSchema
|
||||
from backend.integrations.providers import ProviderName
|
||||
from backend.server.v2.builder.model import (
|
||||
BlockCategoryResponse,
|
||||
BlockData,
|
||||
BlockResponse,
|
||||
BlockType,
|
||||
CountResponse,
|
||||
@@ -25,7 +23,7 @@ from backend.util.models import Pagination
|
||||
logger = logging.getLogger(__name__)
|
||||
llm_models = [name.name.lower().replace("_", " ") for name in LlmModel]
|
||||
_static_counts_cache: dict | None = None
|
||||
_suggested_blocks: list[BlockData] | None = None
|
||||
_suggested_blocks: list[BlockInfo] | None = None
|
||||
|
||||
|
||||
def get_block_categories(category_blocks: int = 3) -> list[BlockCategoryResponse]:
|
||||
@@ -53,7 +51,7 @@ def get_block_categories(category_blocks: int = 3) -> list[BlockCategoryResponse
|
||||
|
||||
# Append if the category has less than the specified number of blocks
|
||||
if len(categories[category].blocks) < category_blocks:
|
||||
categories[category].blocks.append(block.to_dict())
|
||||
categories[category].blocks.append(block.get_info())
|
||||
|
||||
# Sort categories by name
|
||||
return sorted(categories.values(), key=lambda x: x.name)
|
||||
@@ -109,10 +107,8 @@ def get_blocks(
|
||||
take -= 1
|
||||
blocks.append(block)
|
||||
|
||||
costs = get_block_costs()
|
||||
|
||||
return BlockResponse(
|
||||
blocks=[{**b.to_dict(), "costs": costs.get(b.id, [])} for b in blocks],
|
||||
blocks=[b.get_info() for b in blocks],
|
||||
pagination=Pagination(
|
||||
total_items=total,
|
||||
total_pages=(total + page_size - 1) // page_size,
|
||||
@@ -174,11 +170,9 @@ def search_blocks(
|
||||
take -= 1
|
||||
blocks.append(block)
|
||||
|
||||
costs = get_block_costs()
|
||||
|
||||
return SearchBlocksResponse(
|
||||
blocks=BlockResponse(
|
||||
blocks=[{**b.to_dict(), "costs": costs.get(b.id, [])} for b in blocks],
|
||||
blocks=[b.get_info() for b in blocks],
|
||||
pagination=Pagination(
|
||||
total_items=total,
|
||||
total_pages=(total + page_size - 1) // page_size,
|
||||
@@ -323,7 +317,7 @@ def _get_all_providers() -> dict[ProviderName, Provider]:
|
||||
return providers
|
||||
|
||||
|
||||
async def get_suggested_blocks(count: int = 5) -> list[BlockData]:
|
||||
async def get_suggested_blocks(count: int = 5) -> list[BlockInfo]:
|
||||
global _suggested_blocks
|
||||
|
||||
if _suggested_blocks is not None and len(_suggested_blocks) >= count:
|
||||
@@ -351,7 +345,7 @@ async def get_suggested_blocks(count: int = 5) -> list[BlockData]:
|
||||
|
||||
# Get the top blocks based on execution count
|
||||
# But ignore Input and Output blocks
|
||||
blocks: list[tuple[BlockData, int]] = []
|
||||
blocks: list[tuple[BlockInfo, int]] = []
|
||||
|
||||
for block_type in load_all_blocks().values():
|
||||
block: Block[BlockSchema, BlockSchema] = block_type()
|
||||
@@ -366,7 +360,7 @@ async def get_suggested_blocks(count: int = 5) -> list[BlockData]:
|
||||
(row["execution_count"] for row in results if row["block_id"] == block.id),
|
||||
0,
|
||||
)
|
||||
blocks.append((block.to_dict(), execution_count))
|
||||
blocks.append((block.get_info(), execution_count))
|
||||
# Sort blocks by execution count
|
||||
blocks.sort(key=lambda x: x[1], reverse=True)
|
||||
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
from typing import Any, Literal
|
||||
from typing import Literal
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
import backend.server.v2.library.model as library_model
|
||||
import backend.server.v2.store.model as store_model
|
||||
from backend.data.block import BlockInfo
|
||||
from backend.integrations.providers import ProviderName
|
||||
from backend.util.models import Pagination
|
||||
|
||||
@@ -16,29 +17,27 @@ FilterType = Literal[
|
||||
|
||||
BlockType = Literal["all", "input", "action", "output"]
|
||||
|
||||
BlockData = dict[str, Any]
|
||||
|
||||
|
||||
# Suggestions
|
||||
class SuggestionsResponse(BaseModel):
|
||||
otto_suggestions: list[str]
|
||||
recent_searches: list[str]
|
||||
providers: list[ProviderName]
|
||||
top_blocks: list[BlockData]
|
||||
top_blocks: list[BlockInfo]
|
||||
|
||||
|
||||
# All blocks
|
||||
class BlockCategoryResponse(BaseModel):
|
||||
name: str
|
||||
total_blocks: int
|
||||
blocks: list[BlockData]
|
||||
blocks: list[BlockInfo]
|
||||
|
||||
model_config = {"use_enum_values": False} # <== use enum names like "AI"
|
||||
|
||||
|
||||
# Input/Action/Output and see all for block categories
|
||||
class BlockResponse(BaseModel):
|
||||
blocks: list[BlockData]
|
||||
blocks: list[BlockInfo]
|
||||
pagination: Pagination
|
||||
|
||||
|
||||
@@ -71,7 +70,7 @@ class SearchBlocksResponse(BaseModel):
|
||||
|
||||
|
||||
class SearchResponse(BaseModel):
|
||||
items: list[BlockData | library_model.LibraryAgent | store_model.StoreAgent]
|
||||
items: list[BlockInfo | library_model.LibraryAgent | store_model.StoreAgent]
|
||||
total_items: dict[FilterType, int]
|
||||
page: int
|
||||
more_pages: bool
|
||||
|
||||
@@ -64,6 +64,9 @@ class LibraryAgent(pydantic.BaseModel):
|
||||
# Indicates if this agent is the latest version
|
||||
is_latest_version: bool
|
||||
|
||||
# Recommended schedule cron (from marketplace agents)
|
||||
recommended_schedule_cron: str | None = None
|
||||
|
||||
@staticmethod
|
||||
def from_db(
|
||||
agent: prisma.models.LibraryAgent,
|
||||
@@ -130,6 +133,7 @@ class LibraryAgent(pydantic.BaseModel):
|
||||
new_output=new_output,
|
||||
can_access_graph=can_access_graph,
|
||||
is_latest_version=is_latest_version,
|
||||
recommended_schedule_cron=agent.AgentGraph.recommendedScheduleCron,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -50,6 +50,7 @@ async def test_get_library_agents_success(
|
||||
credentials_input_schema={"type": "object", "properties": {}},
|
||||
has_external_trigger=False,
|
||||
status=library_model.LibraryAgentStatus.COMPLETED,
|
||||
recommended_schedule_cron=None,
|
||||
new_output=False,
|
||||
can_access_graph=True,
|
||||
is_latest_version=True,
|
||||
@@ -69,6 +70,7 @@ async def test_get_library_agents_success(
|
||||
credentials_input_schema={"type": "object", "properties": {}},
|
||||
has_external_trigger=False,
|
||||
status=library_model.LibraryAgentStatus.COMPLETED,
|
||||
recommended_schedule_cron=None,
|
||||
new_output=False,
|
||||
can_access_graph=False,
|
||||
is_latest_version=True,
|
||||
|
||||
@@ -183,6 +183,13 @@ async def get_store_agent_details(
|
||||
store_listing.hasApprovedVersion if store_listing else False
|
||||
)
|
||||
|
||||
if store_listing and store_listing.ActiveVersion:
|
||||
recommended_schedule_cron = (
|
||||
store_listing.ActiveVersion.recommendedScheduleCron
|
||||
)
|
||||
else:
|
||||
recommended_schedule_cron = None
|
||||
|
||||
logger.debug(f"Found agent details for {username}/{agent_name}")
|
||||
return backend.server.v2.store.model.StoreAgentDetails(
|
||||
store_listing_version_id=agent.storeListingVersionId,
|
||||
@@ -201,6 +208,7 @@ async def get_store_agent_details(
|
||||
last_updated=agent.updated_at,
|
||||
active_version_id=active_version_id,
|
||||
has_approved_version=has_approved_version,
|
||||
recommended_schedule_cron=recommended_schedule_cron,
|
||||
)
|
||||
except backend.server.v2.store.exceptions.AgentNotFoundError:
|
||||
raise
|
||||
@@ -562,6 +570,7 @@ async def create_store_submission(
|
||||
sub_heading: str = "",
|
||||
categories: list[str] = [],
|
||||
changes_summary: str | None = "Initial Submission",
|
||||
recommended_schedule_cron: str | None = None,
|
||||
) -> backend.server.v2.store.model.StoreSubmission:
|
||||
"""
|
||||
Create the first (and only) store listing and thus submission as a normal user
|
||||
@@ -655,6 +664,7 @@ async def create_store_submission(
submissionStatus=prisma.enums.SubmissionStatus.PENDING,
submittedAt=datetime.now(tz=timezone.utc),
changesSummary=changes_summary,
recommendedScheduleCron=recommended_schedule_cron,
)
]
},
@@ -710,6 +720,7 @@ async def edit_store_submission(
sub_heading: str = "",
categories: list[str] = [],
changes_summary: str | None = "Update submission",
recommended_schedule_cron: str | None = None,
) -> backend.server.v2.store.model.StoreSubmission:
"""
Edit an existing store listing submission.
@@ -789,6 +800,7 @@ async def edit_store_submission(
sub_heading=sub_heading,
categories=categories,
changes_summary=changes_summary,
recommended_schedule_cron=recommended_schedule_cron,
)

# For PENDING submissions, we can update the existing version
@@ -804,6 +816,7 @@ async def edit_store_submission(
categories=categories,
subHeading=sub_heading,
changesSummary=changes_summary,
recommendedScheduleCron=recommended_schedule_cron,
),
)
@@ -866,6 +879,7 @@ async def create_store_version(
sub_heading: str = "",
categories: list[str] = [],
changes_summary: str | None = "Initial submission",
recommended_schedule_cron: str | None = None,
) -> backend.server.v2.store.model.StoreSubmission:
"""
Create a new version for an existing store listing
@@ -935,6 +949,7 @@ async def create_store_version(
submissionStatus=prisma.enums.SubmissionStatus.PENDING,
submittedAt=datetime.now(),
changesSummary=changes_summary,
recommendedScheduleCron=recommended_schedule_cron,
storeListingId=store_listing_id,
)
)
@@ -1150,6 +1165,7 @@ async def get_my_agents(
last_edited=graph.updatedAt or graph.createdAt,
description=graph.description or "",
agent_image=library_agent.imageUrl,
recommended_schedule_cron=graph.recommendedScheduleCron,
)
for library_agent in library_agents
if (graph := library_agent.AgentGraph)
@@ -1351,6 +1367,21 @@ async def review_store_submission(
]
)

# Update the AgentGraph with store listing data
await prisma.models.AgentGraph.prisma().update(
where={
"graphVersionId": {
"id": store_listing_version.agentGraphId,
"version": store_listing_version.agentGraphVersion,
}
},
data={
"name": store_listing_version.name,
"description": store_listing_version.description,
"recommendedScheduleCron": store_listing_version.recommendedScheduleCron,
},
)

await prisma.models.StoreListing.prisma(tx).update(
where={"id": store_listing_version.StoreListing.id},
data={
@@ -90,6 +90,8 @@ async def test_get_store_agent_details(mocker):
mock_store_listing = mocker.MagicMock()
mock_store_listing.activeVersionId = "active-version-id"
mock_store_listing.hasApprovedVersion = True
mock_store_listing.ActiveVersion = mocker.MagicMock()
mock_store_listing.ActiveVersion.recommendedScheduleCron = None

# Mock StoreAgent prisma call
mock_store_agent = mocker.patch("prisma.models.StoreAgent.prisma")
@@ -14,6 +14,7 @@ class MyAgent(pydantic.BaseModel):
agent_image: str | None = None
description: str
last_edited: datetime.datetime
recommended_schedule_cron: str | None = None


class MyAgentsResponse(pydantic.BaseModel):
@@ -53,6 +54,7 @@ class StoreAgentDetails(pydantic.BaseModel):
rating: float
versions: list[str]
last_updated: datetime.datetime
recommended_schedule_cron: str | None = None

active_version_id: str | None = None
has_approved_version: bool = False
@@ -157,6 +159,7 @@ class StoreSubmissionRequest(pydantic.BaseModel):
description: str = ""
categories: list[str] = []
changes_summary: str | None = None
recommended_schedule_cron: str | None = None


class StoreSubmissionEditRequest(pydantic.BaseModel):
@@ -167,6 +170,7 @@ class StoreSubmissionEditRequest(pydantic.BaseModel):
description: str = ""
categories: list[str] = []
changes_summary: str | None = None
recommended_schedule_cron: str | None = None


class ProfileDetails(pydantic.BaseModel):
@@ -535,6 +535,7 @@ async def create_submission(
sub_heading=submission_request.sub_heading,
categories=submission_request.categories,
changes_summary=submission_request.changes_summary or "Initial Submission",
recommended_schedule_cron=submission_request.recommended_schedule_cron,
)
except Exception:
logger.exception("Exception occurred whilst creating store submission")
@@ -580,6 +581,7 @@ async def edit_submission(
sub_heading=submission_request.sub_heading,
categories=submission_request.categories,
changes_summary=submission_request.changes_summary,
recommended_schedule_cron=submission_request.recommended_schedule_cron,
)
@@ -295,14 +295,17 @@ async def health():

class WebsocketServer(AppProcess):
def run(self):
logger.info(f"CORS allow origins: {settings.config.backend_cors_allow_origins}")
server_app = CORSMiddleware(
app=app,
allow_origins=settings.config.backend_cors_allow_origins,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
if settings.config.enable_cors_all_origins:
server_app = CORSMiddleware(
app=app,
allow_origins=settings.config.backend_cors_allow_origins,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
else:
logger.info("CORS is disabled")
server_app = app

uvicorn.run(
server_app,
@@ -33,7 +33,7 @@ def sentry_init():
)


def sentry_capture_error(error: Exception):
def sentry_capture_error(error: BaseException):
sentry_sdk.capture_exception(error)
sentry_sdk.flush()
@@ -76,6 +76,14 @@ class AppProcess(ABC):
logger.warning(
f"[{self.service_name}] Termination request: {type(e).__name__}; {e} executing cleanup."
)
# Send error to Sentry before cleanup
if not isinstance(e, (KeyboardInterrupt, SystemExit)):
try:
from backend.util.metrics import sentry_capture_error

sentry_capture_error(e)
except Exception:
pass # Silently ignore if Sentry isn't available
finally:
self.cleanup()
logger.info(f"[{self.service_name}] Terminated.")
@@ -368,6 +368,11 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
description="Maximum message size limit for communication with the message bus",
)

enable_cors_all_origins: bool = Field(
default=True,
description="Whether to enable all CORS origins",
)

backend_cors_allow_origins: List[str] = Field(default=["http://localhost:3000"])

@field_validator("backend_cors_allow_origins")
@@ -0,0 +1,10 @@
-- These changes are part of improvements to our API key system.
-- See https://github.com/Significant-Gravitas/AutoGPT/pull/10796 for context.

-- Add 'salt' column for Scrypt hashing
ALTER TABLE "APIKey" ADD COLUMN "salt" TEXT;

-- Rename columns for clarity
ALTER TABLE "APIKey" RENAME COLUMN "key" TO "hash";
ALTER TABLE "APIKey" RENAME COLUMN "prefix" TO "head";
ALTER TABLE "APIKey" RENAME COLUMN "postfix" TO "tail";
@@ -0,0 +1,3 @@
-- AlterTable
ALTER TABLE "StoreListingVersion" ADD COLUMN "recommendedScheduleCron" TEXT;
ALTER TABLE "AgentGraph" ADD COLUMN "recommendedScheduleCron" TEXT;
autogpt_platform/backend/poetry.lock (generated, 1 change)
@@ -403,6 +403,7 @@ develop = true

[package.dependencies]
colorama = "^0.4.6"
cryptography = "^45.0"
expiringdict = "^1.2.2"
fastapi = "^0.116.1"
google-cloud-logging = "^3.12.1"
@@ -110,6 +110,7 @@ model AgentGraph {

name String?
description String?
recommendedScheduleCron String?

isActive Boolean @default(true)

@@ -785,6 +786,8 @@ model StoreListingVersion {
reviewComments String? // Comments visible to creator
reviewedAt DateTime?

recommendedScheduleCron String? // cron expression like "0 9 * * *"

// Reviews for this specific version
Reviews StoreListingReview[]

@@ -828,11 +831,13 @@ enum APIKeyPermission {
}

model APIKey {
id String @id @default(uuid())
name String
prefix String // First 8 chars for identification
postfix String
key String @unique // Hashed key
id String @id @default(uuid())
name String
head String // First few chars for identification
tail String
hash String @unique
salt String? // null for legacy unsalted keys

status APIKeyStatus @default(ACTIVE)
permissions APIKeyPermission[]

@@ -846,7 +851,7 @@ model APIKey {
userId String
User User @relation(fields: [userId], references: [id], onDelete: Cascade)

@@index([prefix, name])
@@index([head, name])
@@index([userId, status])
}
@@ -22,6 +22,7 @@
"1.1.0"
],
"last_updated": "2023-01-01T00:00:00",
"recommended_schedule_cron": null,
"active_version_id": null,
"has_approved_version": false
}
@@ -23,6 +23,7 @@
"required": [],
"type": "object"
},
"recommended_schedule_cron": null,
"sub_graphs": [],
"trigger_setup_info": null,
"user_id": "test-user-id",

@@ -22,6 +22,7 @@
"required": [],
"type": "object"
},
"recommended_schedule_cron": null,
"sub_graphs": [],
"trigger_setup_info": null,
"user_id": "test-user-id",
@@ -27,7 +27,8 @@
"trigger_setup_info": null,
"new_output": false,
"can_access_graph": true,
"is_latest_version": true
"is_latest_version": true,
"recommended_schedule_cron": null
},
{
"id": "test-agent-2",
@@ -56,7 +57,8 @@
"trigger_setup_info": null,
"new_output": false,
"can_access_graph": false,
"is_latest_version": true
"is_latest_version": true,
"recommended_schedule_cron": null
}
],
"pagination": {
@@ -23,7 +23,7 @@ from typing import Any, Dict, List

from faker import Faker

from backend.data.api_key import generate_api_key
from backend.data.api_key import create_api_key
from backend.data.credit import get_user_credit_model
from backend.data.db import prisma
from backend.data.graph import Graph, Link, Node, create_graph
@@ -466,7 +466,7 @@ class TestDataCreator:

try:
# Use the API function to create API key
api_key, _ = await generate_api_key(
api_key, _ = await create_api_key(
name=faker.word(),
user_id=user["id"],
permissions=[
@@ -370,7 +370,7 @@ class TestProviderBuilder:

def test_provider_builder_with_base_cost(self):
"""Test building a provider with base costs."""
from backend.data.cost import BlockCostType
from backend.data.block import BlockCostType

provider = (
ProviderBuilder("cost_test")
@@ -411,7 +411,7 @@ class TestProviderBuilder:

def test_provider_builder_complete_example(self):
"""Test building a complete provider with all features."""
from backend.data.cost import BlockCostType
from backend.data.block import BlockCostType

class TestOAuth(BaseOAuthHandler):
PROVIDER_NAME = ProviderName.GITHUB
@@ -21,6 +21,7 @@ import random
from datetime import datetime

import prisma.enums
from autogpt_libs.api_key.keysmith import APIKeySmith
from faker import Faker
from prisma import Json, Prisma
from prisma.types import (
@@ -30,7 +31,6 @@ from prisma.types import (
AgentNodeLinkCreateInput,
AnalyticsDetailsCreateInput,
AnalyticsMetricsCreateInput,
APIKeyCreateInput,
CreditTransactionCreateInput,
IntegrationWebhookCreateInput,
ProfileCreateInput,
@@ -544,20 +544,22 @@ async def main():
# Insert APIKeys
print(f"Inserting {NUM_USERS} api keys")
for user in users:
api_key = APIKeySmith().generate_key()
await db.apikey.create(
data=APIKeyCreateInput(
name=faker.word(),
prefix=str(faker.uuid4())[:8],
postfix=str(faker.uuid4())[-8:],
key=str(faker.sha256()),
status=prisma.enums.APIKeyStatus.ACTIVE,
permissions=[
data={
"name": faker.word(),
"head": api_key.head,
"tail": api_key.tail,
"hash": api_key.hash,
"salt": api_key.salt,
"status": prisma.enums.APIKeyStatus.ACTIVE,
"permissions": [
prisma.enums.APIKeyPermission.EXECUTE_GRAPH,
prisma.enums.APIKeyPermission.READ_GRAPH,
],
description=faker.text(),
userId=user.id,
)
"description": faker.text(),
"userId": user.id,
}
)

# Refresh materialized views
@@ -4,3 +4,5 @@ pnpm-lock.yaml
.auth
build
public
Dockerfile
.prettierignore
@@ -12,14 +12,21 @@ COPY autogpt_platform/frontend/ .
# Allow CI to opt-in to Playwright test build-time flags
ARG NEXT_PUBLIC_PW_TEST="false"
ENV NEXT_PUBLIC_PW_TEST=$NEXT_PUBLIC_PW_TEST
RUN if [ -f .env ]; then \
ENV NODE_ENV="production"
# Merge env files appropriately based on environment
RUN if [ -f .env.production ]; then \
# In CI/CD: merge defaults with production (production takes precedence)
cat .env.default .env.production > .env.merged && mv .env.merged .env.production; \
elif [ -f .env ]; then \
# Local with custom .env: merge defaults with .env
cat .env.default .env > .env.merged && mv .env.merged .env; \
else \
# Local without custom .env: use defaults
cp .env.default .env; \
fi
RUN pnpm run generate:api
# In CI, we want NEXT_PUBLIC_PW_TEST=true during build so Next.js inlines it
RUN if [ "$NEXT_PUBLIC_PW_TEST" = "true" ]; then NEXT_PUBLIC_PW_TEST=true pnpm build; else pnpm build; fi
RUN if [ "$NEXT_PUBLIC_PW_TEST" = "true" ]; then NEXT_PUBLIC_PW_TEST=true NODE_OPTIONS="--max-old-space-size=4096" pnpm build; else NODE_OPTIONS="--max-old-space-size=4096" pnpm build; fi

# Prod stage - based on NextJS reference Dockerfile https://github.com/vercel/next.js/blob/64271354533ed16da51be5dce85f0dbd15f17517/examples/with-docker/Dockerfile
FROM node:21-alpine AS prod
@@ -2,18 +2,28 @@
// The config you add here will be used whenever a users loads a page in their browser.
// https://docs.sentry.io/platforms/javascript/guides/nextjs/

import { BehaveAs, getBehaveAs, getEnvironmentStr } from "@/lib/utils";
import {
AppEnv,
BehaveAs,
getAppEnv,
getBehaveAs,
getEnvironmentStr,
} from "@/lib/utils";
import * as Sentry from "@sentry/nextjs";

const isProductionCloud =
process.env.NODE_ENV === "production" && getBehaveAs() === BehaveAs.CLOUD;
const isProdOrDev = [AppEnv.PROD, AppEnv.DEV].includes(getAppEnv());

const isCloud = getBehaveAs() === BehaveAs.CLOUD;
const isDisabled = process.env.DISABLE_SENTRY === "true";

const shouldEnable = !isDisabled && isProdOrDev && isCloud;

Sentry.init({
dsn: "https://fe4e4aa4a283391808a5da396da20159@o4505260022104064.ingest.us.sentry.io/4507946746380288",

environment: getEnvironmentStr(),

enabled: isProductionCloud,
enabled: shouldEnable,

// Add optional integrations for additional features
integrations: [
@@ -56,10 +66,7 @@ Sentry.init({
// For example, a tracesSampleRate of 0.5 and profilesSampleRate of 0.5 would
// result in 25% of transactions being profiled (0.5*0.5=0.25)
profilesSampleRate: 1.0,
_experiments: {
// Enable logs to be sent to Sentry.
enableLogs: true,
},
enableLogs: true,
});

export const onRouterTransitionStart = Sentry.captureRouterTransitionStart;
@@ -41,6 +41,26 @@ export default defineConfig({
useInfiniteQueryParam: "page",
},
},
"getV2Get builder blocks": {
query: {
useInfinite: true,
useInfiniteQueryParam: "page",
useQuery: true,
},
},
"getV2Get builder integration providers": {
query: {
useInfinite: true,
useInfiniteQueryParam: "page",
},
},
"getV2List store agents": {
query: {
useInfinite: true,
useInfiniteQueryParam: "page",
useQuery: true,
},
},
},
},
},
@@ -104,32 +104,32 @@
"zod": "3.25.76"
},
"devDependencies": {
"@chromatic-com/storybook": "4.1.0",
"@playwright/test": "1.54.2",
"@storybook/addon-a11y": "9.1.2",
"@storybook/addon-docs": "9.1.2",
"@storybook/addon-links": "9.1.2",
"@storybook/addon-onboarding": "9.1.2",
"@storybook/nextjs": "9.1.2",
"@tanstack/eslint-plugin-query": "5.83.1",
"@tanstack/react-query-devtools": "5.84.2",
"@chromatic-com/storybook": "4.1.1",
"@playwright/test": "1.55.0",
"@storybook/addon-a11y": "9.1.5",
"@storybook/addon-docs": "9.1.5",
"@storybook/addon-links": "9.1.5",
"@storybook/addon-onboarding": "9.1.5",
"@storybook/nextjs": "9.1.5",
"@tanstack/eslint-plugin-query": "5.86.0",
"@tanstack/react-query-devtools": "5.87.3",
"@types/canvas-confetti": "1.9.0",
"@types/lodash": "4.17.20",
"@types/negotiator": "0.6.4",
"@types/node": "24.2.1",
"@types/node": "24.3.1",
"@types/react": "18.3.17",
"@types/react-dom": "18.3.5",
"@types/react-modal": "3.16.3",
"@types/react-window": "1.8.8",
"axe-playwright": "2.1.0",
"chromatic": "13.1.3",
"concurrently": "9.2.0",
"chromatic": "13.1.4",
"concurrently": "9.2.1",
"cross-env": "7.0.3",
"eslint": "8.57.1",
"eslint-config-next": "15.4.6",
"eslint-plugin-storybook": "9.1.2",
"eslint-config-next": "15.5.2",
"eslint-plugin-storybook": "9.1.5",
"import-in-the-middle": "1.14.2",
"msw": "2.10.4",
"msw": "2.11.1",
"msw-storybook-addon": "2.0.5",
"orval": "7.11.2",
"pbkdf2": "3.1.3",
@@ -137,7 +137,7 @@
"prettier": "3.6.2",
"prettier-plugin-tailwindcss": "0.6.14",
"require-in-the-middle": "7.5.2",
"storybook": "9.1.2",
"storybook": "9.1.5",
"tailwindcss": "3.4.17",
"typescript": "5.9.2"
},
autogpt_platform/frontend/pnpm-lock.yaml (generated, 1708 changes)
BIN autogpt_platform/frontend/public/integrations/aiml_api.png (new file, 7.7 KiB)
BIN autogpt_platform/frontend/public/integrations/airtable.png (new file, 1.7 KiB)
BIN autogpt_platform/frontend/public/integrations/anthropic.png (new file, 10 KiB)
BIN autogpt_platform/frontend/public/integrations/apollo.png (new file, 4.5 KiB)
BIN autogpt_platform/frontend/public/integrations/baas.png (new file, 2.7 KiB)
BIN autogpt_platform/frontend/public/integrations/bannerbear.png (new file, 2.8 KiB)
BIN (modified image, path not shown) 36 KiB → 4.2 KiB
BIN autogpt_platform/frontend/public/integrations/d_id.png (new file, 2.6 KiB)
BIN autogpt_platform/frontend/public/integrations/dataforseo.png (new file, 14 KiB)
BIN (modified image, path not shown) 19 KiB → 5.1 KiB
BIN autogpt_platform/frontend/public/integrations/e2b.png (new file, 7.2 KiB)
BIN autogpt_platform/frontend/public/integrations/enrichlayer.png (new file, 5.0 KiB)
BIN autogpt_platform/frontend/public/integrations/exa.png (new file, 12 KiB)
BIN autogpt_platform/frontend/public/integrations/fal.png (new file, 12 KiB)
BIN autogpt_platform/frontend/public/integrations/firecrawl.png (new file, 16 KiB)
BIN (modified image, path not shown) 20 KiB → 6.8 KiB
BIN (modified image, path not shown) 55 KiB → 13 KiB
BIN autogpt_platform/frontend/public/integrations/google_maps.png (new file, 1.1 KiB)
BIN autogpt_platform/frontend/public/integrations/groq.png (new file, 10 KiB)
BIN autogpt_platform/frontend/public/integrations/http.png (new file, 10 KiB)
BIN (modified image, path not shown) 23 KiB → 5.1 KiB
BIN autogpt_platform/frontend/public/integrations/ideogram.png (new file, 18 KiB)
BIN autogpt_platform/frontend/public/integrations/jina.png (new file, 8.7 KiB)
BIN (modified image, path not shown) 29 KiB → 5.9 KiB
BIN autogpt_platform/frontend/public/integrations/llama_api.png (new file, 21 KiB)
BIN (modified image, path not shown) 53 KiB → 12 KiB
BIN (modified image, path not shown) 28 KiB → 8.3 KiB
BIN (modified image, path not shown) 41 KiB → 15 KiB
BIN (modified image, path not shown) 18 KiB → 5.8 KiB
BIN (modified image, path not shown) 76 KiB → 21 KiB
BIN autogpt_platform/frontend/public/integrations/nvidia.png (new file, 14 KiB)
BIN autogpt_platform/frontend/public/integrations/ollama.png (new file, 18 KiB)
BIN autogpt_platform/frontend/public/integrations/open_router.png (new file, 9.2 KiB)
BIN autogpt_platform/frontend/public/integrations/openai.png (new file, 21 KiB)
BIN (modified image, path not shown) 12 KiB → 3.0 KiB
BIN (modified image, path not shown) 14 KiB → 4.0 KiB
BIN (modified image, path not shown) 15 KiB → 4.5 KiB
BIN autogpt_platform/frontend/public/integrations/replicate.png (new file, 2.4 KiB)
BIN autogpt_platform/frontend/public/integrations/revid.png (new file, 31 KiB)
BIN autogpt_platform/frontend/public/integrations/screenshotone.png (new file, 14 KiB)
BIN (modified image, path not shown) 4.3 KiB → 2.9 KiB
BIN autogpt_platform/frontend/public/integrations/slant3d.png (new file, 3.4 KiB)
BIN autogpt_platform/frontend/public/integrations/smartlead.png (new file, 3.0 KiB)