mirror of https://github.com/Significant-Gravitas/AutoGPT.git
synced 2026-04-08 03:00:28 -04:00

Compare commits: aryshare-r... ... swiftyos/a...
1 commit

| Author | SHA1 | Date |
|---|---|---|
|  | 6a9a5b7161 |  |
@@ -34,7 +34,6 @@ jobs:
          python -m prisma migrate deploy
        env:
          DATABASE_URL: ${{ secrets.BACKEND_DATABASE_URL }}
          DIRECT_URL: ${{ secrets.BACKEND_DATABASE_URL }}

  trigger:

@@ -36,7 +36,6 @@ jobs:
          python -m prisma migrate deploy
        env:
          DATABASE_URL: ${{ secrets.BACKEND_DATABASE_URL }}
          DIRECT_URL: ${{ secrets.BACKEND_DATABASE_URL }}

  trigger:
    needs: migrate
33  .github/workflows/platform-backend-ci.yml  vendored
@@ -80,35 +80,18 @@ jobs:

      - name: Install Poetry (Unix)
        run: |
          # Extract Poetry version from backend/poetry.lock
          HEAD_POETRY_VERSION=$(head -n 1 poetry.lock | grep -oP '(?<=Poetry )[0-9]+\.[0-9]+\.[0-9]+')
          echo "Found Poetry version ${HEAD_POETRY_VERSION} in backend/poetry.lock"

          if [ -n "$BASE_REF" ]; then
            BASE_BRANCH=${BASE_REF/refs\/heads\//}
            BASE_POETRY_VERSION=$((git show "origin/$BASE_BRANCH":./poetry.lock; true) | head -n 1 | grep -oP '(?<=Poetry )[0-9]+\.[0-9]+\.[0-9]+')
            echo "Found Poetry version ${BASE_POETRY_VERSION} in backend/poetry.lock on ${BASE_REF}"
            POETRY_VERSION=$(printf '%s\n' "$HEAD_POETRY_VERSION" "$BASE_POETRY_VERSION" | sort -V | tail -n1)
          else
            POETRY_VERSION=$HEAD_POETRY_VERSION
          fi
          echo "Using Poetry version ${POETRY_VERSION}"

          # Install Poetry
          curl -sSL https://install.python-poetry.org | POETRY_VERSION=$POETRY_VERSION python3 -
          curl -sSL https://install.python-poetry.org | python3 -

          if [ "${{ runner.os }}" = "macOS" ]; then
            PATH="$HOME/.local/bin:$PATH"
            echo "$HOME/.local/bin" >> $GITHUB_PATH
          fi
        env:
          BASE_REF: ${{ github.base_ref || github.event.merge_group.base_ref }}

      - name: Check poetry.lock
        run: |
          poetry lock

          if ! git diff --quiet --ignore-matching-lines="^# " poetry.lock; then
          if ! git diff --quiet poetry.lock; then
            echo "Error: poetry.lock not up to date."
            echo
            git diff poetry.lock
@@ -135,7 +118,6 @@ jobs:
        run: poetry run prisma migrate dev --name updates
        env:
          DATABASE_URL: ${{ steps.supabase.outputs.DB_URL }}
          DIRECT_URL: ${{ steps.supabase.outputs.DB_URL }}

      - id: lint
        name: Run Linter
@@ -152,13 +134,12 @@ jobs:
        env:
          LOG_LEVEL: ${{ runner.debug && 'DEBUG' || 'INFO' }}
          DATABASE_URL: ${{ steps.supabase.outputs.DB_URL }}
          DIRECT_URL: ${{ steps.supabase.outputs.DB_URL }}
          SUPABASE_URL: ${{ steps.supabase.outputs.API_URL }}
          SUPABASE_SERVICE_ROLE_KEY: ${{ steps.supabase.outputs.SERVICE_ROLE_KEY }}
          SUPABASE_JWT_SECRET: ${{ steps.supabase.outputs.JWT_SECRET }}
          REDIS_HOST: "localhost"
          REDIS_PORT: "6379"
          REDIS_PASSWORD: "testpassword"
          REDIS_HOST: 'localhost'
          REDIS_PORT: '6379'
          REDIS_PASSWORD: 'testpassword'

        env:
          CI: true
@@ -171,8 +152,8 @@ jobs:
          # If you want to replace this, you can do so by making our entire system generate
          # new credentials for each local user and update the environment variables in
          # the backend service, docker composes, and examples
          RABBITMQ_DEFAULT_USER: "rabbitmq_user_default"
          RABBITMQ_DEFAULT_PASS: "k0VMxyIJF9S35f3x2uaw5IWAl6Y536O7"
          RABBITMQ_DEFAULT_USER: 'rabbitmq_user_default'
          RABBITMQ_DEFAULT_PASS: 'k0VMxyIJF9S35f3x2uaw5IWAl6Y536O7'

      # - name: Upload coverage reports to Codecov
      #   uses: codecov/codecov-action@v4
@@ -16,7 +16,7 @@ jobs:
          # operations-per-run: 5000
          stale-issue-message: >
            This issue has automatically been marked as _stale_ because it has not had
            any activity in the last 170 days. You can _unstale_ it by commenting or
            any activity in the last 50 days. You can _unstale_ it by commenting or
            removing the label. Otherwise, this issue will be closed in 10 days.
          stale-pr-message: >
            This pull request has automatically been marked as _stale_ because it has

@@ -25,7 +25,7 @@ jobs:
          close-issue-message: >
            This issue was closed automatically because it has been stale for 10 days
            with no activity.
          days-before-stale: 170
          days-before-stale: 100
          days-before-close: 10
          # Do not touch meta issues:
          exempt-issue-labels: meta,fridge,project management
@@ -140,7 +140,7 @@ repos:
        language: system

  - repo: https://github.com/psf/black
    rev: 24.10.0
    rev: 23.12.1
    # Black has sensible defaults, doesn't need package context, and ignores
    # everything in .gitignore, so it works fine without any config or arguments.
    hooks:
6  .vscode/launch.json  vendored
@@ -32,9 +32,9 @@
      "type": "debugpy",
      "request": "launch",
      "module": "backend.app",
      "env": {
        "OBJC_DISABLE_INITIALIZE_FORK_SAFETY": "YES"
      },
      // "env": {
      //   "ENV": "dev"
      // },
      "envFile": "${workspaceFolder}/backend/.env",
      "justMyCode": false,
      "cwd": "${workspaceFolder}/autogpt_platform/backend"
@@ -2,6 +2,9 @@
If you are reading this, you are probably looking for the full **[contribution guide]**,
which is part of our [wiki].

Also check out our [🚀 Roadmap][roadmap] for information about our priorities and associated tasks.
<!-- You can find our immediate priorities and their progress on our public [kanban board]. -->

[contribution guide]: https://github.com/Significant-Gravitas/AutoGPT/wiki/Contributing
[wiki]: https://github.com/Significant-Gravitas/AutoGPT/wiki
[roadmap]: https://github.com/Significant-Gravitas/AutoGPT/discussions/6971
@@ -15,11 +15,7 @@
> Setting up and hosting the AutoGPT Platform yourself is a technical process.
> If you'd rather something that just works, we recommend [joining the waitlist](https://bit.ly/3ZDijAI) for the cloud-hosted beta.

### Updated Setup Instructions:
We’ve moved to a fully maintained and regularly updated documentation site.

👉 [Follow the official self-hosting guide here](https://docs.agpt.co/platform/getting-started/)

https://github.com/user-attachments/assets/d04273a5-b36a-4a37-818e-f631ce72d603

This tutorial assumes you have Docker, VSCode, git and npm installed.
@@ -1,9 +1,11 @@
from .config import Settings
from .depends import requires_admin_user, requires_user
from .jwt_utils import parse_jwt_token
from .middleware import APIKeyValidator, auth_middleware
from .models import User

__all__ = [
    "Settings",
    "parse_jwt_token",
    "requires_user",
    "requires_admin_user",
@@ -1,11 +1,14 @@
import os

from dotenv import load_dotenv

load_dotenv()


class Settings:
    def __init__(self):
        self.JWT_SECRET_KEY: str = os.getenv("SUPABASE_JWT_SECRET", "")
        self.ENABLE_AUTH: bool = os.getenv("ENABLE_AUTH", "false").lower() == "true"
        self.JWT_ALGORITHM: str = "HS256"
    JWT_SECRET_KEY: str = os.getenv("SUPABASE_JWT_SECRET", "")
    ENABLE_AUTH: bool = os.getenv("ENABLE_AUTH", "false").lower() == "true"
    JWT_ALGORITHM: str = "HS256"

    @property
    def is_configured(self) -> bool:
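One side of this hunk reads the environment per instance in `__init__`, the other once at class-definition time. A minimal sketch of why that matters, assuming the variable only appears in the environment after the module is imported (class names here are illustrative, not from the diff):

```python
import os


class ClassLevel:
    # Evaluated once, when the class body runs (i.e. at import time).
    ENABLE_AUTH: bool = os.getenv("ENABLE_AUTH", "false").lower() == "true"


class InstanceLevel:
    def __init__(self):
        # Evaluated each time an instance is created, so values set later
        # (e.g. by a late load_dotenv()) are picked up.
        self.ENABLE_AUTH: bool = os.getenv("ENABLE_AUTH", "false").lower() == "true"


os.environ["ENABLE_AUTH"] = "true"   # simulate loading .env after import
print(ClassLevel.ENABLE_AUTH)        # False – captured before the env var existed
print(InstanceLevel().ENABLE_AUTH)   # True  – read at instantiation time
```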
@@ -1,6 +1,6 @@
import fastapi

from .config import settings
from .config import Settings
from .middleware import auth_middleware
from .models import DEFAULT_USER_ID, User

@@ -17,7 +17,7 @@ def requires_admin_user(
def verify_user(payload: dict | None, admin_only: bool) -> User:
    if not payload:
        if settings.ENABLE_AUTH:
        if Settings.ENABLE_AUTH:
            raise fastapi.HTTPException(
                status_code=401, detail="Authorization header is missing"
            )
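For context, `requires_user` and `requires_admin_user` are consumed as FastAPI dependencies. A minimal sketch, assuming the package is importable as `autogpt_libs.auth` and that `User` exposes a `user_id` field (route paths and field names are illustrative):

```python
import fastapi

from autogpt_libs.auth import User, requires_admin_user, requires_user  # assumed import path

app = fastapi.FastAPI()


@app.get("/me")
def read_me(user: User = fastapi.Depends(requires_user)):
    # Rejected with 401 when auth is enabled and no valid JWT is supplied.
    return {"user_id": user.user_id}


@app.get("/admin/stats")
def read_stats(admin: User = fastapi.Depends(requires_admin_user)):
    # Same check, but the token payload must also carry the admin role.
    return {"ok": True}
```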
@@ -16,7 +16,7 @@ logger = logging.getLogger(__name__)
async def auth_middleware(request: Request):
    if not settings.ENABLE_AUTH:
        # If authentication is disabled, allow the request to proceed
        logger.warning("Auth disabled")
        logger.warn("Auth disabled")
        return {}

    security = HTTPBearer()
@@ -8,7 +8,7 @@ from pydantic import Field, field_validator
from pydantic_settings import BaseSettings, SettingsConfigDict

from .filters import BelowLevelFilter
from .formatters import AGPTFormatter
from .formatters import AGPTFormatter, StructuredLoggingFormatter

LOG_DIR = Path(__file__).parent.parent.parent.parent / "logs"
LOG_FILE = "activity.log"
@@ -81,26 +81,9 @@ def configure_logging(force_cloud_logging: bool = False) -> None:
    """

    config = LoggingConfig()

    log_handlers: list[logging.Handler] = []

    # Console output handlers
    stdout = logging.StreamHandler(stream=sys.stdout)
    stdout.setLevel(config.level)
    stdout.addFilter(BelowLevelFilter(logging.WARNING))
    if config.level == logging.DEBUG:
        stdout.setFormatter(AGPTFormatter(DEBUG_LOG_FORMAT))
    else:
        stdout.setFormatter(AGPTFormatter(SIMPLE_LOG_FORMAT))

    stderr = logging.StreamHandler()
    stderr.setLevel(logging.WARNING)
    if config.level == logging.DEBUG:
        stderr.setFormatter(AGPTFormatter(DEBUG_LOG_FORMAT))
    else:
        stderr.setFormatter(AGPTFormatter(SIMPLE_LOG_FORMAT))

    log_handlers += [stdout, stderr]

    # Cloud logging setup
    if config.enable_cloud_logging or force_cloud_logging:
        import google.cloud.logging

@@ -114,7 +97,28 @@ def configure_logging(force_cloud_logging: bool = False) -> None:
            transport=SyncTransport,
        )
        cloud_handler.setLevel(config.level)
        cloud_handler.setFormatter(StructuredLoggingFormatter())
        log_handlers.append(cloud_handler)
        print("Cloud logging enabled")
    else:
        # Console output handlers
        stdout = logging.StreamHandler(stream=sys.stdout)
        stdout.setLevel(config.level)
        stdout.addFilter(BelowLevelFilter(logging.WARNING))
        if config.level == logging.DEBUG:
            stdout.setFormatter(AGPTFormatter(DEBUG_LOG_FORMAT))
        else:
            stdout.setFormatter(AGPTFormatter(SIMPLE_LOG_FORMAT))

        stderr = logging.StreamHandler()
        stderr.setLevel(logging.WARNING)
        if config.level == logging.DEBUG:
            stderr.setFormatter(AGPTFormatter(DEBUG_LOG_FORMAT))
        else:
            stderr.setFormatter(AGPTFormatter(SIMPLE_LOG_FORMAT))

        log_handlers += [stdout, stderr]
        print("Console logging enabled")

    # File logging setup
    if config.enable_file_logging:

@@ -152,6 +156,7 @@ def configure_logging(force_cloud_logging: bool = False) -> None:
    error_log_handler.setLevel(logging.ERROR)
    error_log_handler.setFormatter(AGPTFormatter(DEBUG_LOG_FORMAT, no_color=True))
    log_handlers.append(error_log_handler)
    print("File logging enabled")

    # Configure the root logger
    logging.basicConfig(
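A minimal calling sketch for the handler setup above, assuming the function is importable as shown (the module path is an assumption):

```python
import logging

from autogpt_libs.logging.config import configure_logging  # assumed module path

# Installs the console handlers (and, when enabled, cloud/file handlers) on the root logger.
configure_logging()

logging.getLogger(__name__).info("logging configured")
```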
@@ -1,6 +1,7 @@
import logging

from colorama import Fore, Style
from google.cloud.logging_v2.handlers import CloudLoggingFilter, StructuredLogHandler

from .utils import remove_color_codes

@@ -79,3 +80,16 @@ class AGPTFormatter(FancyConsoleFormatter):
            return remove_color_codes(super().format(record))
        else:
            return super().format(record)


class StructuredLoggingFormatter(StructuredLogHandler, logging.Formatter):
    def __init__(self):
        # Set up CloudLoggingFilter to add diagnostic info to the log records
        self.cloud_logging_filter = CloudLoggingFilter()

        # Init StructuredLogHandler
        super().__init__()

    def format(self, record: logging.LogRecord) -> str:
        self.cloud_logging_filter.filter(record)
        return super().format(record)
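Elsewhere in this change the new formatter is attached to the cloud handler; the same class can also be exercised locally with a plain `StreamHandler`. A minimal sketch, assuming the module path below:

```python
import logging

from autogpt_libs.logging.formatters import StructuredLoggingFormatter  # assumed module path

handler = logging.StreamHandler()
# Each record is rendered as a structured (JSON) payload suitable for Cloud Logging.
handler.setFormatter(StructuredLoggingFormatter())

logger = logging.getLogger("structured-demo")
logger.addHandler(handler)
logger.setLevel(logging.INFO)
logger.info("hello from the structured formatter")
```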
@@ -2,7 +2,6 @@ import logging
import re
from typing import Any

import uvicorn.config
from colorama import Fore


@@ -26,14 +25,3 @@ def print_attribute(
        "color": value_color,
    },
)


def generate_uvicorn_config():
    """
    Generates a uvicorn logging config that silences uvicorn's default logging and tells it to use the native logging module.
    """
    log_config = dict(uvicorn.config.LOGGING_CONFIG)
    log_config["loggers"]["uvicorn"] = {"handlers": []}
    log_config["loggers"]["uvicorn.error"] = {"handlers": []}
    log_config["loggers"]["uvicorn.access"] = {"handlers": []}
    return log_config
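A minimal usage sketch for `generate_uvicorn_config()`, assuming the resulting dict is handed to uvicorn's `log_config` parameter (the module path and app reference are illustrative):

```python
import uvicorn

from autogpt_libs.logging.utils import generate_uvicorn_config  # assumed module path

# With empty handler lists, uvicorn's loggers propagate to the root logger,
# so the handlers installed by configure_logging() format all server output.
uvicorn.run(
    "backend.app:app",  # illustrative app reference
    host="0.0.0.0",
    port=8000,
    log_config=generate_uvicorn_config(),
)
```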
@@ -1,59 +1,20 @@
import inspect
import threading
from typing import Awaitable, Callable, ParamSpec, TypeVar, cast, overload
from typing import Callable, ParamSpec, TypeVar

P = ParamSpec("P")
R = TypeVar("R")


@overload
def thread_cached(func: Callable[P, Awaitable[R]]) -> Callable[P, Awaitable[R]]: ...


@overload
def thread_cached(func: Callable[P, R]) -> Callable[P, R]: ...


def thread_cached(
    func: Callable[P, R] | Callable[P, Awaitable[R]],
) -> Callable[P, R] | Callable[P, Awaitable[R]]:
def thread_cached(func: Callable[P, R]) -> Callable[P, R]:
    thread_local = threading.local()

    def _clear():
        if hasattr(thread_local, "cache"):
            del thread_local.cache
    def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
        cache = getattr(thread_local, "cache", None)
        if cache is None:
            cache = thread_local.cache = {}
        key = (args, tuple(sorted(kwargs.items())))
        if key not in cache:
            cache[key] = func(*args, **kwargs)
        return cache[key]

    if inspect.iscoroutinefunction(func):

        async def async_wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
            cache = getattr(thread_local, "cache", None)
            if cache is None:
                cache = thread_local.cache = {}
            key = (args, tuple(sorted(kwargs.items())))
            if key not in cache:
                cache[key] = await cast(Callable[P, Awaitable[R]], func)(
                    *args, **kwargs
                )
            return cache[key]

        setattr(async_wrapper, "clear_cache", _clear)
        return async_wrapper

    else:

        def sync_wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
            cache = getattr(thread_local, "cache", None)
            if cache is None:
                cache = thread_local.cache = {}
            key = (args, tuple(sorted(kwargs.items())))
            if key not in cache:
                cache[key] = func(*args, **kwargs)
            return cache[key]

        setattr(sync_wrapper, "clear_cache", _clear)
        return sync_wrapper


def clear_thread_cache(func: Callable) -> None:
    if clear := getattr(func, "clear_cache", None):
        clear()
    return wrapper
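A minimal usage sketch for the overload-aware version of the decorator, assuming it is importable from the cache module as below (the decorated function is illustrative):

```python
import threading

from autogpt_libs.utils.cache import clear_thread_cache, thread_cached  # assumed path


@thread_cached
def expensive(x: int) -> int:
    print(f"computing {x} on {threading.current_thread().name}")
    return x * x


expensive(3)  # prints "computing 3 ..." and caches the result for this thread
expensive(3)  # served from the per-thread cache, nothing is printed

clear_thread_cache(expensive)
expensive(3)  # recomputed after the cache was cleared
```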
@@ -31,7 +31,7 @@ class RedisKeyedMutex:
        try:
            yield
        finally:
            if lock.locked() and lock.owned():
            if lock.locked():
                lock.release()

    def acquire(self, key: Any) -> "RedisLock":
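For context on the two guards above: with redis-py, `locked()` only reports that some client currently holds the key, while `owned()` checks the stored token against this lock instance, so `release()` is never called on a lock another worker has since taken over (which would raise `LockNotOwnedError`). A minimal sketch, assuming a local Redis on the default port:

```python
import redis
from redis.lock import Lock

client = redis.Redis(host="localhost", port=6379)
lock = Lock(client, "example-key", timeout=1)

lock.acquire(blocking=True)
# ... if the 1s timeout expires here, another worker may acquire the same key ...
if lock.locked() and lock.owned():
    # locked(): the key exists in Redis; owned(): its token matches *this* lock,
    # so we only release a lock that still belongs to us.
    lock.release()
```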
810  autogpt_platform/autogpt_libs/poetry.lock  generated
@@ -14,104 +14,113 @@ files = [

[[package]]
name = "aiohttp"
version = "3.11.15"
version = "3.10.5"
description = "Async http client/server framework (asyncio)"
optional = false
python-versions = ">=3.9"
python-versions = ">=3.8"
groups = ["main"]
files = [
    {file = "aiohttp-3.11.15.tar.gz", hash = "sha256:b9b9a1e592ac8fcc4584baea240e41f77415e0de98932fdf19565aa3b6a02d0b"},
    {file = "aiohttp-3.10.5.tar.gz", hash = "sha256:f071854b47d39591ce9a17981c46790acb30518e2f83dfca8db2dfa091178691"},
]

[package.dependencies]
aiohappyeyeballs = ">=2.3.0"
aiosignal = ">=1.1.2"
async-timeout = {version = ">=4.0,<6.0", markers = "python_version < \"3.11\""}
async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""}
attrs = ">=17.3.0"
frozenlist = ">=1.1.1"
multidict = ">=4.5,<7.0"
propcache = ">=0.2.0"
yarl = ">=1.17.0,<2.0"
yarl = ">=1.0,<2.0"

[package.extras]
speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.2.0) ; sys_platform == \"linux\" or sys_platform == \"darwin\"", "brotlicffi ; platform_python_implementation != \"CPython\""]
@@ -562,19 +571,19 @@ protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4

[[package]]
name = "google-cloud-audit-log"
version = "0.3.2"
version = "0.3.0"
description = "Google Cloud Audit Protos"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
    {file = "google_cloud_audit_log-0.3.2-py3-none-any.whl", hash = "sha256:daaedfb947a0d77f524e1bd2b560242ab4836fe1afd6b06b92f152b9658554ed"},
    {file = "google_cloud_audit_log-0.3.2.tar.gz", hash = "sha256:2598f1533a7d7cdd6c7bf448c12e5519c1d53162d78784e10bcdd1df67791bc3"},
    {file = "google_cloud_audit_log-0.3.0-py2.py3-none-any.whl", hash = "sha256:8340793120a1d5aa143605def8704ecdcead15106f754ef1381ae3bab533722f"},
    {file = "google_cloud_audit_log-0.3.0.tar.gz", hash = "sha256:901428b257020d8c1d1133e0fa004164a555e5a395c7ca3cdbb8486513df3a65"},
]

[package.dependencies]
googleapis-common-protos = ">=1.56.2,<2.0.0"
protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0"
googleapis-common-protos = ">=1.56.2,<2.0dev"
protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev"

[[package]]
name = "google-cloud-core"
@@ -597,30 +606,30 @@ grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"]

[[package]]
name = "google-cloud-logging"
version = "3.12.1"
version = "3.11.4"
description = "Stackdriver Logging API client library"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
    {file = "google_cloud_logging-3.12.1-py2.py3-none-any.whl", hash = "sha256:6817878af76ec4e7568976772839ab2c43ddfd18fbbf2ce32b13ef549cd5a862"},
    {file = "google_cloud_logging-3.12.1.tar.gz", hash = "sha256:36efc823985055b203904e83e1c8f9f999b3c64270bcda39d57386ca4effd678"},
    {file = "google_cloud_logging-3.11.4-py2.py3-none-any.whl", hash = "sha256:1d465ac62df29fb94bba4d6b4891035e57d573d84541dd8a40eebbc74422b2f0"},
    {file = "google_cloud_logging-3.11.4.tar.gz", hash = "sha256:32305d989323f3c58603044e2ac5d9cf23e9465ede511bbe90b4309270d3195c"},
]

[package.dependencies]
google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0", extras = ["grpc"]}
google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0"
google-cloud-appengine-logging = ">=0.1.3,<2.0.0"
google-cloud-audit-log = ">=0.3.1,<1.0.0"
google-cloud-core = ">=2.0.0,<3.0.0"
grpc-google-iam-v1 = ">=0.12.4,<1.0.0"
google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]}
google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev"
google-cloud-appengine-logging = ">=0.1.3,<2.0.0dev"
google-cloud-audit-log = ">=0.2.4,<1.0.0dev"
google-cloud-core = ">=2.0.0,<3.0.0dev"
grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev"
opentelemetry-api = ">=1.9.0"
proto-plus = [
    {version = ">=1.25.0,<2.0.0", markers = "python_version >= \"3.13\""},
    {version = ">=1.22.2,<2.0.0", markers = "python_version >= \"3.11\" and python_version < \"3.13\""},
    {version = ">=1.22.0,<2.0.0", markers = "python_version < \"3.11\""},
    {version = ">=1.25.0,<2.0.0dev", markers = "python_version >= \"3.13\""},
    {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\" and python_version < \"3.13\""},
    {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""},
]
protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0"
protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev"

[[package]]
name = "googleapis-common-protos"
@@ -1062,114 +1071,6 @@ httpx = {version = ">=0.26,<0.29", extras = ["http2"]}
pydantic = ">=1.9,<3.0"
strenum = {version = ">=0.4.9,<0.5.0", markers = "python_version < \"3.11\""}

[[package]]
name = "propcache"
version = "0.3.1"
description = "Accelerated property cache"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
|
||||
{file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d8e309ff9a0503ef70dc9a0ebd3e69cf7b3894c9ae2ae81fc10943c37762458"},
|
||||
{file = "propcache-0.3.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b655032b202028a582d27aeedc2e813299f82cb232f969f87a4fde491a233f11"},
|
||||
{file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9f64d91b751df77931336b5ff7bafbe8845c5770b06630e27acd5dbb71e1931c"},
|
||||
{file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:19a06db789a4bd896ee91ebc50d059e23b3639c25d58eb35be3ca1cbe967c3bf"},
|
||||
{file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:bef100c88d8692864651b5f98e871fb090bd65c8a41a1cb0ff2322db39c96c27"},
|
||||
{file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:87380fb1f3089d2a0b8b00f006ed12bd41bd858fabfa7330c954c70f50ed8757"},
|
||||
{file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e474fc718e73ba5ec5180358aa07f6aded0ff5f2abe700e3115c37d75c947e18"},
|
||||
{file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:17d1c688a443355234f3c031349da69444be052613483f3e4158eef751abcd8a"},
|
||||
{file = "propcache-0.3.1-cp313-cp313t-win32.whl", hash = "sha256:359e81a949a7619802eb601d66d37072b79b79c2505e6d3fd8b945538411400d"},
|
||||
{file = "propcache-0.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:e7fb9a84c9abbf2b2683fa3e7b0d7da4d8ecf139a1c635732a8bda29c5214b0e"},
|
||||
{file = "propcache-0.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ed5f6d2edbf349bd8d630e81f474d33d6ae5d07760c44d33cd808e2f5c8f4ae6"},
|
||||
{file = "propcache-0.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:668ddddc9f3075af019f784456267eb504cb77c2c4bd46cc8402d723b4d200bf"},
|
||||
{file = "propcache-0.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0c86e7ceea56376216eba345aa1fc6a8a6b27ac236181f840d1d7e6a1ea9ba5c"},
|
||||
{file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83be47aa4e35b87c106fc0c84c0fc069d3f9b9b06d3c494cd404ec6747544894"},
|
||||
{file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:27c6ac6aa9fc7bc662f594ef380707494cb42c22786a558d95fcdedb9aa5d035"},
|
||||
{file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64a956dff37080b352c1c40b2966b09defb014347043e740d420ca1eb7c9b908"},
|
||||
{file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82de5da8c8893056603ac2d6a89eb8b4df49abf1a7c19d536984c8dd63f481d5"},
|
||||
{file = "propcache-0.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c3c3a203c375b08fd06a20da3cf7aac293b834b6f4f4db71190e8422750cca5"},
|
||||
{file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b303b194c2e6f171cfddf8b8ba30baefccf03d36a4d9cab7fd0bb68ba476a3d7"},
|
||||
{file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:916cd229b0150129d645ec51614d38129ee74c03293a9f3f17537be0029a9641"},
|
||||
{file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a461959ead5b38e2581998700b26346b78cd98540b5524796c175722f18b0294"},
|
||||
{file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:069e7212890b0bcf9b2be0a03afb0c2d5161d91e1bf51569a64f629acc7defbf"},
|
||||
{file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ef2e4e91fb3945769e14ce82ed53007195e616a63aa43b40fb7ebaaf907c8d4c"},
|
||||
{file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8638f99dca15b9dff328fb6273e09f03d1c50d9b6512f3b65a4154588a7595fe"},
|
||||
{file = "propcache-0.3.1-cp39-cp39-win32.whl", hash = "sha256:6f173bbfe976105aaa890b712d1759de339d8a7cef2fc0a1714cc1a1e1c47f64"},
|
||||
{file = "propcache-0.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:603f1fe4144420374f1a69b907494c3acbc867a581c2d49d4175b0de7cc64566"},
|
||||
{file = "propcache-0.3.1-py3-none-any.whl", hash = "sha256:9a8ecf38de50a7f518c21568c80f985e776397b902f1ce0b01f799aba1608b40"},
|
||||
{file = "propcache-0.3.1.tar.gz", hash = "sha256:40d980c33765359098837527e18eddefc9a24cea5b45e078a7f3bb5b032c6ecf"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "proto-plus"
|
||||
version = "1.26.0"
|
||||
@@ -1238,21 +1139,20 @@ pyasn1 = ">=0.4.6,<0.7.0"
|
||||
|
||||
[[package]]
|
||||
name = "pydantic"
|
||||
version = "2.11.4"
|
||||
version = "2.10.6"
|
||||
description = "Data validation using Python type hints"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "pydantic-2.11.4-py3-none-any.whl", hash = "sha256:d9615eaa9ac5a063471da949c8fc16376a84afb5024688b3ff885693506764eb"},
|
||||
{file = "pydantic-2.11.4.tar.gz", hash = "sha256:32738d19d63a226a52eed76645a98ee07c1f410ee41d93b4afbfa85ed8111c2d"},
|
||||
{file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"},
|
||||
{file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
annotated-types = ">=0.6.0"
|
||||
pydantic-core = "2.33.2"
|
||||
pydantic-core = "2.27.2"
|
||||
typing-extensions = ">=4.12.2"
|
||||
typing-inspection = ">=0.4.0"
|
||||
|
||||
[package.extras]
|
||||
email = ["email-validator (>=2.0.0)"]
|
||||
@@ -1260,111 +1160,112 @@ timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows
|
||||
|
||||
[[package]]
|
||||
name = "pydantic-core"
|
||||
version = "2.33.2"
|
||||
version = "2.27.2"
|
||||
description = "Core functionality for Pydantic validation and serialization"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"},
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"},
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"},
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"},
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"},
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"},
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"},
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"},
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"},
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"},
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"},
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"},
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"},
|
||||
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"},
|
||||
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"},
|
||||
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"},
|
||||
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"},
|
||||
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"},
|
||||
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"},
|
||||
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"},
|
||||
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"},
|
||||
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"},
|
||||
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"},
|
||||
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"},
|
||||
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"},
|
||||
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"},
|
||||
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"},
|
||||
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"},
|
||||
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"},
|
||||
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"},
|
||||
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"},
|
||||
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"},
|
||||
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"},
|
||||
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"},
|
||||
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"},
|
||||
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"},
|
||||
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"},
|
||||
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"},
|
||||
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"},
|
||||
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"},
|
||||
{file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"},
|
||||
{file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"},
|
||||
{file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"},
|
||||
{file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"},
|
||||
{file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"},
|
||||
{file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"},
|
||||
{file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"},
|
||||
{file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"},
|
||||
{file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"},
|
||||
{file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"},
|
||||
{file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"},
|
||||
{file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"},
|
||||
{file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"},
|
||||
{file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"},
|
||||
{file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"},
|
||||
{file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"},
|
||||
{file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"},
|
||||
{file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"},
|
||||
{file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"},
|
||||
{file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"},
|
||||
{file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"},
|
||||
{file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"},
|
||||
{file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"},
|
||||
{file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"},
|
||||
{file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"},
|
||||
{file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"},
|
||||
{file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"},
|
||||
{file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"},
|
||||
{file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"},
|
||||
{file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"},
|
||||
{file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"},
|
||||
{file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"},
|
||||
{file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"},
|
||||
{file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"},
|
||||
{file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"},
|
||||
{file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"},
|
||||
{file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"},
|
||||
{file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"},
|
||||
{file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"},
|
||||
{file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"},
|
||||
{file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"},
|
||||
{file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"},
|
||||
{file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"},
|
||||
{file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"},
|
||||
{file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"},
|
||||
{file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"},
|
||||
{file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"},
|
||||
{file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"},
|
||||
{file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"},
|
||||
{file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"},
|
||||
{file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"},
|
||||
{file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"},
|
||||
{file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"},
|
||||
{file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"},
|
||||
{file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"},
|
||||
{file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"},
|
||||
{file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"},
|
||||
{file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"},
|
||||
{file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"},
|
||||
{file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"},
|
||||
{file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"},
|
||||
{file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"},
|
||||
{file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"},
|
||||
{file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"},
|
||||
{file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"},
|
||||
{file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"},
|
||||
{file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"},
|
||||
{file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"},
|
||||
{file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"},
|
||||
{file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"},
|
||||
{file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"},
|
||||
{file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"},
|
||||
{file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"},
|
||||
{file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"},
|
||||
{file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"},
|
||||
{file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"},
|
||||
{file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"},
|
||||
{file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"},
|
||||
{file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"},
|
||||
{file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"},
|
||||
{file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"},
|
||||
{file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"},
|
||||
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"},
|
||||
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"},
|
||||
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"},
|
||||
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"},
|
||||
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"},
|
||||
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"},
|
||||
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"},
|
||||
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"},
|
||||
{file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"},
|
||||
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"},
|
||||
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"},
|
||||
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"},
|
||||
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"},
|
||||
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"},
|
||||
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"},
|
||||
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"},
|
||||
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"},
|
||||
{file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"},
|
||||
{file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1372,25 +1273,22 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
|
||||
|
||||
[[package]]
|
||||
name = "pydantic-settings"
|
||||
version = "2.9.1"
|
||||
version = "2.7.1"
|
||||
description = "Settings management using Pydantic"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "pydantic_settings-2.9.1-py3-none-any.whl", hash = "sha256:59b4f431b1defb26fe620c71a7d3968a710d719f5f4cdbbdb7926edeb770f6ef"},
|
||||
{file = "pydantic_settings-2.9.1.tar.gz", hash = "sha256:c509bf79d27563add44e8446233359004ed85066cd096d8b510f715e6ef5d268"},
|
||||
{file = "pydantic_settings-2.7.1-py3-none-any.whl", hash = "sha256:590be9e6e24d06db33a4262829edef682500ef008565a969c73d39d5f8bfb3fd"},
|
||||
{file = "pydantic_settings-2.7.1.tar.gz", hash = "sha256:10c9caad35e64bfb3c2fbf70a078c0e25cc92499782e5200747f942a065dec93"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
pydantic = ">=2.7.0"
|
||||
python-dotenv = ">=0.21.0"
|
||||
typing-inspection = ">=0.4.0"
|
||||
|
||||
[package.extras]
|
||||
aws-secrets-manager = ["boto3 (>=1.35.0)", "boto3-stubs[secretsmanager]"]
|
||||
azure-key-vault = ["azure-identity (>=1.16.0)", "azure-keyvault-secrets (>=4.8.0)"]
|
||||
gcp-secret-manager = ["google-cloud-secret-manager (>=2.23.1)"]
|
||||
toml = ["tomli (>=2.0.1)"]
|
||||
yaml = ["pyyaml (>=6.0.1)"]
|
||||
|
||||
@@ -1437,14 +1335,14 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments
|
||||
|
||||
[[package]]
|
||||
name = "pytest-asyncio"
|
||||
version = "0.26.0"
|
||||
version = "0.25.3"
|
||||
description = "Pytest support for asyncio"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "pytest_asyncio-0.26.0-py3-none-any.whl", hash = "sha256:7b51ed894f4fbea1340262bdae5135797ebbe21d8638978e35d31c6d19f72fb0"},
|
||||
{file = "pytest_asyncio-0.26.0.tar.gz", hash = "sha256:c4df2a697648241ff39e7f0e4a73050b03f123f760673956cf0d72a4990e312f"},
|
||||
{file = "pytest_asyncio-0.25.3-py3-none-any.whl", hash = "sha256:9e89518e0f9bd08928f97a3482fdc4e244df17529460bc038291ccaf8f85c7c3"},
|
||||
{file = "pytest_asyncio-0.25.3.tar.gz", hash = "sha256:fc1da2cf9f125ada7e710b4ddad05518d4cee187ae9412e9ac9271003497f07a"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1504,21 +1402,21 @@ cli = ["click (>=5.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "realtime"
|
||||
version = "2.4.2"
|
||||
version = "2.0.2"
|
||||
description = ""
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "realtime-2.4.2-py3-none-any.whl", hash = "sha256:0cc1b4a097acf9c0bd3a2f1998170de47744574c606617285113ddb3021e54ca"},
|
||||
{file = "realtime-2.4.2.tar.gz", hash = "sha256:760308d5310533f65a9098e0b482a518f6ad2f3c0f2723e83cf5856865bafc5d"},
|
||||
{file = "realtime-2.0.2-py3-none-any.whl", hash = "sha256:2634c915bc38807f2013f21e8bcc4d2f79870dfd81460ddb9393883d0489928a"},
|
||||
{file = "realtime-2.0.2.tar.gz", hash = "sha256:519da9325b3b8102139d51785013d592f6b2403d81fa21d838a0b0234723ed7d"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
aiohttp = ">=3.11.14,<4.0.0"
|
||||
aiohttp = ">=3.10.2,<4.0.0"
|
||||
python-dateutil = ">=2.8.1,<3.0.0"
|
||||
typing-extensions = ">=4.12.2,<5.0.0"
|
||||
websockets = ">=11,<15"
|
||||
websockets = ">=11,<13"
|
||||
|
||||
[[package]]
|
||||
name = "redis"
|
||||
@@ -1578,30 +1476,30 @@ pyasn1 = ">=0.1.3"
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.11.10"
|
||||
version = "0.9.10"
|
||||
description = "An extremely fast Python linter and code formatter, written in Rust."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "ruff-0.11.10-py3-none-linux_armv6l.whl", hash = "sha256:859a7bfa7bc8888abbea31ef8a2b411714e6a80f0d173c2a82f9041ed6b50f58"},
|
||||
{file = "ruff-0.11.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:968220a57e09ea5e4fd48ed1c646419961a0570727c7e069842edd018ee8afed"},
|
||||
{file = "ruff-0.11.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:1067245bad978e7aa7b22f67113ecc6eb241dca0d9b696144256c3a879663bca"},
|
||||
{file = "ruff-0.11.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4854fd09c7aed5b1590e996a81aeff0c9ff51378b084eb5a0b9cd9518e6cff2"},
|
||||
{file = "ruff-0.11.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b4564e9f99168c0f9195a0fd5fa5928004b33b377137f978055e40008a082c5"},
|
||||
{file = "ruff-0.11.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b6a9cc5b62c03cc1fea0044ed8576379dbaf751d5503d718c973d5418483641"},
|
||||
{file = "ruff-0.11.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:607ecbb6f03e44c9e0a93aedacb17b4eb4f3563d00e8b474298a201622677947"},
|
||||
{file = "ruff-0.11.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b3a522fa389402cd2137df9ddefe848f727250535c70dafa840badffb56b7a4"},
|
||||
{file = "ruff-0.11.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f071b0deed7e9245d5820dac235cbdd4ef99d7b12ff04c330a241ad3534319f"},
|
||||
{file = "ruff-0.11.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a60e3a0a617eafba1f2e4186d827759d65348fa53708ca547e384db28406a0b"},
|
||||
{file = "ruff-0.11.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:da8ec977eaa4b7bf75470fb575bea2cb41a0e07c7ea9d5a0a97d13dbca697bf2"},
|
||||
{file = "ruff-0.11.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ddf8967e08227d1bd95cc0851ef80d2ad9c7c0c5aab1eba31db49cf0a7b99523"},
|
||||
{file = "ruff-0.11.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5a94acf798a82db188f6f36575d80609072b032105d114b0f98661e1679c9125"},
|
||||
{file = "ruff-0.11.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3afead355f1d16d95630df28d4ba17fb2cb9c8dfac8d21ced14984121f639bad"},
|
||||
{file = "ruff-0.11.10-py3-none-win32.whl", hash = "sha256:dc061a98d32a97211af7e7f3fa1d4ca2fcf919fb96c28f39551f35fc55bdbc19"},
|
||||
{file = "ruff-0.11.10-py3-none-win_amd64.whl", hash = "sha256:5cc725fbb4d25b0f185cb42df07ab6b76c4489b4bfb740a175f3a59c70e8a224"},
|
||||
{file = "ruff-0.11.10-py3-none-win_arm64.whl", hash = "sha256:ef69637b35fb8b210743926778d0e45e1bffa850a7c61e428c6b971549b5f5d1"},
|
||||
{file = "ruff-0.11.10.tar.gz", hash = "sha256:d522fb204b4959909ecac47da02830daec102eeb100fb50ea9554818d47a5fa6"},
|
||||
{file = "ruff-0.9.10-py3-none-linux_armv6l.whl", hash = "sha256:eb4d25532cfd9fe461acc83498361ec2e2252795b4f40b17e80692814329e42d"},
|
||||
{file = "ruff-0.9.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:188a6638dab1aa9bb6228a7302387b2c9954e455fb25d6b4470cb0641d16759d"},
|
||||
{file = "ruff-0.9.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5284dcac6b9dbc2fcb71fdfc26a217b2ca4ede6ccd57476f52a587451ebe450d"},
|
||||
{file = "ruff-0.9.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47678f39fa2a3da62724851107f438c8229a3470f533894b5568a39b40029c0c"},
|
||||
{file = "ruff-0.9.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99713a6e2766b7a17147b309e8c915b32b07a25c9efd12ada79f217c9c778b3e"},
|
||||
{file = "ruff-0.9.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:524ee184d92f7c7304aa568e2db20f50c32d1d0caa235d8ddf10497566ea1a12"},
|
||||
{file = "ruff-0.9.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:df92aeac30af821f9acf819fc01b4afc3dfb829d2782884f8739fb52a8119a16"},
|
||||
{file = "ruff-0.9.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de42e4edc296f520bb84954eb992a07a0ec5a02fecb834498415908469854a52"},
|
||||
{file = "ruff-0.9.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d257f95b65806104b6b1ffca0ea53f4ef98454036df65b1eda3693534813ecd1"},
|
||||
{file = "ruff-0.9.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60dec7201c0b10d6d11be00e8f2dbb6f40ef1828ee75ed739923799513db24c"},
|
||||
{file = "ruff-0.9.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d838b60007da7a39c046fcdd317293d10b845001f38bcb55ba766c3875b01e43"},
|
||||
{file = "ruff-0.9.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ccaf903108b899beb8e09a63ffae5869057ab649c1e9231c05ae354ebc62066c"},
|
||||
{file = "ruff-0.9.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:f9567d135265d46e59d62dc60c0bfad10e9a6822e231f5b24032dba5a55be6b5"},
|
||||
{file = "ruff-0.9.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5f202f0d93738c28a89f8ed9eaba01b7be339e5d8d642c994347eaa81c6d75b8"},
|
||||
{file = "ruff-0.9.10-py3-none-win32.whl", hash = "sha256:bfb834e87c916521ce46b1788fbb8484966e5113c02df216680102e9eb960029"},
|
||||
{file = "ruff-0.9.10-py3-none-win_amd64.whl", hash = "sha256:f2160eeef3031bf4b17df74e307d4c5fb689a6f3a26a2de3f7ef4044e3c484f1"},
|
||||
{file = "ruff-0.9.10-py3-none-win_arm64.whl", hash = "sha256:5fd804c0327a5e5ea26615550e706942f348b197d5475ff34c19733aee4b2e69"},
|
||||
{file = "ruff-0.9.10.tar.gz", hash = "sha256:9bacb735d7bada9cfb0f2c227d3658fc443d90a727b47f206fb33f52f3c0eac7"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1663,21 +1561,21 @@ test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"]
|
||||
|
||||
[[package]]
|
||||
name = "supabase"
|
||||
version = "2.15.1"
|
||||
version = "2.13.0"
|
||||
description = "Supabase client for Python."
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "supabase-2.15.1-py3-none-any.whl", hash = "sha256:749299cdd74ecf528f52045c1e60d9dba81cc2054656f754c0ca7fba0dd34827"},
|
||||
{file = "supabase-2.15.1.tar.gz", hash = "sha256:66e847dab9346062aa6a25b4e81ac786b972c5d4299827c57d1d5bd6a0346070"},
|
||||
{file = "supabase-2.13.0-py3-none-any.whl", hash = "sha256:6cfccc055be21dab311afc5e9d5b37f3a4966f8394703763fbc8f8e86f36eaa6"},
|
||||
{file = "supabase-2.13.0.tar.gz", hash = "sha256:452574d34bd978c8d11b5f02b0182b48e8854e511c969483c83875ec01495f11"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
gotrue = ">=2.11.0,<3.0.0"
|
||||
httpx = ">=0.26,<0.29"
|
||||
postgrest = ">0.19,<1.1"
|
||||
realtime = ">=2.4.0,<2.5.0"
|
||||
postgrest = ">=0.19,<0.20"
|
||||
realtime = ">=2.0.0,<3.0.0"
|
||||
storage3 = ">=0.10,<0.12"
|
||||
supafunc = ">=0.9,<0.10"
|
||||
|
||||
@@ -1722,21 +1620,6 @@ files = [
|
||||
{file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typing-inspection"
|
||||
version = "0.4.0"
|
||||
description = "Runtime typing introspection tools"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f"},
|
||||
{file = "typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
typing-extensions = ">=4.12.0"
|
||||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "2.2.2"
|
||||
@@ -1919,100 +1802,109 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "yarl"
|
||||
version = "1.18.3"
|
||||
version = "1.11.1"
|
||||
description = "Yet another URL library"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "yarl-1.18.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7df647e8edd71f000a5208fe6ff8c382a1de8edfbccdbbfe649d263de07d8c34"},
|
||||
{file = "yarl-1.18.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c69697d3adff5aa4f874b19c0e4ed65180ceed6318ec856ebc423aa5850d84f7"},
|
||||
{file = "yarl-1.18.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:602d98f2c2d929f8e697ed274fbadc09902c4025c5a9963bf4e9edfc3ab6f7ed"},
|
||||
{file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c654d5207c78e0bd6d749f6dae1dcbbfde3403ad3a4b11f3c5544d9906969dde"},
|
||||
{file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5094d9206c64181d0f6e76ebd8fb2f8fe274950a63890ee9e0ebfd58bf9d787b"},
|
||||
{file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35098b24e0327fc4ebdc8ffe336cee0a87a700c24ffed13161af80124b7dc8e5"},
|
||||
{file = "yarl-1.18.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3236da9272872443f81fedc389bace88408f64f89f75d1bdb2256069a8730ccc"},
|
||||
{file = "yarl-1.18.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2c08cc9b16f4f4bc522771d96734c7901e7ebef70c6c5c35dd0f10845270bcd"},
|
||||
{file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80316a8bd5109320d38eef8833ccf5f89608c9107d02d2a7f985f98ed6876990"},
|
||||
{file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:c1e1cc06da1491e6734f0ea1e6294ce00792193c463350626571c287c9a704db"},
|
||||
{file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fea09ca13323376a2fdfb353a5fa2e59f90cd18d7ca4eaa1fd31f0a8b4f91e62"},
|
||||
{file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e3b9fd71836999aad54084906f8663dffcd2a7fb5cdafd6c37713b2e72be1760"},
|
||||
{file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:757e81cae69244257d125ff31663249b3013b5dc0a8520d73694aed497fb195b"},
|
||||
{file = "yarl-1.18.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b1771de9944d875f1b98a745bc547e684b863abf8f8287da8466cf470ef52690"},
|
||||
{file = "yarl-1.18.3-cp310-cp310-win32.whl", hash = "sha256:8874027a53e3aea659a6d62751800cf6e63314c160fd607489ba5c2edd753cf6"},
|
||||
{file = "yarl-1.18.3-cp310-cp310-win_amd64.whl", hash = "sha256:93b2e109287f93db79210f86deb6b9bbb81ac32fc97236b16f7433db7fc437d8"},
|
||||
{file = "yarl-1.18.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8503ad47387b8ebd39cbbbdf0bf113e17330ffd339ba1144074da24c545f0069"},
|
||||
{file = "yarl-1.18.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02ddb6756f8f4517a2d5e99d8b2f272488e18dd0bfbc802f31c16c6c20f22193"},
|
||||
{file = "yarl-1.18.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:67a283dd2882ac98cc6318384f565bffc751ab564605959df4752d42483ad889"},
|
||||
{file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d980e0325b6eddc81331d3f4551e2a333999fb176fd153e075c6d1c2530aa8a8"},
|
||||
{file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b643562c12680b01e17239be267bc306bbc6aac1f34f6444d1bded0c5ce438ca"},
|
||||
{file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c017a3b6df3a1bd45b9fa49a0f54005e53fbcad16633870104b66fa1a30a29d8"},
|
||||
{file = "yarl-1.18.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75674776d96d7b851b6498f17824ba17849d790a44d282929c42dbb77d4f17ae"},
|
||||
{file = "yarl-1.18.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ccaa3a4b521b780a7e771cc336a2dba389a0861592bbce09a476190bb0c8b4b3"},
|
||||
{file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2d06d3005e668744e11ed80812e61efd77d70bb7f03e33c1598c301eea20efbb"},
|
||||
{file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9d41beda9dc97ca9ab0b9888cb71f7539124bc05df02c0cff6e5acc5a19dcc6e"},
|
||||
{file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ba23302c0c61a9999784e73809427c9dbedd79f66a13d84ad1b1943802eaaf59"},
|
||||
{file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6748dbf9bfa5ba1afcc7556b71cda0d7ce5f24768043a02a58846e4a443d808d"},
|
||||
{file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0b0cad37311123211dc91eadcb322ef4d4a66008d3e1bdc404808992260e1a0e"},
|
||||
{file = "yarl-1.18.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0fb2171a4486bb075316ee754c6d8382ea6eb8b399d4ec62fde2b591f879778a"},
|
||||
{file = "yarl-1.18.3-cp311-cp311-win32.whl", hash = "sha256:61b1a825a13bef4a5f10b1885245377d3cd0bf87cba068e1d9a88c2ae36880e1"},
|
||||
{file = "yarl-1.18.3-cp311-cp311-win_amd64.whl", hash = "sha256:b9d60031cf568c627d028239693fd718025719c02c9f55df0a53e587aab951b5"},
|
||||
{file = "yarl-1.18.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1dd4bdd05407ced96fed3d7f25dbbf88d2ffb045a0db60dbc247f5b3c5c25d50"},
|
||||
{file = "yarl-1.18.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7c33dd1931a95e5d9a772d0ac5e44cac8957eaf58e3c8da8c1414de7dd27c576"},
|
||||
{file = "yarl-1.18.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b411eddcfd56a2f0cd6a384e9f4f7aa3efee14b188de13048c25b5e91f1640"},
|
||||
{file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:436c4fc0a4d66b2badc6c5fc5ef4e47bb10e4fd9bf0c79524ac719a01f3607c2"},
|
||||
{file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e35ef8683211db69ffe129a25d5634319a677570ab6b2eba4afa860f54eeaf75"},
|
||||
{file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84b2deecba4a3f1a398df819151eb72d29bfeb3b69abb145a00ddc8d30094512"},
|
||||
{file = "yarl-1.18.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e5a1fea0fd4f5bfa7440a47eff01d9822a65b4488f7cff83155a0f31a2ecba"},
|
||||
{file = "yarl-1.18.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0e883008013c0e4aef84dcfe2a0b172c4d23c2669412cf5b3371003941f72bb"},
|
||||
{file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a3f356548e34a70b0172d8890006c37be92995f62d95a07b4a42e90fba54272"},
|
||||
{file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ccd17349166b1bee6e529b4add61727d3f55edb7babbe4069b5764c9587a8cc6"},
|
||||
{file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b958ddd075ddba5b09bb0be8a6d9906d2ce933aee81100db289badbeb966f54e"},
|
||||
{file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c7d79f7d9aabd6011004e33b22bc13056a3e3fb54794d138af57f5ee9d9032cb"},
|
||||
{file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4891ed92157e5430874dad17b15eb1fda57627710756c27422200c52d8a4e393"},
|
||||
{file = "yarl-1.18.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ce1af883b94304f493698b00d0f006d56aea98aeb49d75ec7d98cd4a777e9285"},
|
||||
{file = "yarl-1.18.3-cp312-cp312-win32.whl", hash = "sha256:f91c4803173928a25e1a55b943c81f55b8872f0018be83e3ad4938adffb77dd2"},
|
||||
{file = "yarl-1.18.3-cp312-cp312-win_amd64.whl", hash = "sha256:7e2ee16578af3b52ac2f334c3b1f92262f47e02cc6193c598502bd46f5cd1477"},
|
||||
{file = "yarl-1.18.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:90adb47ad432332d4f0bc28f83a5963f426ce9a1a8809f5e584e704b82685dcb"},
|
||||
{file = "yarl-1.18.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:913829534200eb0f789d45349e55203a091f45c37a2674678744ae52fae23efa"},
|
||||
{file = "yarl-1.18.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ef9f7768395923c3039055c14334ba4d926f3baf7b776c923c93d80195624782"},
|
||||
{file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a19f62ff30117e706ebc9090b8ecc79aeb77d0b1f5ec10d2d27a12bc9f66d0"},
|
||||
{file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e17c9361d46a4d5addf777c6dd5eab0715a7684c2f11b88c67ac37edfba6c482"},
|
||||
{file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a74a13a4c857a84a845505fd2d68e54826a2cd01935a96efb1e9d86c728e186"},
|
||||
{file = "yarl-1.18.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41f7ce59d6ee7741af71d82020346af364949314ed3d87553763a2df1829cc58"},
|
||||
{file = "yarl-1.18.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f52a265001d830bc425f82ca9eabda94a64a4d753b07d623a9f2863fde532b53"},
|
||||
{file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:82123d0c954dc58db301f5021a01854a85bf1f3bb7d12ae0c01afc414a882ca2"},
|
||||
{file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2ec9bbba33b2d00999af4631a3397d1fd78290c48e2a3e52d8dd72db3a067ac8"},
|
||||
{file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fbd6748e8ab9b41171bb95c6142faf068f5ef1511935a0aa07025438dd9a9bc1"},
|
||||
{file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:877d209b6aebeb5b16c42cbb377f5f94d9e556626b1bfff66d7b0d115be88d0a"},
|
||||
{file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b464c4ab4bfcb41e3bfd3f1c26600d038376c2de3297760dfe064d2cb7ea8e10"},
|
||||
{file = "yarl-1.18.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8d39d351e7faf01483cc7ff7c0213c412e38e5a340238826be7e0e4da450fdc8"},
|
||||
{file = "yarl-1.18.3-cp313-cp313-win32.whl", hash = "sha256:61ee62ead9b68b9123ec24bc866cbef297dd266175d53296e2db5e7f797f902d"},
|
||||
{file = "yarl-1.18.3-cp313-cp313-win_amd64.whl", hash = "sha256:578e281c393af575879990861823ef19d66e2b1d0098414855dd367e234f5b3c"},
|
||||
{file = "yarl-1.18.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:61e5e68cb65ac8f547f6b5ef933f510134a6bf31bb178be428994b0cb46c2a04"},
|
||||
{file = "yarl-1.18.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe57328fbc1bfd0bd0514470ac692630f3901c0ee39052ae47acd1d90a436719"},
|
||||
{file = "yarl-1.18.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a440a2a624683108a1b454705ecd7afc1c3438a08e890a1513d468671d90a04e"},
|
||||
{file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c7907c8548bcd6ab860e5f513e727c53b4a714f459b084f6580b49fa1b9cee"},
|
||||
{file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4f6450109834af88cb4cc5ecddfc5380ebb9c228695afc11915a0bf82116789"},
|
||||
{file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9ca04806f3be0ac6d558fffc2fdf8fcef767e0489d2684a21912cc4ed0cd1b8"},
|
||||
{file = "yarl-1.18.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77a6e85b90a7641d2e07184df5557132a337f136250caafc9ccaa4a2a998ca2c"},
|
||||
{file = "yarl-1.18.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6333c5a377c8e2f5fae35e7b8f145c617b02c939d04110c76f29ee3676b5f9a5"},
|
||||
{file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0b3c92fa08759dbf12b3a59579a4096ba9af8dd344d9a813fc7f5070d86bbab1"},
|
||||
{file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:4ac515b860c36becb81bb84b667466885096b5fc85596948548b667da3bf9f24"},
|
||||
{file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:045b8482ce9483ada4f3f23b3774f4e1bf4f23a2d5c912ed5170f68efb053318"},
|
||||
{file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:a4bb030cf46a434ec0225bddbebd4b89e6471814ca851abb8696170adb163985"},
|
||||
{file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:54d6921f07555713b9300bee9c50fb46e57e2e639027089b1d795ecd9f7fa910"},
|
||||
{file = "yarl-1.18.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1d407181cfa6e70077df3377938c08012d18893f9f20e92f7d2f314a437c30b1"},
|
||||
{file = "yarl-1.18.3-cp39-cp39-win32.whl", hash = "sha256:ac36703a585e0929b032fbaab0707b75dc12703766d0b53486eabd5139ebadd5"},
|
||||
{file = "yarl-1.18.3-cp39-cp39-win_amd64.whl", hash = "sha256:ba87babd629f8af77f557b61e49e7c7cac36f22f871156b91e10a6e9d4f829e9"},
|
||||
{file = "yarl-1.18.3-py3-none-any.whl", hash = "sha256:b57f4f58099328dfb26c6a771d09fb20dbbae81d20cfb66141251ea063bd101b"},
|
||||
{file = "yarl-1.18.3.tar.gz", hash = "sha256:ac1801c45cbf77b6c99242eeff4fffb5e4e73a800b5c4ad4fc0be5def634d2e1"},
|
||||
{file = "yarl-1.11.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:400cd42185f92de559d29eeb529e71d80dfbd2f45c36844914a4a34297ca6f00"},
|
||||
{file = "yarl-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8258c86f47e080a258993eed877d579c71da7bda26af86ce6c2d2d072c11320d"},
|
||||
{file = "yarl-1.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2164cd9725092761fed26f299e3f276bb4b537ca58e6ff6b252eae9631b5c96e"},
|
||||
{file = "yarl-1.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08ea567c16f140af8ddc7cb58e27e9138a1386e3e6e53982abaa6f2377b38cc"},
|
||||
{file = "yarl-1.11.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:768ecc550096b028754ea28bf90fde071c379c62c43afa574edc6f33ee5daaec"},
|
||||
{file = "yarl-1.11.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2909fa3a7d249ef64eeb2faa04b7957e34fefb6ec9966506312349ed8a7e77bf"},
|
||||
{file = "yarl-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01a8697ec24f17c349c4f655763c4db70eebc56a5f82995e5e26e837c6eb0e49"},
|
||||
{file = "yarl-1.11.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e286580b6511aac7c3268a78cdb861ec739d3e5a2a53b4809faef6b49778eaff"},
|
||||
{file = "yarl-1.11.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4179522dc0305c3fc9782549175c8e8849252fefeb077c92a73889ccbcd508ad"},
|
||||
{file = "yarl-1.11.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:27fcb271a41b746bd0e2a92182df507e1c204759f460ff784ca614e12dd85145"},
|
||||
{file = "yarl-1.11.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f61db3b7e870914dbd9434b560075e0366771eecbe6d2b5561f5bc7485f39efd"},
|
||||
{file = "yarl-1.11.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:c92261eb2ad367629dc437536463dc934030c9e7caca861cc51990fe6c565f26"},
|
||||
{file = "yarl-1.11.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d95b52fbef190ca87d8c42f49e314eace4fc52070f3dfa5f87a6594b0c1c6e46"},
|
||||
{file = "yarl-1.11.1-cp310-cp310-win32.whl", hash = "sha256:489fa8bde4f1244ad6c5f6d11bb33e09cf0d1d0367edb197619c3e3fc06f3d91"},
|
||||
{file = "yarl-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:476e20c433b356e16e9a141449f25161e6b69984fb4cdbd7cd4bd54c17844998"},
|
||||
{file = "yarl-1.11.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:946eedc12895873891aaceb39bceb484b4977f70373e0122da483f6c38faaa68"},
|
||||
{file = "yarl-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:21a7c12321436b066c11ec19c7e3cb9aec18884fe0d5b25d03d756a9e654edfe"},
|
||||
{file = "yarl-1.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c35f493b867912f6fda721a59cc7c4766d382040bdf1ddaeeaa7fa4d072f4675"},
|
||||
{file = "yarl-1.11.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25861303e0be76b60fddc1250ec5986c42f0a5c0c50ff57cc30b1be199c00e63"},
|
||||
{file = "yarl-1.11.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4b53f73077e839b3f89c992223f15b1d2ab314bdbdf502afdc7bb18e95eae27"},
|
||||
{file = "yarl-1.11.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:327c724b01b8641a1bf1ab3b232fb638706e50f76c0b5bf16051ab65c868fac5"},
|
||||
{file = "yarl-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4307d9a3417eea87715c9736d050c83e8c1904e9b7aada6ce61b46361b733d92"},
|
||||
{file = "yarl-1.11.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48a28bed68ab8fb7e380775f0029a079f08a17799cb3387a65d14ace16c12e2b"},
|
||||
{file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:067b961853c8e62725ff2893226fef3d0da060656a9827f3f520fb1d19b2b68a"},
|
||||
{file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8215f6f21394d1f46e222abeb06316e77ef328d628f593502d8fc2a9117bde83"},
|
||||
{file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:498442e3af2a860a663baa14fbf23fb04b0dd758039c0e7c8f91cb9279799bff"},
|
||||
{file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:69721b8effdb588cb055cc22f7c5105ca6fdaa5aeb3ea09021d517882c4a904c"},
|
||||
{file = "yarl-1.11.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e969fa4c1e0b1a391f3fcbcb9ec31e84440253325b534519be0d28f4b6b533e"},
|
||||
{file = "yarl-1.11.1-cp311-cp311-win32.whl", hash = "sha256:7d51324a04fc4b0e097ff8a153e9276c2593106a811704025bbc1d6916f45ca6"},
|
||||
{file = "yarl-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:15061ce6584ece023457fb8b7a7a69ec40bf7114d781a8c4f5dcd68e28b5c53b"},
|
||||
{file = "yarl-1.11.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a4264515f9117be204935cd230fb2a052dd3792789cc94c101c535d349b3dab0"},
|
||||
{file = "yarl-1.11.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f41fa79114a1d2eddb5eea7b912d6160508f57440bd302ce96eaa384914cd265"},
|
||||
{file = "yarl-1.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:02da8759b47d964f9173c8675710720b468aa1c1693be0c9c64abb9d8d9a4867"},
|
||||
{file = "yarl-1.11.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9361628f28f48dcf8b2f528420d4d68102f593f9c2e592bfc842f5fb337e44fd"},
|
||||
{file = "yarl-1.11.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b91044952da03b6f95fdba398d7993dd983b64d3c31c358a4c89e3c19b6f7aef"},
|
||||
{file = "yarl-1.11.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:74db2ef03b442276d25951749a803ddb6e270d02dda1d1c556f6ae595a0d76a8"},
|
||||
{file = "yarl-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e975a2211952a8a083d1b9d9ba26472981ae338e720b419eb50535de3c02870"},
|
||||
{file = "yarl-1.11.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8aef97ba1dd2138112890ef848e17d8526fe80b21f743b4ee65947ea184f07a2"},
|
||||
{file = "yarl-1.11.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a7915ea49b0c113641dc4d9338efa9bd66b6a9a485ffe75b9907e8573ca94b84"},
|
||||
{file = "yarl-1.11.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:504cf0d4c5e4579a51261d6091267f9fd997ef58558c4ffa7a3e1460bd2336fa"},
|
||||
{file = "yarl-1.11.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:3de5292f9f0ee285e6bd168b2a77b2a00d74cbcfa420ed078456d3023d2f6dff"},
|
||||
{file = "yarl-1.11.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a34e1e30f1774fa35d37202bbeae62423e9a79d78d0874e5556a593479fdf239"},
|
||||
{file = "yarl-1.11.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:66b63c504d2ca43bf7221a1f72fbe981ff56ecb39004c70a94485d13e37ebf45"},
|
||||
{file = "yarl-1.11.1-cp312-cp312-win32.whl", hash = "sha256:a28b70c9e2213de425d9cba5ab2e7f7a1c8ca23a99c4b5159bf77b9c31251447"},
|
||||
{file = "yarl-1.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:17b5a386d0d36fb828e2fb3ef08c8829c1ebf977eef88e5367d1c8c94b454639"},
|
||||
{file = "yarl-1.11.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1fa2e7a406fbd45b61b4433e3aa254a2c3e14c4b3186f6e952d08a730807fa0c"},
|
||||
{file = "yarl-1.11.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:750f656832d7d3cb0c76be137ee79405cc17e792f31e0a01eee390e383b2936e"},
|
||||
{file = "yarl-1.11.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b8486f322d8f6a38539136a22c55f94d269addb24db5cb6f61adc61eabc9d93"},
|
||||
{file = "yarl-1.11.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fce4da3703ee6048ad4138fe74619c50874afe98b1ad87b2698ef95bf92c96d"},
|
||||
{file = "yarl-1.11.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ed653638ef669e0efc6fe2acb792275cb419bf9cb5c5049399f3556995f23c7"},
|
||||
{file = "yarl-1.11.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18ac56c9dd70941ecad42b5a906820824ca72ff84ad6fa18db33c2537ae2e089"},
|
||||
{file = "yarl-1.11.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:688654f8507464745ab563b041d1fb7dab5d9912ca6b06e61d1c4708366832f5"},
|
||||
{file = "yarl-1.11.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4973eac1e2ff63cf187073cd4e1f1148dcd119314ab79b88e1b3fad74a18c9d5"},
|
||||
{file = "yarl-1.11.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:964a428132227edff96d6f3cf261573cb0f1a60c9a764ce28cda9525f18f7786"},
|
||||
{file = "yarl-1.11.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6d23754b9939cbab02c63434776df1170e43b09c6a517585c7ce2b3d449b7318"},
|
||||
{file = "yarl-1.11.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c2dc4250fe94d8cd864d66018f8344d4af50e3758e9d725e94fecfa27588ff82"},
|
||||
{file = "yarl-1.11.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09696438cb43ea6f9492ef237761b043f9179f455f405279e609f2bc9100212a"},
|
||||
{file = "yarl-1.11.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:999bfee0a5b7385a0af5ffb606393509cfde70ecca4f01c36985be6d33e336da"},
|
||||
{file = "yarl-1.11.1-cp313-cp313-win32.whl", hash = "sha256:ce928c9c6409c79e10f39604a7e214b3cb69552952fbda8d836c052832e6a979"},
|
||||
{file = "yarl-1.11.1-cp313-cp313-win_amd64.whl", hash = "sha256:501c503eed2bb306638ccb60c174f856cc3246c861829ff40eaa80e2f0330367"},
|
||||
{file = "yarl-1.11.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:dae7bd0daeb33aa3e79e72877d3d51052e8b19c9025ecf0374f542ea8ec120e4"},
|
||||
{file = "yarl-1.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3ff6b1617aa39279fe18a76c8d165469c48b159931d9b48239065767ee455b2b"},
|
||||
{file = "yarl-1.11.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3257978c870728a52dcce8c2902bf01f6c53b65094b457bf87b2644ee6238ddc"},
|
||||
{file = "yarl-1.11.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f351fa31234699d6084ff98283cb1e852270fe9e250a3b3bf7804eb493bd937"},
|
||||
{file = "yarl-1.11.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8aef1b64da41d18026632d99a06b3fefe1d08e85dd81d849fa7c96301ed22f1b"},
|
||||
{file = "yarl-1.11.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7175a87ab8f7fbde37160a15e58e138ba3b2b0e05492d7351314a250d61b1591"},
|
||||
{file = "yarl-1.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba444bdd4caa2a94456ef67a2f383710928820dd0117aae6650a4d17029fa25e"},
|
||||
{file = "yarl-1.11.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0ea9682124fc062e3d931c6911934a678cb28453f957ddccf51f568c2f2b5e05"},
|
||||
{file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8418c053aeb236b20b0ab8fa6bacfc2feaaf7d4683dd96528610989c99723d5f"},
|
||||
{file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:61a5f2c14d0a1adfdd82258f756b23a550c13ba4c86c84106be4c111a3a4e413"},
|
||||
{file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f3a6d90cab0bdf07df8f176eae3a07127daafcf7457b997b2bf46776da2c7eb7"},
|
||||
{file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:077da604852be488c9a05a524068cdae1e972b7dc02438161c32420fb4ec5e14"},
|
||||
{file = "yarl-1.11.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:15439f3c5c72686b6c3ff235279630d08936ace67d0fe5c8d5bbc3ef06f5a420"},
|
||||
{file = "yarl-1.11.1-cp38-cp38-win32.whl", hash = "sha256:238a21849dd7554cb4d25a14ffbfa0ef380bb7ba201f45b144a14454a72ffa5a"},
|
||||
{file = "yarl-1.11.1-cp38-cp38-win_amd64.whl", hash = "sha256:67459cf8cf31da0e2cbdb4b040507e535d25cfbb1604ca76396a3a66b8ba37a6"},
|
||||
{file = "yarl-1.11.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:884eab2ce97cbaf89f264372eae58388862c33c4f551c15680dd80f53c89a269"},
|
||||
{file = "yarl-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a336eaa7ee7e87cdece3cedb395c9657d227bfceb6781295cf56abcd3386a26"},
|
||||
{file = "yarl-1.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87f020d010ba80a247c4abc335fc13421037800ca20b42af5ae40e5fd75e7909"},
|
||||
{file = "yarl-1.11.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:637c7ddb585a62d4469f843dac221f23eec3cbad31693b23abbc2c366ad41ff4"},
|
||||
{file = "yarl-1.11.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48dfd117ab93f0129084577a07287376cc69c08138694396f305636e229caa1a"},
|
||||
{file = "yarl-1.11.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e0ae31fb5ccab6eda09ba1494e87eb226dcbd2372dae96b87800e1dcc98804"},
|
||||
{file = "yarl-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f46f81501160c28d0c0b7333b4f7be8983dbbc161983b6fb814024d1b4952f79"},
|
||||
{file = "yarl-1.11.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:04293941646647b3bfb1719d1d11ff1028e9c30199509a844da3c0f5919dc520"},
|
||||
{file = "yarl-1.11.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:250e888fa62d73e721f3041e3a9abf427788a1934b426b45e1b92f62c1f68366"},
|
||||
{file = "yarl-1.11.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e8f63904df26d1a66aabc141bfd258bf738b9bc7bc6bdef22713b4f5ef789a4c"},
|
||||
{file = "yarl-1.11.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:aac44097d838dda26526cffb63bdd8737a2dbdf5f2c68efb72ad83aec6673c7e"},
|
||||
{file = "yarl-1.11.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:267b24f891e74eccbdff42241c5fb4f974de2d6271dcc7d7e0c9ae1079a560d9"},
|
||||
{file = "yarl-1.11.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6907daa4b9d7a688063ed098c472f96e8181733c525e03e866fb5db480a424df"},
|
||||
{file = "yarl-1.11.1-cp39-cp39-win32.whl", hash = "sha256:14438dfc5015661f75f85bc5adad0743678eefee266ff0c9a8e32969d5d69f74"},
|
||||
{file = "yarl-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:94d0caaa912bfcdc702a4204cd5e2bb01eb917fc4f5ea2315aa23962549561b0"},
|
||||
{file = "yarl-1.11.1-py3-none-any.whl", hash = "sha256:72bf26f66456baa0584eff63e44545c9f0eaed9b73cb6601b647c91f14c11f38"},
|
||||
{file = "yarl-1.11.1.tar.gz", hash = "sha256:1bb2d9e212fb7449b8fb73bc461b51eaa17cc8430b4a87d87be7b25052d92f53"},
]

[package.dependencies]
idna = ">=2.0"
multidict = ">=4.0"
propcache = ">=0.2.0"

[[package]]
name = "zipp"
@@ -2037,4 +1929,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.10,<4.0"
content-hash = "78ebf65cdef769cfbe92fe204f01e32d219cca9ee5a6ca9e657aa0630be63802"
content-hash = "931772287f71c539575d601e6398423bf68e09ca87ae1a144057c7f5707cf978"
@@ -7,20 +7,21 @@ readme = "README.md"
packages = [{ include = "autogpt_libs" }]

[tool.poetry.dependencies]
python = ">=3.10,<4.0"
colorama = "^0.4.6"
expiringdict = "^1.2.2"
google-cloud-logging = "^3.12.1"
pydantic = "^2.11.4"
pydantic-settings = "^2.9.1"
google-cloud-logging = "^3.11.4"
pydantic = "^2.10.6"
pydantic-settings = "^2.7.1"
pyjwt = "^2.10.1"
pytest-asyncio = "^0.26.0"
pytest-asyncio = "^0.25.3"
pytest-mock = "^3.14.0"
supabase = "^2.15.1"
python = ">=3.10,<4.0"
python-dotenv = "^1.0.1"
supabase = "^2.13.0"

[tool.poetry.group.dev.dependencies]
redis = "^5.2.1"
ruff = "^0.11.10"
ruff = "^0.9.10"

[build-system]
requires = ["poetry-core"]
@@ -8,7 +8,6 @@ DB_CONNECT_TIMEOUT=60
DB_POOL_TIMEOUT=300
DB_SCHEMA=platform
DATABASE_URL="postgresql://${DB_USER}:${DB_PASS}@${DB_HOST}:${DB_PORT}/${DB_NAME}?schema=${DB_SCHEMA}&connect_timeout=${DB_CONNECT_TIMEOUT}"
DIRECT_URL="postgresql://${DB_USER}:${DB_PASS}@${DB_HOST}:${DB_PORT}/${DB_NAME}?schema=${DB_SCHEMA}&connect_timeout=${DB_CONNECT_TIMEOUT}"
PRISMA_SCHEMA="postgres/schema.prisma"

# EXECUTOR
@@ -66,13 +65,6 @@ MEDIA_GCS_BUCKET_NAME=
## and tunnel it to your locally running backend.
PLATFORM_BASE_URL=http://localhost:3000

## Cloudflare Turnstile (CAPTCHA) Configuration
## Get these from the Cloudflare Turnstile dashboard: https://dash.cloudflare.com/?to=/:account/turnstile
## This is the backend secret key
TURNSTILE_SECRET_KEY=
## This is the verify URL
TURNSTILE_VERIFY_URL=https://challenges.cloudflare.com/turnstile/v0/siteverify

## == INTEGRATION CREDENTIALS == ##
# Each set of server side credentials is required for the corresponding 3rd party
# integration to work.
@@ -129,7 +121,6 @@ OPENAI_API_KEY=
ANTHROPIC_API_KEY=
GROQ_API_KEY=
OPEN_ROUTER_API_KEY=
LLAMA_API_KEY=

# Reddit
# Go to https://www.reddit.com/prefs/apps and create a new app
@@ -183,6 +174,9 @@ EXA_API_KEY=
E2B_API_KEY=

# Mem0
# Airtable
AIRTABLE_API_KEY=

MEM0_API_KEY=

# Nvidia
@@ -197,12 +191,6 @@ SMARTLEAD_API_KEY=
# ZeroBounce
ZEROBOUNCE_API_KEY=

# Ayrshare
AYRSHARE_API_KEY=
AYRSHARE_JWT_KEY=

## ===== OPTIONAL API KEYS END ===== ##

# Logging Configuration
LOG_LEVEL=INFO
ENABLE_CLOUD_LOGGING=false
@@ -73,6 +73,7 @@ FROM server_dependencies AS server
COPY autogpt_platform/backend /app/autogpt_platform/backend
RUN poetry install --no-ansi --only-root

ENV DATABASE_URL=""
ENV PORT=8000

CMD ["poetry", "run", "rest"]
@@ -1,99 +1,89 @@
import functools
import importlib
import os
import re
from pathlib import Path
from typing import TYPE_CHECKING, TypeVar
from typing import Type, TypeVar

from backend.data.block import Block

# Dynamically load all modules under backend.blocks
AVAILABLE_MODULES = []
current_dir = Path(__file__).parent
modules = [
str(f.relative_to(current_dir))[:-3].replace(os.path.sep, ".")
for f in current_dir.rglob("*.py")
if f.is_file() and f.name != "__init__.py"
]
for module in modules:
if not re.match("^[a-z0-9_.]+$", module):
raise ValueError(
f"Block module {module} error: module name must be lowercase, "
"and contain only alphanumeric characters and underscores."
)

importlib.import_module(f".{module}", package=__name__)
AVAILABLE_MODULES.append(module)

# Load all Block instances from the available modules
AVAILABLE_BLOCKS: dict[str, Type[Block]] = {}

if TYPE_CHECKING:
from backend.data.block import Block

T = TypeVar("T")


@functools.cache
def load_all_blocks() -> dict[str, type["Block"]]:
from backend.data.block import Block

# Dynamically load all modules under backend.blocks
current_dir = Path(__file__).parent
modules = [
str(f.relative_to(current_dir))[:-3].replace(os.path.sep, ".")
for f in current_dir.rglob("*.py")
if f.is_file() and f.name != "__init__.py"
]
for module in modules:
if not re.match("^[a-z0-9_.]+$", module):
raise ValueError(
f"Block module {module} error: module name must be lowercase, "
"and contain only alphanumeric characters and underscores."
)

importlib.import_module(f".{module}", package=__name__)

# Load all Block instances from the available modules
available_blocks: dict[str, type["Block"]] = {}
for block_cls in all_subclasses(Block):
class_name = block_cls.__name__

if class_name.endswith("Base"):
continue

if not class_name.endswith("Block"):
raise ValueError(
f"Block class {class_name} does not end with 'Block'. "
"If you are creating an abstract class, "
"please name the class with 'Base' at the end"
)

block = block_cls.create()

if not isinstance(block.id, str) or len(block.id) != 36:
raise ValueError(
f"Block ID {block.name} error: {block.id} is not a valid UUID"
)

if block.id in available_blocks:
raise ValueError(
f"Block ID {block.name} error: {block.id} is already in use"
)

input_schema = block.input_schema.model_fields
output_schema = block.output_schema.model_fields

# Make sure `error` field is a string in the output schema
if "error" in output_schema and output_schema["error"].annotation is not str:
raise ValueError(
f"{block.name} `error` field in output_schema must be a string"
)

# Ensure all fields in input_schema and output_schema are annotated SchemaFields
for field_name, field in [*input_schema.items(), *output_schema.items()]:
if field.annotation is None:
raise ValueError(
f"{block.name} has a field {field_name} that is not annotated"
)
if field.json_schema_extra is None:
raise ValueError(
f"{block.name} has a field {field_name} not defined as SchemaField"
)

for field in block.input_schema.model_fields.values():
if field.annotation is bool and field.default not in (True, False):
raise ValueError(
f"{block.name} has a boolean field with no default value"
)

available_blocks[block.id] = block_cls

return available_blocks


__all__ = ["load_all_blocks"]


def all_subclasses(cls: type[T]) -> list[type[T]]:
def all_subclasses(cls: Type[T]) -> list[Type[T]]:
subclasses = cls.__subclasses__()
for subclass in subclasses:
subclasses += all_subclasses(subclass)
return subclasses


for block_cls in all_subclasses(Block):
name = block_cls.__name__

if block_cls.__name__.endswith("Base"):
continue

if not block_cls.__name__.endswith("Block"):
raise ValueError(
f"Block class {block_cls.__name__} does not end with 'Block', If you are creating an abstract class, please name the class with 'Base' at the end"
)

block = block_cls.create()

if not isinstance(block.id, str) or len(block.id) != 36:
raise ValueError(f"Block ID {block.name} error: {block.id} is not a valid UUID")

if block.id in AVAILABLE_BLOCKS:
raise ValueError(f"Block ID {block.name} error: {block.id} is already in use")

input_schema = block.input_schema.model_fields
output_schema = block.output_schema.model_fields

# Make sure `error` field is a string in the output schema
if "error" in output_schema and output_schema["error"].annotation is not str:
raise ValueError(
f"{block.name} `error` field in output_schema must be a string"
)

# Make sure all fields in input_schema and output_schema are annotated and has a value
for field_name, field in [*input_schema.items(), *output_schema.items()]:
if field.annotation is None:
raise ValueError(
f"{block.name} has a field {field_name} that is not annotated"
)
if field.json_schema_extra is None:
raise ValueError(
f"{block.name} has a field {field_name} not defined as SchemaField"
)

for field in block.input_schema.model_fields.values():
if field.annotation is bool and field.default not in (True, False):
raise ValueError(f"{block.name} has a boolean field with no default value")

if block.disabled:
continue

AVAILABLE_BLOCKS[block.id] = block_cls

__all__ = ["AVAILABLE_MODULES", "AVAILABLE_BLOCKS"]
@@ -1,5 +1,7 @@
import logging
from typing import Any, Optional
from typing import Any

from autogpt_libs.utils.cache import thread_cached

from backend.data.block import (
Block,
@@ -11,33 +13,44 @@ from backend.data.block import (
get_block,
)
from backend.data.execution import ExecutionStatus
from backend.data.model import CredentialsMetaInput, SchemaField
from backend.data.model import SchemaField
from backend.util import json

logger = logging.getLogger(__name__)


@thread_cached
def get_executor_manager_client():
from backend.executor import ExecutionManager
from backend.util.service import get_service_client

return get_service_client(ExecutionManager)


@thread_cached
def get_event_bus():
from backend.data.execution import RedisExecutionEventBus

return RedisExecutionEventBus()


class AgentExecutorBlock(Block):
class Input(BlockSchema):
user_id: str = SchemaField(description="User ID")
graph_id: str = SchemaField(description="Graph ID")
graph_version: int = SchemaField(description="Graph Version")

inputs: BlockInput = SchemaField(description="Input data for the graph")
data: BlockInput = SchemaField(description="Input data for the graph")
input_schema: dict = SchemaField(description="Input schema for the graph")
output_schema: dict = SchemaField(description="Output schema for the graph")

node_credentials_input_map: Optional[
dict[str, dict[str, CredentialsMetaInput]]
] = SchemaField(default=None, hidden=True)

@classmethod
def get_input_schema(cls, data: BlockInput) -> dict[str, Any]:
return data.get("input_schema", {})

@classmethod
def get_input_defaults(cls, data: BlockInput) -> BlockInput:
return data.get("inputs", {})
return data.get("data", {})

@classmethod
def get_missing_input(cls, data: BlockInput) -> set[str]:
@@ -62,27 +75,26 @@ class AgentExecutorBlock(Block):
)

def run(self, input_data: Input, **kwargs) -> BlockOutput:
from backend.data.execution import ExecutionEventType
from backend.executor import utils as execution_utils
executor_manager = get_executor_manager_client()
event_bus = get_event_bus()

event_bus = execution_utils.get_execution_event_bus()

graph_exec = execution_utils.add_graph_execution(
graph_exec = executor_manager.add_execution(
graph_id=input_data.graph_id,
graph_version=input_data.graph_version,
user_id=input_data.user_id,
inputs=input_data.inputs,
node_credentials_input_map=input_data.node_credentials_input_map,
data=input_data.data,
)
log_id = f"Graph #{input_data.graph_id}-V{input_data.graph_version}, exec-id: {graph_exec.id}"
log_id = f"Graph #{input_data.graph_id}-V{input_data.graph_version}, exec-id: {graph_exec.graph_exec_id}"
logger.info(f"Starting execution of {log_id}")

for event in event_bus.listen(
user_id=graph_exec.user_id,
graph_id=graph_exec.graph_id,
graph_exec_id=graph_exec.id,
graph_id=graph_exec.graph_id, graph_exec_id=graph_exec.graph_exec_id
):
if event.event_type == ExecutionEventType.GRAPH_EXEC_UPDATE:
logger.info(
f"Execution {log_id} produced input {event.input_data} output {event.output_data}"
)

if not event.node_id:
if event.status in [
ExecutionStatus.COMPLETED,
ExecutionStatus.TERMINATED,
@@ -93,10 +105,6 @@ class AgentExecutorBlock(Block):
else:
continue

logger.debug(
f"Execution {log_id} produced input {event.input_data} output {event.output_data}"
)

if not event.block_id:
logger.warning(f"{log_id} received event without block_id {event}")
continue
@@ -111,7 +119,5 @@ class AgentExecutorBlock(Block):
continue

for output_data in event.output_data.get("output", []):
logger.debug(
f"Execution {log_id} produced {output_name}: {output_data}"
)
logger.info(f"Execution {log_id} produced {output_name}: {output_data}")
yield output_name, output_data
@@ -1,8 +1,8 @@
from enum import Enum
from typing import Literal

import replicate
from pydantic import SecretStr
from replicate.client import Client as ReplicateClient
from replicate.helpers import FileOutput

from backend.data.block import Block, BlockCategory, BlockSchema
@@ -170,7 +170,7 @@ class AIImageGeneratorBlock(Block):
):
try:
# Initialize Replicate client
client = ReplicateClient(api_token=credentials.api_key.get_secret_value())
client = replicate.Client(api_token=credentials.api_key.get_secret_value())

# Run the model with input parameters
output = client.run(model_name, input=input_params, wait=False)

@@ -3,8 +3,8 @@ import time
from enum import Enum
from typing import Literal

import replicate
from pydantic import SecretStr
from replicate.client import Client as ReplicateClient

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import (
@@ -196,7 +196,7 @@ class AIMusicGeneratorBlock(Block):
normalization_strategy: NormalizationStrategy,
):
# Initialize Replicate client with the API key
client = ReplicateClient(api_token=api_key.get_secret_value())
client = replicate.Client(api_token=api_key.get_secret_value())

# Run the model with parameters
output = client.run(
@@ -52,7 +52,6 @@ class AudioTrack(str, Enum):
REFRESHER = ("Refresher",)
TOURIST = ("Tourist",)
TWIN_TYCHES = ("Twin Tyches",)
DONT_STOP_ME_ABSTRACT_FUTURE_BASS = ("Dont Stop Me Abstract Future Bass",)

@property
def audio_url(self):
@@ -78,7 +77,6 @@ class AudioTrack(str, Enum):
AudioTrack.REFRESHER: "https://cdn.tfrv.xyz/audio/refresher.mp3",
AudioTrack.TOURIST: "https://cdn.tfrv.xyz/audio/tourist.mp3",
AudioTrack.TWIN_TYCHES: "https://cdn.tfrv.xyz/audio/twin-tynches.mp3",
AudioTrack.DONT_STOP_ME_ABSTRACT_FUTURE_BASS: "https://cdn.revid.ai/audio/_dont-stop-me-abstract-future-bass.mp3",
}
return audio_urls[self]

@@ -106,7 +104,6 @@ class GenerationPreset(str, Enum):
MOVIE = ("Movie",)
STYLIZED_ILLUSTRATION = ("Stylized Illustration",)
MANGA = ("Manga",)
DEFAULT = ("DEFAULT",)


class Voice(str, Enum):
@@ -116,7 +113,6 @@ class Voice(str, Enum):
JESSICA = "Jessica"
CHARLOTTE = "Charlotte"
CALLUM = "Callum"
EVA = "Eva"

@property
def voice_id(self):
@@ -127,7 +123,6 @@ class Voice(str, Enum):
Voice.JESSICA: "cgSgspJ2msm6clMCkdW9",
Voice.CHARLOTTE: "XB0fDUnXU5powFXDhCwa",
Voice.CALLUM: "N2lVS1w4EtoT3dr4eOWO",
Voice.EVA: "FGY2WhTYpPnrIDTdsKH5",
}
return voice_id_map[self]
@@ -144,54 +139,7 @@ class VisualMediaType(str, Enum):
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class _RevidMixin:
|
||||
"""Utility mix‑in that bundles the shared webhook / polling helpers."""
|
||||
|
||||
def create_video(self, api_key: SecretStr, payload: dict) -> dict:
|
||||
url = "https://www.revid.ai/api/public/v2/render"
|
||||
headers = {"key": api_key.get_secret_value()}
|
||||
response = requests.post(url, json=payload, headers=headers)
|
||||
logger.debug(
|
||||
f"API Response Status Code: {response.status_code}, Content: {response.text}"
|
||||
)
|
||||
return response.json()
|
||||
|
||||
def check_video_status(self, api_key: SecretStr, pid: str) -> dict:
|
||||
url = f"https://www.revid.ai/api/public/v2/status?pid={pid}"
|
||||
headers = {"key": api_key.get_secret_value()}
|
||||
response = requests.get(url, headers=headers)
|
||||
return response.json()
|
||||
|
||||
def wait_for_video(
|
||||
self,
|
||||
api_key: SecretStr,
|
||||
pid: str,
|
||||
max_wait_time: int = 3600,
|
||||
) -> str:
|
||||
start_time = time.time()
|
||||
while time.time() - start_time < max_wait_time:
|
||||
status = self.check_video_status(api_key, pid)
|
||||
logger.debug(f"Video status: {status}")
|
||||
|
||||
if status.get("status") == "ready" and "videoUrl" in status:
|
||||
return status["videoUrl"]
|
||||
elif status.get("status") == "error":
|
||||
error_message = status.get("error", "Unknown error occurred")
|
||||
logger.error(f"Video creation failed: {error_message}")
|
||||
raise ValueError(f"Video creation failed: {error_message}")
|
||||
elif status.get("status") in ["FAILED", "CANCELED"]:
|
||||
logger.error(f"Video creation failed: {status.get('message')}")
|
||||
raise ValueError(f"Video creation failed: {status.get('message')}")
|
||||
|
||||
time.sleep(10)
|
||||
|
||||
logger.error("Video creation timed out")
|
||||
raise TimeoutError("Video creation timed out")
|
||||
|
||||
|
||||
class AIShortformVideoCreatorBlock(Block, _RevidMixin):
|
||||
"""Creates a short‑form text‑to‑video clip using stock or AI imagery."""
|
||||
|
||||
class AIShortformVideoCreatorBlock(Block):
|
||||
class Input(BlockSchema):
|
||||
credentials: CredentialsMetaInput[
|
||||
Literal[ProviderName.REVID], Literal["api_key"]
|
||||
@@ -258,28 +206,86 @@ class AIShortformVideoCreatorBlock(Block, _RevidMixin):
|
||||
"https://example.com/video.mp4",
|
||||
),
|
||||
test_mock={
|
||||
"create_webhook": lambda: (
|
||||
"test_uuid",
|
||||
"https://webhook.site/test_uuid",
|
||||
),
|
||||
"create_video": lambda api_key, payload: {"pid": "test_pid"},
|
||||
"wait_for_video": lambda api_key, pid, max_wait_time=3600: "https://example.com/video.mp4",
|
||||
"wait_for_video": lambda api_key, pid, webhook_token, max_wait_time=1000: "https://example.com/video.mp4",
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
)
|
||||
|
||||
def create_webhook(self):
|
||||
url = "https://webhook.site/token"
|
||||
headers = {"Accept": "application/json", "Content-Type": "application/json"}
|
||||
response = requests.post(url, headers=headers)
|
||||
webhook_data = response.json()
|
||||
return webhook_data["uuid"], f"https://webhook.site/{webhook_data['uuid']}"
|
||||
|
||||
def create_video(self, api_key: SecretStr, payload: dict) -> dict:
|
||||
url = "https://www.revid.ai/api/public/v2/render"
|
||||
headers = {"key": api_key.get_secret_value()}
|
||||
response = requests.post(url, json=payload, headers=headers)
|
||||
logger.debug(
|
||||
f"API Response Status Code: {response.status_code}, Content: {response.text}"
|
||||
)
|
||||
return response.json()
|
||||
|
||||
def check_video_status(self, api_key: SecretStr, pid: str) -> dict:
|
||||
url = f"https://www.revid.ai/api/public/v2/status?pid={pid}"
|
||||
headers = {"key": api_key.get_secret_value()}
|
||||
response = requests.get(url, headers=headers)
|
||||
return response.json()
|
||||
|
||||
def wait_for_video(
|
||||
self,
|
||||
api_key: SecretStr,
|
||||
pid: str,
|
||||
webhook_token: str,
|
||||
max_wait_time: int = 1000,
|
||||
) -> str:
|
||||
start_time = time.time()
|
||||
while time.time() - start_time < max_wait_time:
|
||||
status = self.check_video_status(api_key, pid)
|
||||
logger.debug(f"Video status: {status}")
|
||||
|
||||
if status.get("status") == "ready" and "videoUrl" in status:
|
||||
return status["videoUrl"]
|
||||
elif status.get("status") == "error":
|
||||
error_message = status.get("error", "Unknown error occurred")
|
||||
logger.error(f"Video creation failed: {error_message}")
|
||||
raise ValueError(f"Video creation failed: {error_message}")
|
||||
elif status.get("status") in ["FAILED", "CANCELED"]:
|
||||
logger.error(f"Video creation failed: {status.get('message')}")
|
||||
raise ValueError(f"Video creation failed: {status.get('message')}")
|
||||
|
||||
time.sleep(10)
|
||||
|
||||
logger.error("Video creation timed out")
|
||||
raise TimeoutError("Video creation timed out")
|
||||
|
||||
def run(
|
||||
self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
|
||||
) -> BlockOutput:
|
||||
# Create a new Webhook.site URL
|
||||
webhook_token, webhook_url = self.create_webhook()
|
||||
logger.debug(f"Webhook URL: {webhook_url}")
|
||||
|
||||
audio_url = input_data.background_music.audio_url
|
||||
|
||||
payload = {
|
||||
"frameRate": input_data.frame_rate,
|
||||
"resolution": input_data.resolution,
|
||||
"frameDurationMultiplier": 18,
|
||||
"webhook": None,
|
||||
"webhook": webhook_url,
|
||||
"creationParams": {
|
||||
"mediaType": input_data.video_style,
|
||||
"captionPresetName": "Wrap 1",
|
||||
"selectedVoice": input_data.voice.voice_id,
|
||||
"hasEnhancedGeneration": True,
|
||||
"generationPreset": input_data.generation_preset.name,
|
||||
"selectedAudio": input_data.background_music.value,
|
||||
"selectedAudio": input_data.background_music,
|
||||
"origin": "/create",
|
||||
"inputText": input_data.script,
|
||||
"flowType": "text-to-video",
|
||||
@@ -295,7 +301,7 @@ class AIShortformVideoCreatorBlock(Block, _RevidMixin):
|
||||
"selectedStoryStyle": {"value": "custom", "label": "Custom"},
|
||||
"hasToGenerateVideos": input_data.video_style
|
||||
!= VisualMediaType.STOCK_VIDEOS,
|
||||
"audioUrl": input_data.background_music.audio_url,
|
||||
"audioUrl": audio_url,
|
||||
},
|
||||
}
|
||||
|
||||
@@ -308,354 +314,10 @@ class AIShortformVideoCreatorBlock(Block, _RevidMixin):
|
||||
f"Failed to create video: No project ID returned. API Response: {response}"
|
||||
)
|
||||
raise RuntimeError("Failed to create video: No project ID returned")
|
||||
|
||||
logger.debug(f"Video created with project ID: {pid}. Waiting for completion...")
|
||||
video_url = self.wait_for_video(credentials.api_key, pid)
|
||||
logger.debug(f"Video ready: {video_url}")
|
||||
yield "video_url", video_url
|
||||
|
||||
|
||||
class AIAdMakerVideoCreatorBlock(Block, _RevidMixin):
|
||||
"""Generates a 30‑second vertical AI advert using optional user‑supplied imagery."""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: CredentialsMetaInput[
|
||||
Literal[ProviderName.REVID], Literal["api_key"]
|
||||
] = CredentialsField(
|
||||
description="Credentials for Revid.ai API access.",
|
||||
)
|
||||
script: str = SchemaField(
|
||||
description="Short advertising copy. Line breaks create new scenes.",
|
||||
placeholder="Introducing Foobar – [show product photo] the gadget that does it all.",
|
||||
)
|
||||
ratio: str = SchemaField(description="Aspect ratio", default="9 / 16")
|
||||
target_duration: int = SchemaField(
|
||||
description="Desired length of the ad in seconds.", default=30
|
||||
)
|
||||
voice: Voice = SchemaField(
|
||||
description="Narration voice", default=Voice.EVA, placeholder=Voice.EVA
|
||||
)
|
||||
background_music: AudioTrack = SchemaField(
|
||||
description="Background track",
|
||||
default=AudioTrack.DONT_STOP_ME_ABSTRACT_FUTURE_BASS,
|
||||
)
|
||||
input_media_urls: list[str] = SchemaField(
|
||||
description="List of image URLs to feature in the advert.", default=[]
|
||||
)
|
||||
use_only_provided_media: bool = SchemaField(
|
||||
description="Restrict visuals to supplied images only.", default=True
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
video_url: str = SchemaField(description="URL of the finished advert")
|
||||
error: str = SchemaField(description="Error message on failure")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="3e3fd845-000e-457f-9f50-9f2f9e278bbd",
|
||||
description="Creates an AI‑generated 30‑second advert (text + images)",
|
||||
categories={BlockCategory.MARKETING, BlockCategory.AI},
|
||||
input_schema=AIAdMakerVideoCreatorBlock.Input,
|
||||
output_schema=AIAdMakerVideoCreatorBlock.Output,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"script": "Test product launch!",
|
||||
"input_media_urls": [
|
||||
"https://cdn.revid.ai/uploads/1747076315114-image.png",
|
||||
],
|
||||
},
|
||||
test_output=("video_url", "https://example.com/ad.mp4"),
|
||||
test_mock={
|
||||
"create_video": lambda api_key, payload: {"pid": "test_pid"},
|
||||
"wait_for_video": lambda api_key, pid, max_wait_time=3600: "https://example.com/ad.mp4",
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
)
|
||||
|
||||
def run(self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs):
|
||||
|
||||
payload = {
|
||||
"webhook": None,
|
||||
"creationParams": {
|
||||
"targetDuration": input_data.target_duration,
|
||||
"ratio": input_data.ratio,
|
||||
"mediaType": "aiVideo",
|
||||
"inputText": input_data.script,
|
||||
"flowType": "text-to-video",
|
||||
"slug": "ai-ad-generator",
|
||||
"slugNew": "",
|
||||
"isCopiedFrom": False,
|
||||
"hasToGenerateVoice": True,
|
||||
"hasToTranscript": False,
|
||||
"hasToSearchMedia": True,
|
||||
"hasAvatar": False,
|
||||
"hasWebsiteRecorder": False,
|
||||
"hasTextSmallAtBottom": False,
|
||||
"selectedAudio": input_data.background_music.value,
|
||||
"selectedVoice": input_data.voice.voice_id,
|
||||
"selectedAvatar": "https://cdn.revid.ai/avatars/young-woman.mp4",
|
||||
"selectedAvatarType": "video/mp4",
|
||||
"websiteToRecord": "",
|
||||
"hasToGenerateCover": True,
|
||||
"nbGenerations": 1,
|
||||
"disableCaptions": False,
|
||||
"mediaMultiplier": "medium",
|
||||
"characters": [],
|
||||
"captionPresetName": "Revid",
|
||||
"sourceType": "contentScraping",
|
||||
"selectedStoryStyle": {"value": "custom", "label": "General"},
|
||||
"generationPreset": "DEFAULT",
|
||||
"hasToGenerateMusic": False,
|
||||
"isOptimizedForChinese": False,
|
||||
"generationUserPrompt": "",
|
||||
"enableNsfwFilter": False,
|
||||
"addStickers": False,
|
||||
"typeMovingImageAnim": "dynamic",
|
||||
"hasToGenerateSoundEffects": False,
|
||||
"forceModelType": "gpt-image-1",
|
||||
"selectedCharacters": [],
|
||||
"lang": "",
|
||||
"voiceSpeed": 1,
|
||||
"disableAudio": False,
|
||||
"disableVoice": False,
|
||||
"useOnlyProvidedMedia": input_data.use_only_provided_media,
|
||||
"imageGenerationModel": "ultra",
|
||||
"videoGenerationModel": "base",
|
||||
"hasEnhancedGeneration": True,
|
||||
"hasEnhancedGenerationPro": True,
|
||||
"inputMedias": [
|
||||
{"url": url, "title": "", "type": "image"}
|
||||
for url in input_data.input_media_urls
|
||||
],
|
||||
"hasToGenerateVideos": True,
|
||||
"audioUrl": input_data.background_music.audio_url,
|
||||
"watermark": None,
|
||||
},
|
||||
}
|
||||
|
||||
response = self.create_video(credentials.api_key, payload)
|
||||
pid = response.get("pid")
|
||||
if not pid:
|
||||
raise RuntimeError("Failed to create video: No project ID returned")
|
||||
|
||||
video_url = self.wait_for_video(credentials.api_key, pid)
|
||||
yield "video_url", video_url
|
||||
|
||||
|
||||
class AIPromptToVideoCreatorBlock(Block, _RevidMixin):
    """Turns a single creative prompt into a fully AI‑generated video."""

    class Input(BlockSchema):
        credentials: CredentialsMetaInput[
            Literal[ProviderName.REVID], Literal["api_key"]
        ] = CredentialsField(description="Revid.ai API credentials")
        prompt: str = SchemaField(
            description="Imaginative prompt describing the desired video.",
            placeholder="A neon‑lit cyberpunk alley with rain‑soaked pavements.",
        )
        ratio: str = SchemaField(default="9 / 16")
        prompt_target_duration: int = SchemaField(default=30)
        voice: Voice = SchemaField(default=Voice.EVA)
        background_music: AudioTrack = SchemaField(
            default=AudioTrack.DONT_STOP_ME_ABSTRACT_FUTURE_BASS
        )

    class Output(BlockSchema):
        video_url: str = SchemaField(description="Rendered video URL")
        error: str = SchemaField(description="Error message if any")

    def __init__(self):
        super().__init__(
            id="46f4099c-ad01-4d79-874c-37a24c937ba3",
            description="Creates an AI video from a single prompt (no line‑breaking script).",
            categories={BlockCategory.AI, BlockCategory.SOCIAL},
            input_schema=AIPromptToVideoCreatorBlock.Input,
            output_schema=AIPromptToVideoCreatorBlock.Output,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "prompt": "Epic time‑lapse of a city skyline from day to night",
            },
            test_output=("video_url", "https://example.com/prompt.mp4"),
            test_mock={
                "create_video": lambda api_key, payload: {"pid": "test_pid"},
                "wait_for_video": lambda api_key, pid, max_wait_time=3600: "https://example.com/prompt.mp4",
            },
            test_credentials=TEST_CREDENTIALS,
        )

    def run(self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs):
        payload = {
            "webhook": None,
            "creationParams": {
                "mediaType": "aiVideo",
                "flowType": "prompt-to-video",
                "slug": "prompt-to-video",
                "slugNew": "",
                "isCopiedFrom": False,
                "hasToGenerateVoice": True,
                "hasToTranscript": False,
                "hasToSearchMedia": True,
                "hasAvatar": False,
                "hasWebsiteRecorder": False,
                "hasTextSmallAtBottom": False,
                "ratio": input_data.ratio,
                "selectedAudio": input_data.background_music.value,
                "selectedVoice": input_data.voice.voice_id,
                "selectedAvatar": "https://cdn.revid.ai/avatars/young-woman.mp4",
                "selectedAvatarType": "video/mp4",
                "websiteToRecord": "",
                "hasToGenerateCover": True,
                "nbGenerations": 1,
                "disableCaptions": False,
                "characters": [],
                "captionPresetName": "Revid",
                "sourceType": "contentScraping",
                "selectedStoryStyle": {"value": "custom", "label": "General"},
                "generationPreset": "DEFAULT",
                "hasToGenerateMusic": False,
                "isOptimizedForChinese": False,
                "generationUserPrompt": input_data.prompt,
                "enableNsfwFilter": False,
                "addStickers": False,
                "typeMovingImageAnim": "dynamic",
                "hasToGenerateSoundEffects": False,
                "promptTargetDuration": input_data.prompt_target_duration,
                "selectedCharacters": [],
                "lang": "",
                "voiceSpeed": 1,
                "disableAudio": False,
                "disableVoice": False,
                "imageGenerationModel": "good",
                "videoGenerationModel": "base",
                "hasEnhancedGeneration": True,
                "hasEnhancedGenerationPro": True,
                "inputMedias": [],
                "hasToGenerateVideos": True,
                "audioUrl": input_data.background_music.audio_url,
                "watermark": None,
            },
        }

        response = self.create_video(credentials.api_key, payload)
        pid = response.get("pid")
        if not pid:
            raise RuntimeError("Failed to create video: No project ID returned")

        video_url = self.wait_for_video(credentials.api_key, pid)
        yield "video_url", video_url


class AIScreenshotToVideoAdBlock(Block, _RevidMixin):
|
||||
"""Creates an advert where the supplied screenshot is narrated by an AI avatar."""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: CredentialsMetaInput[
|
||||
Literal[ProviderName.REVID], Literal["api_key"]
|
||||
] = CredentialsField(description="Revid.ai API key")
|
||||
script: str = SchemaField(
|
||||
description="Narration that will accompany the screenshot.",
|
||||
placeholder="Check out these amazing stats!",
|
||||
)
|
||||
screenshot_url: str = SchemaField(
|
||||
description="Screenshot or image URL to showcase."
|
||||
)
|
||||
ratio: str = SchemaField(default="9 / 16")
|
||||
target_duration: int = SchemaField(default=30)
|
||||
voice: Voice = SchemaField(default=Voice.EVA)
|
||||
background_music: AudioTrack = SchemaField(
|
||||
default=AudioTrack.DONT_STOP_ME_ABSTRACT_FUTURE_BASS
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
video_url: str = SchemaField(description="Rendered video URL")
|
||||
error: str = SchemaField(description="Error, if encountered")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="9f68982c-3af6-4923-9a97-b50a8c8d2234",
|
||||
description="Turns a screenshot into an engaging, avatar‑narrated video advert.",
|
||||
categories={BlockCategory.AI, BlockCategory.MARKETING},
|
||||
input_schema=AIScreenshotToVideoAdBlock.Input,
|
||||
output_schema=AIScreenshotToVideoAdBlock.Output,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"script": "Amazing numbers!",
|
||||
"screenshot_url": "https://cdn.revid.ai/uploads/1747080376028-image.png",
|
||||
},
|
||||
test_output=("video_url", "https://example.com/screenshot.mp4"),
|
||||
test_mock={
|
||||
"create_video": lambda api_key, payload: {"pid": "test_pid"},
|
||||
"wait_for_video": lambda api_key, pid, max_wait_time=3600: "https://example.com/screenshot.mp4",
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
)
|
||||
|
||||
def run(self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs):
|
||||
|
||||
payload = {
|
||||
"webhook": None,
|
||||
"creationParams": {
|
||||
"targetDuration": input_data.target_duration,
|
||||
"ratio": input_data.ratio,
|
||||
"mediaType": "aiVideo",
|
||||
"hasAvatar": True,
|
||||
"removeAvatarBackground": True,
|
||||
"inputText": input_data.script,
|
||||
"flowType": "text-to-video",
|
||||
"slug": "ai-ad-generator",
|
||||
"slugNew": "screenshot-to-video-ad",
|
||||
"isCopiedFrom": "ai-ad-generator",
|
||||
"hasToGenerateVoice": True,
|
||||
"hasToTranscript": False,
|
||||
"hasToSearchMedia": True,
|
||||
"hasWebsiteRecorder": False,
|
||||
"hasTextSmallAtBottom": False,
|
||||
"selectedAudio": input_data.background_music.value,
|
||||
"selectedVoice": input_data.voice.voice_id,
|
||||
"selectedAvatar": "https://cdn.revid.ai/avatars/young-woman.mp4",
|
||||
"selectedAvatarType": "video/mp4",
|
||||
"websiteToRecord": "",
|
||||
"hasToGenerateCover": True,
|
||||
"nbGenerations": 1,
|
||||
"disableCaptions": False,
|
||||
"mediaMultiplier": "medium",
|
||||
"characters": [],
|
||||
"captionPresetName": "Revid",
|
||||
"sourceType": "contentScraping",
|
||||
"selectedStoryStyle": {"value": "custom", "label": "General"},
|
||||
"generationPreset": "DEFAULT",
|
||||
"hasToGenerateMusic": False,
|
||||
"isOptimizedForChinese": False,
|
||||
"generationUserPrompt": "",
|
||||
"enableNsfwFilter": False,
|
||||
"addStickers": False,
|
||||
"typeMovingImageAnim": "dynamic",
|
||||
"hasToGenerateSoundEffects": False,
|
||||
"forceModelType": "gpt-image-1",
|
||||
"selectedCharacters": [],
|
||||
"lang": "",
|
||||
"voiceSpeed": 1,
|
||||
"disableAudio": False,
|
||||
"disableVoice": False,
|
||||
"useOnlyProvidedMedia": True,
|
||||
"imageGenerationModel": "ultra",
|
||||
"videoGenerationModel": "base",
|
||||
"hasEnhancedGeneration": True,
|
||||
"hasEnhancedGenerationPro": True,
|
||||
"inputMedias": [
|
||||
{"url": input_data.screenshot_url, "title": "", "type": "image"}
|
||||
],
|
||||
"hasToGenerateVideos": True,
|
||||
"audioUrl": input_data.background_music.audio_url,
|
||||
"watermark": None,
|
||||
},
|
||||
}

        response = self.create_video(credentials.api_key, payload)
        pid = response.get("pid")
        if not pid:
            raise RuntimeError("Failed to create video: No project ID returned")

        logger.debug(f"Video created with project ID: {pid}. Waiting for completion...")
        video_url = self.wait_for_video(credentials.api_key, pid)
        logger.debug(f"Video ready: {video_url}")
        yield "video_url", video_url

440 autogpt_platform/backend/backend/blocks/airtable/_api.py Normal file
@@ -0,0 +1,440 @@
|
||||
"""
|
||||
API module for Airtable API integration.
|
||||
|
||||
This module provides a client for interacting with the Airtable API,
|
||||
including methods for working with tables, fields, records, and webhooks.
|
||||
"""
|
||||
|
||||
from json import JSONDecodeError
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from backend.data.model import APIKeyCredentials
|
||||
from backend.util.request import Requests
|
||||
|
||||
|
||||
class AirtableAPIException(Exception):
|
||||
def __init__(self, message: str, status_code: int):
|
||||
super().__init__(message)
|
||||
self.status_code = status_code
|
||||
|
||||
|
||||
# Response Models
|
||||
class TableField(BaseModel):
|
||||
id: str
|
||||
name: str
|
||||
type: str
|
||||
options: Optional[Dict[str, Any]] = None
|
||||
|
||||
|
||||
class Table(BaseModel):
|
||||
id: str
|
||||
name: str
|
||||
description: Optional[str] = None
|
||||
fields: List[TableField]
|
||||
|
||||
|
||||
class Record(BaseModel):
|
||||
id: str
|
||||
fields: Dict[str, Any]
|
||||
createdTime: Optional[str] = None
|
||||
|
||||
|
||||
class RecordAttachment(BaseModel):
|
||||
id: str
|
||||
url: str
|
||||
filename: str
|
||||
size: Optional[int] = None
|
||||
type: Optional[str] = None
|
||||
|
||||
|
||||
class Webhook(BaseModel):
|
||||
id: str
|
||||
url: str
|
||||
event: str
|
||||
notification_url: Optional[str] = None
|
||||
active: bool
|
||||
|
||||
|
||||
class ListTablesResponse(BaseModel):
|
||||
tables: List[Table]
|
||||
|
||||
|
||||
class ListRecordsResponse(BaseModel):
|
||||
records: List[Record]
|
||||
offset: Optional[str] = None
|
||||
|
||||
|
||||
class ListAttachmentsResponse(BaseModel):
|
||||
attachments: List[RecordAttachment]
|
||||
offset: Optional[str] = None
|
||||
|
||||
|
||||
class ListWebhooksResponse(BaseModel):
|
||||
webhooks: List[Webhook]
|
||||
offset: Optional[str] = None
|
||||
|
||||
|
||||
class AirtableClient:
|
||||
"""Client for the Airtable API"""
|
||||
|
||||
API_BASE_URL = "https://api.airtable.com/v0"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
credentials: Optional[APIKeyCredentials] = None,
|
||||
custom_requests: Optional[Requests] = None,
|
||||
):
|
||||
if custom_requests:
|
||||
self._requests = custom_requests
|
||||
else:
|
||||
headers: dict[str, str] = {
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
if credentials:
|
||||
headers["Authorization"] = (
|
||||
f"Bearer {credentials.api_key.get_secret_value()}"
|
||||
)
|
||||
|
||||
self._requests = Requests(
|
||||
extra_headers=headers,
|
||||
raise_for_status=False,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _handle_response(response) -> Any:
|
||||
"""
|
||||
Handles API response and checks for errors.
|
||||
|
||||
Args:
|
||||
response: The response object from the request.
|
||||
|
||||
Returns:
|
||||
The parsed JSON response data.
|
||||
|
||||
Raises:
|
||||
AirtableAPIException: If the API request fails.
|
||||
"""
|
||||
if not response.ok:
|
||||
try:
|
||||
error_data = response.json()
|
||||
error_message = error_data.get("error", {}).get("message", "")
|
||||
except JSONDecodeError:
|
||||
error_message = response.text
|
||||
|
||||
raise AirtableAPIException(
|
||||
f"Airtable API request failed ({response.status_code}): {error_message}",
|
||||
response.status_code,
|
||||
)
|
||||
|
||||
return response.json()
|
||||
|
||||
# Table Methods
|
||||
def list_tables(self, base_id: str) -> ListTablesResponse:
|
||||
"""
|
||||
List all tables in a base.
|
||||
|
||||
Args:
|
||||
base_id: The ID of the base to list tables from.
|
||||
|
||||
Returns:
|
||||
ListTablesResponse: Object containing the list of tables.
|
||||
|
||||
Raises:
|
||||
AirtableAPIException: If the API request fails.
|
||||
"""
|
||||
try:
|
||||
response = self._requests.get(f"{self.API_BASE_URL}/bases/{base_id}/tables")
|
||||
data = self._handle_response(response)
|
||||
return ListTablesResponse(**data)
|
||||
except Exception as e:
|
||||
raise AirtableAPIException(f"Failed to list tables: {str(e)}", 500)
|
||||
|
||||
def get_table(self, base_id: str, table_id: str) -> Table:
|
||||
"""
|
||||
Get a specific table schema.
|
||||
|
||||
Args:
|
||||
base_id: The ID of the base containing the table.
|
||||
table_id: The ID of the table to retrieve.
|
||||
|
||||
Returns:
|
||||
Table: The table object.
|
||||
|
||||
Raises:
|
||||
AirtableAPIException: If the API request fails.
|
||||
"""
|
||||
try:
|
||||
response = self._requests.get(
|
||||
f"{self.API_BASE_URL}/bases/{base_id}/tables/{table_id}"
|
||||
)
|
||||
data = self._handle_response(response)
|
||||
return Table(**data)
|
||||
except Exception as e:
|
||||
raise AirtableAPIException(f"Failed to get table: {str(e)}", 500)
|
||||
|
||||
def create_table(
|
||||
self, base_id: str, name: str, description: str, fields: List[Dict[str, Any]]
|
||||
) -> Table:
|
||||
"""
|
||||
Create a new table in a base.
|
||||
|
||||
Args:
|
||||
base_id: The ID of the base to create the table in.
|
||||
name: The name of the new table.
|
||||
description: The description of the new table.
|
||||
fields: The fields to create in the new table.
|
||||
|
||||
Returns:
|
||||
Table: The created table object.
|
||||
|
||||
Raises:
|
||||
AirtableAPIException: If the API request fails.
|
||||
"""
|
||||
try:
|
||||
payload = {
|
||||
"name": name,
|
||||
"description": description,
|
||||
"fields": fields,
|
||||
}
|
||||
response = self._requests.post(
|
||||
f"{self.API_BASE_URL}/meta/bases/{base_id}/tables", json=payload
|
||||
)
|
||||
data = self._handle_response(response)
|
||||
return Table(**data)
|
||||
except Exception as e:
|
||||
raise AirtableAPIException(f"Failed to create table: {str(e)}", 500)
|
||||
|
||||
# Field Methods
|
||||
def list_fields(self, base_id: str, table_id: str) -> List[TableField]:
|
||||
"""
|
||||
List all fields in a table.
|
||||
|
||||
Args:
|
||||
base_id: The ID of the base containing the table.
|
||||
table_id: The ID of the table to list fields from.
|
||||
|
||||
Returns:
|
||||
List[TableField]: List of field objects.
|
||||
|
||||
Raises:
|
||||
AirtableAPIException: If the API request fails.
|
||||
"""
|
||||
try:
|
||||
response = self._requests.get(
|
||||
f"{self.API_BASE_URL}/bases/{base_id}/tables/{table_id}/fields"
|
||||
)
|
||||
data = self._handle_response(response)
|
||||
return [TableField(**field) for field in data.get("fields", [])]
|
||||
except Exception as e:
|
||||
raise AirtableAPIException(f"Failed to list fields: {str(e)}", 500)
|
||||
|
||||
def get_field(self, base_id: str, table_id: str, field_id: str) -> TableField:
|
||||
"""
|
||||
Get a specific field.
|
||||
|
||||
Args:
|
||||
base_id: The ID of the base containing the table.
|
||||
table_id: The ID of the table containing the field.
|
||||
field_id: The ID of the field to retrieve.
|
||||
|
||||
Returns:
|
||||
TableField: The field object.
|
||||
|
||||
Raises:
|
||||
AirtableAPIException: If the API request fails.
|
||||
"""
|
||||
try:
|
||||
response = self._requests.get(
|
||||
f"{self.API_BASE_URL}/bases/{base_id}/tables/{table_id}/fields/{field_id}"
|
||||
)
|
||||
data = self._handle_response(response)
|
||||
return TableField(**data)
|
||||
except Exception as e:
|
||||
raise AirtableAPIException(f"Failed to get field: {str(e)}", 500)
|
||||
|
||||
def create_field(
|
||||
self,
|
||||
base_id: str,
|
||||
table_id: str,
|
||||
name: str,
|
||||
field_type: str,
|
||||
options: Optional[Dict[str, Any]] = None,
|
||||
) -> TableField:
|
||||
"""
|
||||
Create a new field in a table.
|
||||
|
||||
Args:
|
||||
base_id: The ID of the base containing the table.
|
||||
table_id: The ID of the table to create the field in.
|
||||
name: The name of the new field.
|
||||
field_type: The type of the new field.
|
||||
options: Optional field type options.
|
||||
|
||||
Returns:
|
||||
TableField: The created field object.
|
||||
|
||||
Raises:
|
||||
AirtableAPIException: If the API request fails.
|
||||
"""
|
||||
try:
|
||||
payload = {
|
||||
"name": name,
|
||||
"type": field_type,
|
||||
}
|
||||
if options:
|
||||
payload["options"] = options
|
||||
|
||||
response = self._requests.post(
|
||||
f"{self.API_BASE_URL}/meta/bases/{base_id}/tables/{table_id}/fields",
|
||||
json=payload,
|
||||
)
|
||||
data = self._handle_response(response)
|
||||
return TableField(**data)
|
||||
except Exception as e:
|
||||
raise AirtableAPIException(f"Failed to create field: {str(e)}", 500)
|
||||
|
||||
# Record Methods
|
||||
def list_records(
|
||||
self,
|
||||
base_id: str,
|
||||
table_id: str,
|
||||
filter_formula: Optional[str] = None,
|
||||
offset: Optional[str] = None,
|
||||
) -> ListRecordsResponse:
|
||||
"""
|
||||
List records in a table, with optional filtering.
|
||||
|
||||
Args:
|
||||
base_id: The ID of the base containing the table.
|
||||
table_id: The ID of the table to list records from.
|
||||
filter_formula: Optional formula to filter records.
|
||||
offset: Optional pagination offset.
|
||||
|
||||
Returns:
|
||||
ListRecordsResponse: Object containing the list of records.
|
||||
|
||||
Raises:
|
||||
AirtableAPIException: If the API request fails.
|
||||
"""
|
||||
try:
|
||||
params = {}
|
||||
if filter_formula:
|
||||
params["filterByFormula"] = filter_formula
|
||||
if offset:
|
||||
params["offset"] = offset
|
||||
|
||||
response = self._requests.get(
|
||||
f"{self.API_BASE_URL}/bases/{base_id}/tables/{table_id}/records",
|
||||
params=params,
|
||||
)
|
||||
data = self._handle_response(response)
|
||||
return ListRecordsResponse(**data)
|
||||
except Exception as e:
|
||||
raise AirtableAPIException(f"Failed to list records: {str(e)}", 500)
|
||||
|
||||
def get_record(self, base_id: str, table_id: str, record_id: str) -> Record:
|
||||
"""
|
||||
Get a specific record.
|
||||
|
||||
Args:
|
||||
base_id: The ID of the base containing the table.
|
||||
table_id: The ID of the table containing the record.
|
||||
record_id: The ID of the record to retrieve.
|
||||
|
||||
Returns:
|
||||
Record: The record object.
|
||||
|
||||
Raises:
|
||||
AirtableAPIException: If the API request fails.
|
||||
"""
|
||||
try:
|
||||
response = self._requests.get(
|
||||
f"{self.API_BASE_URL}/bases/{base_id}/tables/{table_id}/records/{record_id}"
|
||||
)
|
||||
data = self._handle_response(response)
|
||||
return Record(**data)
|
||||
except Exception as e:
|
||||
raise AirtableAPIException(f"Failed to get record: {str(e)}", 500)
|
||||
|
||||
def create_record(
|
||||
self, base_id: str, table_id: str, fields: Dict[str, Any]
|
||||
) -> Record:
|
||||
"""
|
||||
Create a new record in a table.
|
||||
|
||||
Args:
|
||||
base_id: The ID of the base containing the table.
|
||||
table_id: The ID of the table to create the record in.
|
||||
fields: The field values for the new record.
|
||||
|
||||
Returns:
|
||||
Record: The created record object.
|
||||
|
||||
Raises:
|
||||
AirtableAPIException: If the API request fails.
|
||||
"""
|
||||
try:
|
||||
payload = {"fields": fields}
|
||||
response = self._requests.post(
|
||||
f"{self.API_BASE_URL}/bases/{base_id}/tables/{table_id}/records",
|
||||
json=payload,
|
||||
)
|
||||
data = self._handle_response(response)
|
||||
return Record(**data)
|
||||
except Exception as e:
|
||||
raise AirtableAPIException(f"Failed to create record: {str(e)}", 500)
|
||||
|
||||
def update_record(
|
||||
self, base_id: str, table_id: str, record_id: str, fields: Dict[str, Any]
|
||||
) -> Record:
|
||||
"""
|
||||
Update a record in a table.
|
||||
|
||||
Args:
|
||||
base_id: The ID of the base containing the table.
|
||||
table_id: The ID of the table containing the record.
|
||||
record_id: The ID of the record to update.
|
||||
fields: The field values to update.
|
||||
|
||||
Returns:
|
||||
Record: The updated record object.
|
||||
|
||||
Raises:
|
||||
AirtableAPIException: If the API request fails.
|
||||
"""
|
||||
try:
|
||||
payload = {"fields": fields}
|
||||
response = self._requests.patch(
|
||||
f"{self.API_BASE_URL}/bases/{base_id}/tables/{table_id}/records/{record_id}",
|
||||
json=payload,
|
||||
)
|
||||
data = self._handle_response(response)
|
||||
return Record(**data)
|
||||
except Exception as e:
|
||||
raise AirtableAPIException(f"Failed to update record: {str(e)}", 500)
|
||||
|
||||
def delete_record(self, base_id: str, table_id: str, record_id: str) -> bool:
|
||||
"""
|
||||
Delete a record from a table.
|
||||
|
||||
Args:
|
||||
base_id: The ID of the base containing the table.
|
||||
table_id: The ID of the table containing the record.
|
||||
record_id: The ID of the record to delete.
|
||||
|
||||
Returns:
|
||||
bool: True if the deletion was successful.
|
||||
|
||||
Raises:
|
||||
AirtableAPIException: If the API request fails.
|
||||
"""
|
||||
try:
|
||||
response = self._requests.delete(
|
||||
f"{self.API_BASE_URL}/bases/{base_id}/tables/{table_id}/records/{record_id}"
|
||||
)
|
||||
self._handle_response(response)
|
||||
return True
|
||||
except Exception as e:
|
||||
raise AirtableAPIException(f"Failed to delete record: {str(e)}", 500)
|
||||
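A minimal usage sketch of the AirtableClient defined above; the base and table IDs are placeholders, and the credentials object is constructed purely for illustration.

from pydantic import SecretStr

from backend.data.model import APIKeyCredentials

# Placeholder credentials, for illustration only.
creds = APIKeyCredentials(
    id="00000000-0000-0000-0000-000000000000",
    provider="airtable",
    api_key=SecretStr("pat-your-airtable-token"),
    title="Airtable personal access token",
    expires_at=None,
)

client = AirtableClient(credentials=creds)

# List the tables in a base, then read the first page of records from the first table.
tables = client.list_tables("appXXXXXXXXXXXXXX").tables
if tables:
    page = client.list_records("appXXXXXXXXXXXXXX", tables[0].id)
    for record in page.records:
        print(record.id, record.fields)
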
37 autogpt_platform/backend/backend/blocks/airtable/_auth.py Normal file
@@ -0,0 +1,37 @@
"""
Authentication module for Airtable API integration.

This module provides credential types and test credentials for the Airtable API integration.
It defines the structure for API key credentials used to authenticate with the Airtable API
and provides mock credentials for testing purposes.
"""

from typing import Literal

from pydantic import SecretStr

from backend.data.model import APIKeyCredentials, CredentialsMetaInput
from backend.integrations.providers import ProviderName

# Define the type of credentials input expected for Airtable API
AirtableCredentialsInput = CredentialsMetaInput[
    Literal[ProviderName.AIRTABLE], Literal["api_key"]
]


# Mock credentials for testing Airtable API integration
TEST_CREDENTIALS = APIKeyCredentials(
    id="7a91c8f0-399f-4235-a79c-59c0e37454d5",
    provider="airtable",
    api_key=SecretStr("mock-airtable-api-key"),
    title="Mock Airtable API key",
    expires_at=None,
)

# Dictionary representation of test credentials for input fields
TEST_CREDENTIALS_INPUT = {
    "provider": TEST_CREDENTIALS.provider,
    "id": TEST_CREDENTIALS.id,
    "type": TEST_CREDENTIALS.type,
    "title": TEST_CREDENTIALS.title,
}
384 autogpt_platform/backend/backend/blocks/airtable/airtable.py Normal file
@@ -0,0 +1,384 @@
|
||||
"""
|
||||
Airtable API integration blocks.
|
||||
|
||||
This module provides blocks for interacting with the Airtable API,
|
||||
including operations for tables, fields, and records.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import APIKeyCredentials, CredentialsField, SchemaField
|
||||
|
||||
from ._api import AirtableAPIException, AirtableClient
|
||||
from ._auth import TEST_CREDENTIALS, TEST_CREDENTIALS_INPUT, AirtableCredentialsInput
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Common response models
|
||||
class AirtableTable(BaseModel):
|
||||
id: str
|
||||
name: str
|
||||
description: Optional[str] = None
|
||||
|
||||
|
||||
class AirtableField(BaseModel):
|
||||
id: str
|
||||
name: str
|
||||
type: str
|
||||
|
||||
|
||||
class AirtableRecord(BaseModel):
    id: str
    fields: Dict[str, Any]
    created_time: Optional[str] = None
|
||||
|
||||
|
||||
class AirtableTablesBlock(Block):
|
||||
"""Block for listing, getting, and creating tables in Airtable."""
|
||||
|
||||
class Input(BlockSchema):
|
||||
base_id: str = SchemaField(
|
||||
description="The ID of the Airtable base",
|
||||
placeholder="appXXXXXXXXXXXXXX",
|
||||
)
|
||||
operation: str = SchemaField(
|
||||
description="The operation to perform on tables",
|
||||
placeholder="list",
|
||||
choices=["list", "get", "create"],
|
||||
)
|
||||
table_id: Optional[str] = SchemaField(
|
||||
description="The ID of the table (required for 'get' operation)",
|
||||
placeholder="tblXXXXXXXXXXXXXX",
|
||||
advanced=True,
|
||||
)
|
||||
table_name: Optional[str] = SchemaField(
|
||||
description="The name of the new table (required for 'create' operation)",
|
||||
placeholder="My New Table",
|
||||
advanced=True,
|
||||
)
|
||||
table_description: Optional[str] = SchemaField(
|
||||
description="The description of the new table (for 'create' operation)",
|
||||
placeholder="Description of my table",
|
||||
advanced=True,
|
||||
)
|
||||
fields: Optional[List[Dict[str, str]]] = SchemaField(
|
||||
description="The fields to create in the new table (for 'create' operation)",
|
||||
placeholder='[{"name": "Name", "type": "text"}]',
|
||||
advanced=True,
|
||||
)
|
||||
credentials: AirtableCredentialsInput = CredentialsField(
|
||||
description="The credentials for the Airtable API"
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
tables: Optional[List[AirtableTable]] = SchemaField(
|
||||
description="List of tables in the base"
|
||||
)
|
||||
table: Optional[AirtableTable] = SchemaField(
|
||||
description="The retrieved or created table"
|
||||
)
|
||||
error: Optional[str] = SchemaField(description="Error message if any")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="da53b48c-6e97-4c1c-afb9-4ecf10c81856",
|
||||
description="List, get, or create tables in an Airtable base",
|
||||
categories={BlockCategory.DATA},
|
||||
input_schema=AirtableTablesBlock.Input,
|
||||
output_schema=AirtableTablesBlock.Output,
|
||||
test_input={
|
||||
"base_id": "appXXXXXXXXXXXXXX",
|
||||
"operation": "list",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
},
|
||||
test_output=[
|
||||
("tables", [AirtableTable(id="tbl123", name="Example Table")])
|
||||
],
|
||||
test_mock={
|
||||
"list_tables": lambda *args, **kwargs: {
|
||||
"tables": [{"id": "tbl123", "name": "Example Table"}]
|
||||
}
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
)
|
||||
|
||||
def run(
|
||||
self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
|
||||
) -> BlockOutput:
|
||||
"""
|
||||
Perform operations on Airtable tables.
|
||||
|
||||
Args:
|
||||
input_data: The input parameters for the block.
|
||||
credentials: The Airtable API credentials.
|
||||
|
||||
Yields:
|
||||
BlockOutput: The result of the table operation.
|
||||
"""
|
||||
try:
|
||||
client = AirtableClient(credentials=credentials)
|
||||
|
||||
if input_data.operation == "list":
|
||||
# List all tables in the base
|
||||
response = client.list_tables(input_data.base_id)
|
||||
tables = [
|
||||
AirtableTable(
|
||||
id=table.id, name=table.name, description=table.description
|
||||
)
|
||||
for table in response.tables
|
||||
]
|
||||
yield "tables", tables
|
||||
|
||||
elif input_data.operation == "get":
|
||||
# Get a specific table
|
||||
if not input_data.table_id:
|
||||
yield "error", "Table ID is required for 'get' operation"
|
||||
return
|
||||
|
||||
table = client.get_table(input_data.base_id, input_data.table_id)
|
||||
yield "table", AirtableTable(
|
||||
id=table.id, name=table.name, description=table.description
|
||||
)
|
||||
|
||||
elif input_data.operation == "create":
|
||||
# Create a new table
|
||||
if not input_data.table_name:
|
||||
yield "error", "Table name is required for 'create' operation"
|
||||
return
|
||||
if not input_data.fields or len(input_data.fields) == 0:
|
||||
yield "error", "At least one field is required for 'create' operation"
|
||||
return
|
||||
|
||||
table = client.create_table(
|
||||
input_data.base_id,
|
||||
input_data.table_name,
|
||||
input_data.table_description or "",
|
||||
input_data.fields,
|
||||
)
|
||||
yield "table", AirtableTable(
|
||||
id=table.id, name=table.name, description=table.description
|
||||
)
|
||||
|
||||
else:
|
||||
yield "error", f"Unknown operation: {input_data.operation}"
|
||||
|
||||
except AirtableAPIException as e:
|
||||
yield "error", f"Airtable API error: {str(e)}"
|
||||
except Exception as e:
|
||||
logger.exception("Error in AirtableTablesBlock")
|
||||
yield "error", f"Error: {str(e)}"
|
||||
|
||||
|
||||
class AirtableFieldsBlock(Block):
|
||||
"""Block for listing, getting, and creating fields in Airtable tables."""
|
||||
|
||||
class Input(BlockSchema):
|
||||
base_id: str = SchemaField(
|
||||
description="The ID of the Airtable base",
|
||||
placeholder="appXXXXXXXXXXXXXX",
|
||||
)
|
||||
table_id: str = SchemaField(
|
||||
description="The ID of the table",
|
||||
placeholder="tblXXXXXXXXXXXXXX",
|
||||
)
|
||||
operation: str = SchemaField(
|
||||
description="The operation to perform on fields",
|
||||
placeholder="list",
|
||||
choices=["list", "get", "create"],
|
||||
)
|
||||
field_id: Optional[str] = SchemaField(
|
||||
description="The ID of the field (required for 'get' operation)",
|
||||
placeholder="fldXXXXXXXXXXXXXX",
|
||||
advanced=True,
|
||||
)
|
||||
field_name: Optional[str] = SchemaField(
|
||||
description="The name of the new field (required for 'create' operation)",
|
||||
placeholder="My New Field",
|
||||
advanced=True,
|
||||
)
|
||||
field_type: Optional[str] = SchemaField(
|
||||
description="The type of the new field (required for 'create' operation)",
|
||||
placeholder="text",
|
||||
advanced=True,
|
||||
choices=[
|
||||
"text",
|
||||
"number",
|
||||
"checkbox",
|
||||
"singleSelect",
|
||||
"multipleSelects",
|
||||
"date",
|
||||
"dateTime",
|
||||
"attachment",
|
||||
"link",
|
||||
"multipleRecordLinks",
|
||||
"formula",
|
||||
"rollup",
|
||||
"count",
|
||||
"lookup",
|
||||
"currency",
|
||||
"percent",
|
||||
"duration",
|
||||
"rating",
|
||||
"richText",
|
||||
"barcode",
|
||||
"button",
|
||||
],
|
||||
)
|
||||
credentials: AirtableCredentialsInput = CredentialsField(
|
||||
description="The credentials for the Airtable API"
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
fields: Optional[List[AirtableField]] = SchemaField(
|
||||
description="List of fields in the table"
|
||||
)
|
||||
field: Optional[AirtableField] = SchemaField(
|
||||
description="The retrieved or created field"
|
||||
)
|
||||
error: Optional[str] = SchemaField(description="Error message if any")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="c27a6a11-8c09-4f8c-afeb-82c7a0c81857",
|
||||
description="List, get, or create fields in an Airtable table",
|
||||
categories={BlockCategory.DATA},
|
||||
input_schema=AirtableFieldsBlock.Input,
|
||||
output_schema=AirtableFieldsBlock.Output,
|
||||
test_input={
|
||||
"base_id": "appXXXXXXXXXXXXXX",
|
||||
"table_id": "tblXXXXXXXXXXXXXX",
|
||||
"operation": "list",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
},
|
||||
test_output=[
|
||||
("fields", [AirtableField(id="fld123", name="Name", type="text")])
|
||||
],
|
||||
test_mock={
|
||||
"list_fields": lambda *args, **kwargs: [
|
||||
{"id": "fld123", "name": "Name", "type": "text"}
|
||||
]
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
)
|
||||
|
||||
def run(
|
||||
self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
|
||||
) -> BlockOutput:
|
||||
"""
|
||||
Perform operations on Airtable fields.
|
||||
|
||||
Args:
|
||||
input_data: The input parameters for the block.
|
||||
credentials: The Airtable API credentials.
|
||||
|
||||
Yields:
|
||||
BlockOutput: The result of the field operation.
|
||||
"""
|
||||
try:
|
||||
client = AirtableClient(credentials=credentials)
|
||||
|
||||
if input_data.operation == "list":
|
||||
# List all fields in the table
|
||||
fields_list = client.list_fields(
|
||||
input_data.base_id, input_data.table_id
|
||||
)
|
||||
fields = [
|
||||
AirtableField(id=field.id, name=field.name, type=field.type)
|
||||
for field in fields_list
|
||||
]
|
||||
yield "fields", fields
|
||||
|
||||
elif input_data.operation == "get":
|
||||
# Get a specific field
|
||||
if not input_data.field_id:
|
||||
yield "error", "Field ID is required for 'get' operation"
|
||||
return
|
||||
|
||||
field = client.get_field(
|
||||
input_data.base_id, input_data.table_id, input_data.field_id
|
||||
)
|
||||
yield "field", AirtableField(
|
||||
id=field.id, name=field.name, type=field.type
|
||||
)
|
||||
|
||||
elif input_data.operation == "create":
|
||||
# Create a new field
|
||||
if not input_data.field_name:
|
||||
yield "error", "Field name is required for 'create' operation"
|
||||
return
|
||||
if not input_data.field_type:
|
||||
yield "error", "Field type is required for 'create' operation"
|
||||
return
|
||||
|
||||
field = client.create_field(
|
||||
input_data.base_id,
|
||||
input_data.table_id,
|
||||
input_data.field_name,
|
||||
input_data.field_type,
|
||||
)
|
||||
yield "field", AirtableField(
|
||||
id=field.id, name=field.name, type=field.type
|
||||
)
|
||||
|
||||
else:
|
||||
yield "error", f"Unknown operation: {input_data.operation}"
|
||||
|
||||
except AirtableAPIException as e:
|
||||
yield "error", f"Airtable API error: {str(e)}"
|
||||
except Exception as e:
|
||||
logger.exception("Error in AirtableFieldsBlock")
|
||||
yield "error", f"Error: {str(e)}"
|
||||
|
||||
|
||||
class AirtableRecordsBlock(Block):
|
||||
"""Block for creating, reading, updating, and deleting records in Airtable."""
|
||||
|
||||
class Input(BlockSchema):
|
||||
base_id: str = SchemaField(
|
||||
description="The ID of the Airtable base",
|
||||
placeholder="appXXXXXXXXXXXXXX",
|
||||
)
|
||||
table_id: str = SchemaField(
|
||||
description="The ID of the table",
|
||||
placeholder="tblXXXXXXXXXXXXXX",
|
||||
)
|
||||
operation: str = SchemaField(
|
||||
description="The operation to perform on records",
|
||||
placeholder="list",
|
||||
choices=["list", "get", "create", "update", "delete"],
|
||||
)
|
||||
record_id: Optional[str] = SchemaField(
|
||||
description="The ID of the record (required for 'get', 'update', and 'delete' operations)",
|
||||
placeholder="recXXXXXXXXXXXXXX",
|
||||
advanced=True,
|
||||
)
|
||||
filter_formula: Optional[str] = SchemaField(
|
||||
description="Filter formula for listing records (optional for 'list' operation)",
|
||||
placeholder="{Field}='Value'",
|
||||
advanced=True,
|
||||
)
|
||||
        fields: Optional[Dict[str, Any]] = SchemaField(
            description="The field values (required for 'create' and 'update' operations)",
            placeholder='{"Name": "John Doe", "Email": "john@example.com"}',
            advanced=True,
        )
|
||||
credentials: AirtableCredentialsInput = CredentialsField(
|
||||
description="The credentials for the Airtable API"
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
records: Optional[List[AirtableRecord]] = SchemaField(
|
||||
description="List of records in the table"
|
||||
)
|
||||
record: Optional[AirtableRecord] = SchemaField(
|
||||
description="The retrieved, created, or updated record"
|
||||
)
|
||||
success: Optional[bool] = SchemaField(
|
||||
description="Success status for delete operation"
|
||||
)
|
||||
error: Optional[str] = SchemaField(description="Error message if any")
|
||||
87 autogpt_platform/backend/backend/blocks/airtable/triggers.py Normal file
@@ -0,0 +1,87 @@
"""
Module for Airtable webhook triggers.

This module provides trigger blocks that respond to Airtable webhook events.
"""

import logging
from typing import Dict

from strenum import StrEnum

from backend.data.block import (
    Block,
    BlockCategory,
    BlockManualWebhookConfig,
    BlockOutput,
    BlockSchema,
)
from backend.data.model import SchemaField

logger = logging.getLogger(__name__)


class AirtableWebhookEventType(StrEnum):
    """Types of webhook events supported by Airtable."""

    RECORDS_CREATED = "records:created"
    RECORDS_UPDATED = "records:updated"
    RECORDS_DELETED = "records:deleted"


class AirtableWebhookTriggerBlock(Block):
    """
    A trigger block that responds to Airtable webhook events.
    This block is activated when a webhook event is received from Airtable.
    """

    class Input(BlockSchema):
        # The payload field is hidden because it's automatically populated by the webhook system
        payload: Dict = SchemaField(hidden=True)

    class Output(BlockSchema):
        event_data: Dict = SchemaField(
            description="The contents of the Airtable webhook event."
        )
        base_id: str = SchemaField(description="The ID of the Airtable base.")
        table_id: str = SchemaField(description="The ID of the Airtable table.")
        event_type: str = SchemaField(description="The type of event that occurred.")

    def __init__(self):
        super().__init__(
            id="8c3b52d1-f7e9-4c5d-a6f1-60e937d94d2a",
            description="This block will output the contents of an Airtable webhook event.",
            categories={BlockCategory.DATA},
            input_schema=AirtableWebhookTriggerBlock.Input,
            output_schema=AirtableWebhookTriggerBlock.Output,
            webhook_config=BlockManualWebhookConfig(
                provider="airtable",
                webhook_type=AirtableWebhookEventType.RECORDS_UPDATED,
            ),
            test_input=[
                {
                    "payload": {
                        "baseId": "app123",
                        "tableId": "tbl456",
                        "event": "records:updated",
                        "data": {},
                    }
                }
            ],
            test_output=[
                (
                    "event_data",
                    {
                        "baseId": "app123",
                        "tableId": "tbl456",
                        "event": "records:updated",
                        "data": {},
                    },
                )
            ],
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        """Process the Airtable webhook event and yield its contents."""
        logger.info("Airtable webhook trigger received payload: %s", input_data.payload)
        yield "event_data", input_data.payload
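A small illustrative consumer for the event_data output of the trigger block above; the field names mirror the block's own test data and are not guaranteed by Airtable.

def handle_airtable_event(event_data: dict) -> None:
    """Example downstream handler for the trigger's event_data output (illustrative only)."""
    base_id = event_data.get("baseId", "")
    table_id = event_data.get("tableId", "")
    event_type = event_data.get("event", "")
    print(f"Airtable event {event_type!r} on base {base_id}, table {table_id}")


handle_airtable_event(
    {"baseId": "app123", "tableId": "tbl456", "event": "records:updated", "data": {}}
)
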
@@ -1,7 +1,7 @@
|
||||
from enum import Enum
|
||||
from typing import Any, Optional
|
||||
|
||||
from pydantic import BaseModel, ConfigDict
|
||||
from pydantic import BaseModel
|
||||
|
||||
from backend.data.model import SchemaField
|
||||
|
||||
@@ -143,12 +143,11 @@ class ContactEmail(BaseModel):
|
||||
class EmploymentHistory(BaseModel):
|
||||
"""An employment history in Apollo"""
|
||||
|
||||
model_config = ConfigDict(
|
||||
extra="allow",
|
||||
arbitrary_types_allowed=True,
|
||||
from_attributes=True,
|
||||
populate_by_name=True,
|
||||
)
|
||||
class Config:
|
||||
extra = "allow"
|
||||
arbitrary_types_allowed = True
|
||||
from_attributes = True
|
||||
populate_by_name = True
|
||||
|
||||
_id: Optional[str] = None
|
||||
created_at: Optional[str] = None
|
||||
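The hunks in this section migrate the Apollo models from the Pydantic v1-style inner class Config to the v2 model_config = ConfigDict(...) declaration, and elsewhere swap mutable default=[] values for default_factory=list. A minimal sketch of both patterns on a hypothetical model (the model and field names are illustrative, not part of the Apollo schema):

from pydantic import BaseModel, ConfigDict, Field


class ExampleApolloModel(BaseModel):  # hypothetical model, for illustration only
    # Pydantic v2 replaces the v1 inner `class Config` with `model_config`.
    model_config = ConfigDict(
        extra="allow",
        arbitrary_types_allowed=True,
        from_attributes=True,
        populate_by_name=True,
    )

    # A shared mutable default such as `default=[]` is unsafe across instances;
    # `default_factory=list` builds a fresh list for each instance instead.
    tags: list[str] = Field(default_factory=list)
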
@@ -189,12 +188,11 @@ class TypedCustomField(BaseModel):
|
||||
class Pagination(BaseModel):
|
||||
"""Pagination in Apollo"""
|
||||
|
||||
model_config = ConfigDict(
|
||||
extra="allow",
|
||||
arbitrary_types_allowed=True,
|
||||
from_attributes=True,
|
||||
populate_by_name=True,
|
||||
)
|
||||
class Config:
|
||||
extra = "allow" # Allow extra fields
|
||||
arbitrary_types_allowed = True # Allow any type
|
||||
from_attributes = True # Allow from_orm
|
||||
populate_by_name = True # Allow field aliases to work both ways
|
||||
|
||||
page: int = 0
|
||||
per_page: int = 0
|
||||
@@ -232,12 +230,11 @@ class PhoneNumber(BaseModel):
|
||||
class Organization(BaseModel):
|
||||
"""An organization in Apollo"""
|
||||
|
||||
model_config = ConfigDict(
|
||||
extra="allow",
|
||||
arbitrary_types_allowed=True,
|
||||
from_attributes=True,
|
||||
populate_by_name=True,
|
||||
)
|
||||
class Config:
|
||||
extra = "allow"
|
||||
arbitrary_types_allowed = True
|
||||
from_attributes = True
|
||||
populate_by_name = True
|
||||
|
||||
id: Optional[str] = "N/A"
|
||||
name: Optional[str] = "N/A"
|
||||
@@ -271,12 +268,11 @@ class Organization(BaseModel):
|
||||
class Contact(BaseModel):
|
||||
"""A contact in Apollo"""
|
||||
|
||||
model_config = ConfigDict(
|
||||
extra="allow",
|
||||
arbitrary_types_allowed=True,
|
||||
from_attributes=True,
|
||||
populate_by_name=True,
|
||||
)
|
||||
class Config:
|
||||
extra = "allow"
|
||||
arbitrary_types_allowed = True
|
||||
from_attributes = True
|
||||
populate_by_name = True
|
||||
|
||||
contact_roles: list[Any] = []
|
||||
id: Optional[str] = None
|
||||
@@ -373,14 +369,14 @@ If a company has several office locations, results are still based on the headqu
|
||||
|
||||
To exclude companies based on location, use the organization_not_locations parameter.
|
||||
""",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
)
|
||||
organizations_not_locations: list[str] = SchemaField(
|
||||
description="""Exclude companies from search results based on the location of the company headquarters. You can use cities, US states, and countries as locations to exclude.
|
||||
|
||||
This parameter is useful for ensuring you do not prospect in an undesirable territory. For example, if you use ireland as a value, no Ireland-based companies will appear in your search results.
|
||||
""",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
)
|
||||
q_organization_keyword_tags: list[str] = SchemaField(
|
||||
description="""Filter search results based on keywords associated with companies. For example, you can enter mining as a value to return only companies that have an association with the mining industry."""
|
||||
@@ -394,7 +390,7 @@ If the value you enter for this parameter does not match with a company's name,
|
||||
description="""The Apollo IDs for the companies you want to include in your search results. Each company in the Apollo database is assigned a unique ID.
|
||||
|
||||
To find IDs, identify the values for organization_id when you call this endpoint.""",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
)
|
||||
max_results: int = SchemaField(
|
||||
description="""The maximum number of results to return. If you don't specify this parameter, the default is 100.""",
|
||||
@@ -447,14 +443,14 @@ Results also include job titles with the same terms, even if they are not exact
|
||||
|
||||
Use this parameter in combination with the person_seniorities[] parameter to find people based on specific job functions and seniority levels.
|
||||
""",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
placeholder="marketing manager",
|
||||
)
|
||||
person_locations: list[str] = SchemaField(
|
||||
description="""The location where people live. You can search across cities, US states, and countries.
|
||||
|
||||
To find people based on the headquarters locations of their current employer, use the organization_locations parameter.""",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
)
|
||||
person_seniorities: list[SenorityLevels] = SchemaField(
|
||||
description="""The job seniority that people hold within their current employer. This enables you to find people that currently hold positions at certain reporting levels, such as Director level or senior IC level.
|
||||
@@ -464,7 +460,7 @@ For a person to be included in search results, they only need to match 1 of the
|
||||
Searches only return results based on their current job title, so searching for Director-level employees only returns people that currently hold a Director-level title. If someone was previously a Director, but is currently a VP, they would not be included in your search results.
|
||||
|
||||
Use this parameter in combination with the person_titles[] parameter to find people based on specific job functions and seniority levels.""",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
)
|
||||
organization_locations: list[str] = SchemaField(
|
||||
description="""The location of the company headquarters for a person's current employer. You can search across cities, US states, and countries.
|
||||
@@ -472,7 +468,7 @@ Use this parameter in combination with the person_titles[] parameter to find peo
|
||||
If a company has several office locations, results are still based on the headquarters location. For example, if you search chicago but a company's HQ location is in boston, people that work for the Boston-based company will not appear in your results, even if they match other parameters.
|
||||
|
||||
To find people based on their personal location, use the person_locations parameter.""",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
)
|
||||
q_organization_domains: list[str] = SchemaField(
|
||||
description="""The domain name for the person's employer. This can be the current employer or a previous employer. Do not include www., the @ symbol, or similar.
|
||||
@@ -480,23 +476,23 @@ To find people based on their personal location, use the person_locations parame
|
||||
You can add multiple domains to search across companies.
|
||||
|
||||
Examples: apollo.io and microsoft.com""",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
)
|
||||
contact_email_statuses: list[ContactEmailStatuses] = SchemaField(
|
||||
description="""The email statuses for the people you want to find. You can add multiple statuses to expand your search.""",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
)
|
||||
organization_ids: list[str] = SchemaField(
|
||||
description="""The Apollo IDs for the companies (employers) you want to include in your search results. Each company in the Apollo database is assigned a unique ID.
|
||||
|
||||
To find IDs, call the Organization Search endpoint and identify the values for organization_id.""",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
)
|
||||
organization_num_empoloyees_range: list[int] = SchemaField(
|
||||
description="""The number range of employees working for the company. This enables you to find companies based on headcount. You can add multiple ranges to expand your search results.
|
||||
|
||||
Each range you add needs to be a string, with the upper and lower numbers of the range separated only by a comma.""",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
)
|
||||
q_keywords: str = SchemaField(
|
||||
description="""A string of words over which we want to filter the results""",
|
||||
@@ -526,12 +522,11 @@ Use the page parameter to search the different pages of data.""",
|
||||
class SearchPeopleResponse(BaseModel):
|
||||
"""Response from Apollo's search people API"""
|
||||
|
||||
model_config = ConfigDict(
|
||||
extra="allow",
|
||||
arbitrary_types_allowed=True,
|
||||
from_attributes=True,
|
||||
populate_by_name=True,
|
||||
)
|
||||
class Config:
|
||||
extra = "allow" # Allow extra fields
|
||||
arbitrary_types_allowed = True # Allow any type
|
||||
from_attributes = True # Allow from_orm
|
||||
populate_by_name = True # Allow field aliases to work both ways
|
||||
|
||||
breadcrumbs: list[Breadcrumb] = []
|
||||
partial_results_only: bool = True
|
||||
|
||||
@@ -32,18 +32,18 @@ If a company has several office locations, results are still based on the headqu
|
||||
|
||||
To exclude companies based on location, use the organization_not_locations parameter.
|
||||
""",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
)
|
||||
organizations_not_locations: list[str] = SchemaField(
|
||||
description="""Exclude companies from search results based on the location of the company headquarters. You can use cities, US states, and countries as locations to exclude.
|
||||
|
||||
This parameter is useful for ensuring you do not prospect in an undesirable territory. For example, if you use ireland as a value, no Ireland-based companies will appear in your search results.
|
||||
""",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
)
|
||||
q_organization_keyword_tags: list[str] = SchemaField(
|
||||
description="""Filter search results based on keywords associated with companies. For example, you can enter mining as a value to return only companies that have an association with the mining industry.""",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
)
|
||||
q_organization_name: str = SchemaField(
|
||||
description="""Filter search results to include a specific company name.
|
||||
@@ -56,7 +56,7 @@ If the value you enter for this parameter does not match with a company's name,
|
||||
description="""The Apollo IDs for the companies you want to include in your search results. Each company in the Apollo database is assigned a unique ID.
|
||||
|
||||
To find IDs, identify the values for organization_id when you call this endpoint.""",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
)
|
||||
max_results: int = SchemaField(
|
||||
description="""The maximum number of results to return. If you don't specify this parameter, the default is 100.""",
|
||||
@@ -72,7 +72,7 @@ To find IDs, identify the values for organization_id when you call this endpoint
|
||||
class Output(BlockSchema):
|
||||
organizations: list[Organization] = SchemaField(
|
||||
description="List of organizations found",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
)
|
||||
organization: Organization = SchemaField(
|
||||
description="Each found organization, one at a time",
|
||||
|
||||
@@ -26,14 +26,14 @@ class SearchPeopleBlock(Block):
|
||||
|
||||
Use this parameter in combination with the person_seniorities[] parameter to find people based on specific job functions and seniority levels.
|
||||
""",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
advanced=False,
|
||||
)
|
||||
person_locations: list[str] = SchemaField(
|
||||
description="""The location where people live. You can search across cities, US states, and countries.
|
||||
|
||||
To find people based on the headquarters locations of their current employer, use the organization_locations parameter.""",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
advanced=False,
|
||||
)
|
||||
person_seniorities: list[SenorityLevels] = SchemaField(
|
||||
@@ -44,7 +44,7 @@ class SearchPeopleBlock(Block):
|
||||
Searches only return results based on their current job title, so searching for Director-level employees only returns people that currently hold a Director-level title. If someone was previously a Director, but is currently a VP, they would not be included in your search results.
|
||||
|
||||
Use this parameter in combination with the person_titles[] parameter to find people based on specific job functions and seniority levels.""",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
advanced=False,
|
||||
)
|
||||
organization_locations: list[str] = SchemaField(
|
||||
@@ -53,7 +53,7 @@ class SearchPeopleBlock(Block):
|
||||
If a company has several office locations, results are still based on the headquarters location. For example, if you search chicago but a company's HQ location is in boston, people that work for the Boston-based company will not appear in your results, even if they match other parameters.
|
||||
|
||||
To find people based on their personal location, use the person_locations parameter.""",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
advanced=False,
|
||||
)
|
||||
q_organization_domains: list[str] = SchemaField(
|
||||
@@ -62,26 +62,26 @@ class SearchPeopleBlock(Block):
|
||||
You can add multiple domains to search across companies.
|
||||
|
||||
Examples: apollo.io and microsoft.com""",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
advanced=False,
|
||||
)
|
||||
contact_email_statuses: list[ContactEmailStatuses] = SchemaField(
|
||||
description="""The email statuses for the people you want to find. You can add multiple statuses to expand your search.""",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
advanced=False,
|
||||
)
|
||||
organization_ids: list[str] = SchemaField(
|
||||
description="""The Apollo IDs for the companies (employers) you want to include in your search results. Each company in the Apollo database is assigned a unique ID.
|
||||
|
||||
To find IDs, call the Organization Search endpoint and identify the values for organization_id.""",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
advanced=False,
|
||||
)
|
||||
organization_num_empoloyees_range: list[int] = SchemaField(
|
||||
description="""The number range of employees working for the company. This enables you to find companies based on headcount. You can add multiple ranges to expand your search results.
|
||||
|
||||
Each range you add needs to be a string, with the upper and lower numbers of the range separated only by a comma.""",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
advanced=False,
|
||||
)
|
||||
q_keywords: str = SchemaField(
|
||||
@@ -104,7 +104,7 @@ class SearchPeopleBlock(Block):
|
||||
class Output(BlockSchema):
|
||||
people: list[Contact] = SchemaField(
|
||||
description="List of people found",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
)
|
||||
person: Contact = SchemaField(
|
||||
description="Each found person, one at a time",
|
||||
|
||||
@@ -1,482 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from typing import Any, Dict, List, Optional, Union
|
||||
|
||||
from backend.util.request import Requests
|
||||
from backend.util.settings import Settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
settings = Settings()
|
||||
|
||||
|
||||
class AyrshareAPIException(Exception):
|
||||
def __init__(self, message: str, status_code: int):
|
||||
super().__init__(message)
|
||||
self.status_code = status_code
|
||||
|
||||
|
||||
class SocialPlatform(str, Enum):
|
||||
BLUESKY = "bluesky"
|
||||
FACEBOOK = "facebook"
|
||||
TWITTER = "twitter"
|
||||
LINKEDIN = "linkedin"
|
||||
INSTAGRAM = "instagram"
|
||||
YOUTUBE = "youtube"
|
||||
REDDIT = "reddit"
|
||||
TELEGRAM = "telegram"
|
||||
GMB = "gmb"
|
||||
PINTEREST = "pinterest"
|
||||
TIKTOK = "tiktok"
|
||||
|
||||
|
||||
@dataclass
|
||||
class EmailConfig:
|
||||
to: str
|
||||
subject: Optional[str] = None
|
||||
body: Optional[str] = None
|
||||
from_name: Optional[str] = None
|
||||
from_email: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class JWTResponse:
|
||||
status: str
|
||||
title: str
|
||||
token: str
|
||||
url: str
|
||||
emailSent: Optional[bool] = None
|
||||
expiresIn: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class ProfileResponse:
|
||||
status: str
|
||||
title: str
|
||||
refId: str
|
||||
profileKey: str
|
||||
messagingActive: Optional[bool] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
class PostResponse:
|
||||
status: str
|
||||
id: str
|
||||
refId: str
|
||||
profileTitle: str
|
||||
post: str
|
||||
postIds: Optional[List[Dict[str, Any]]] = None
|
||||
scheduleDate: Optional[str] = None
|
||||
errors: Optional[List[str]] = None
|
||||
|
||||
|
@dataclass
class AutoHashtag:
    max: Optional[int] = None
    position: Optional[str] = None


@dataclass
class FirstComment:
    text: str
    platforms: Optional[List[SocialPlatform]] = None


@dataclass
class AutoSchedule:
    interval: str
    platforms: Optional[List[SocialPlatform]] = None
    startDate: Optional[str] = None
    endDate: Optional[str] = None


@dataclass
class AutoRepost:
    interval: str
    platforms: Optional[List[SocialPlatform]] = None
    startDate: Optional[str] = None
    endDate: Optional[str] = None


@dataclass
class PostError:
    code: int
    message: str
    details: str


class AyrshareClient:
    """Client for the Ayrshare Social Media Post API"""

    API_URL = "https://api.ayrshare.com/api"
    POST_ENDPOINT = f"{API_URL}/post"
    PROFILES_ENDPOINT = f"{API_URL}/profiles"
    JWT_ENDPOINT = f"{PROFILES_ENDPOINT}/generateJWT"

    def __init__(
        self,
        custom_requests: Optional[Requests] = None,
    ):
        headers: Dict[str, str] = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {settings.secrets.ayrshare_api_key}",
        }
        self.headers = headers

        if custom_requests:
            self._requests = custom_requests
        else:
            self._requests = Requests(
                extra_headers=headers,
                trusted_origins=["https://api.ayrshare.com"],
                raise_for_status=False,
            )

    def generate_jwt(
        self,
        private_key: str,
        profile_key: str,
        logout: Optional[bool] = None,
        redirect: Optional[str] = None,
        allowed_social: Optional[List[SocialPlatform]] = None,
        verify: Optional[bool] = None,
        base64: Optional[bool] = None,
        expires_in: Optional[int] = None,
        email: Optional[EmailConfig] = None,
    ) -> JWTResponse:
        """
        Generate a JSON Web Token (JWT) for use with single sign-on.

        Args:
            private_key: Private Key used for encryption.
            profile_key: User Profile Key (not the API Key).
            logout: Automatically log out the current session.
            redirect: URL to redirect to when the "Done" button or logo is clicked.
            allowed_social: List of social networks to display in the linking page.
            verify: Verify that the generated token is valid (recommended for non-production).
            base64: Whether the private key is base64 encoded.
            expires_in: Token longevity in minutes (1-2880).
            email: Configuration for sending the Connect Accounts email.

        Returns:
            JWTResponse object containing the JWT token and URL.

        Raises:
            AyrshareAPIException: If the API request fails or the private key is invalid.
        """
        payload: Dict[str, Any] = {
            "domain": "id-pojeg",
            "privateKey": private_key,
            "profileKey": profile_key,
        }

        # Copy the shared headers so the Profile-Key does not leak into later requests.
        headers = self.headers.copy()
        headers["Profile-Key"] = profile_key
        if logout is not None:
            payload["logout"] = logout
        if redirect is not None:
            payload["redirect"] = redirect
        if allowed_social is not None:
            payload["allowedSocial"] = [p.value for p in allowed_social]
        if verify is not None:
            payload["verify"] = verify
        if base64 is not None:
            payload["base64"] = base64
        if expires_in is not None:
            payload["expiresIn"] = expires_in
        if email is not None:
            payload["email"] = email.__dict__

        response = self._requests.post(self.JWT_ENDPOINT, json=payload, headers=headers)

        if not response.ok:
            try:
                error_data = response.json()
                error_message = error_data.get("message", response.text)
            except json.JSONDecodeError:
                error_message = response.text

            raise AyrshareAPIException(
                f"Ayrshare API request failed ({response.status_code}): {error_message}",
                response.status_code,
            )

        response_data = response.json()
        if response_data.get("status") != "success":
            raise AyrshareAPIException(
                f"Ayrshare API returned error: {response_data.get('message', 'Unknown error')}",
                response.status_code,
            )

        return JWTResponse(**response_data)
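For reference, a minimal usage sketch of `generate_jwt` for building a single sign-on linking URL. This is illustrative only: the key values below are placeholders, and it assumes `settings.secrets.ayrshare_api_key` is already configured for the client.

```python
# Hypothetical usage sketch; the private key and profile key are placeholders.
client = AyrshareClient()

jwt_response = client.generate_jwt(
    private_key="-----BEGIN RSA PRIVATE KEY-----\n...placeholder...",
    profile_key="PROFILE-KEY-PLACEHOLDER",
    logout=True,      # start a fresh linking session
    expires_in=30,    # token valid for 30 minutes
)

# JWTResponse is expected to expose the hosted account-linking URL returned by the API.
print(jwt_response.url)
```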
    def create_profile(
        self,
        title: str,
        messaging_active: Optional[bool] = None,
        hide_top_header: Optional[bool] = None,
        top_header: Optional[str] = None,
        disable_social: Optional[List[SocialPlatform]] = None,
        team: Optional[bool] = None,
        email: Optional[str] = None,
        sub_header: Optional[str] = None,
        tags: Optional[List[str]] = None,
    ) -> ProfileResponse | PostError:
        """
        Create a new User Profile under your Primary Profile.

        Args:
            title: Title of the new profile. Must be unique.
            messaging_active: Set to true to activate messaging for this user profile.
            hide_top_header: Hide the top header on the social accounts linkage page.
            top_header: Change the header on the social accounts linkage page.
            disable_social: Array of social networks that are disabled for this user's profile.
            team: Create a new user profile as a team member.
            email: Email address for team member invite (required if team is true).
            sub_header: Change the sub header on the social accounts linkage page.
            tags: Array of strings to tag user profiles.

        Returns:
            ProfileResponse object containing the profile details and profile key.

        Raises:
            AyrshareAPIException: If the API request fails or the profile title already exists.
        """
        payload: Dict[str, Any] = {
            "title": title,
        }

        if messaging_active is not None:
            payload["messagingActive"] = messaging_active
        if hide_top_header is not None:
            payload["hideTopHeader"] = hide_top_header
        if top_header is not None:
            payload["topHeader"] = top_header
        if disable_social is not None:
            payload["disableSocial"] = [p.value for p in disable_social]
        if team is not None:
            payload["team"] = team
        if email is not None:
            payload["email"] = email
        if sub_header is not None:
            payload["subHeader"] = sub_header
        if tags is not None:
            payload["tags"] = tags

        response = self._requests.post(self.PROFILES_ENDPOINT, json=payload)

        if not response.ok:
            try:
                error_data = response.json()
                error_message = error_data.get("message", response.text)
            except json.JSONDecodeError:
                error_message = response.text

            raise AyrshareAPIException(
                f"Ayrshare API request failed ({response.status_code}): {error_message}",
                response.status_code,
            )

        response_data = response.json()
        if response_data.get("status") != "success":
            raise AyrshareAPIException(
                f"Ayrshare API returned error: {response_data.get('message', 'Unknown error')}",
                response.status_code,
            )

        return ProfileResponse(**response_data)

    def create_post(
        self,
        post: str,
        platforms: List[SocialPlatform],
        media_urls: Optional[List[str]] = None,
        is_video: Optional[bool] = None,
        schedule_date: Optional[str] = None,
        first_comment: Optional[FirstComment] = None,
        disable_comments: Optional[bool] = None,
        shorten_links: Optional[bool] = None,
        auto_schedule: Optional[AutoSchedule] = None,
        auto_repost: Optional[AutoRepost] = None,
        auto_hashtag: Optional[Union[AutoHashtag, bool]] = None,
        unsplash: Optional[str] = None,
        bluesky_options: Optional[Dict[str, Any]] = None,
        facebook_options: Optional[Dict[str, Any]] = None,
        gmb_options: Optional[Dict[str, Any]] = None,
        instagram_options: Optional[Dict[str, Any]] = None,
        linkedin_options: Optional[Dict[str, Any]] = None,
        pinterest_options: Optional[Dict[str, Any]] = None,
        reddit_options: Optional[Dict[str, Any]] = None,
        telegram_options: Optional[Dict[str, Any]] = None,
        threads_options: Optional[Dict[str, Any]] = None,
        tiktok_options: Optional[Dict[str, Any]] = None,
        twitter_options: Optional[Dict[str, Any]] = None,
        youtube_options: Optional[Dict[str, Any]] = None,
        requires_approval: Optional[bool] = None,
        random_post: Optional[bool] = None,
        random_media_url: Optional[bool] = None,
        idempotency_key: Optional[str] = None,
        notes: Optional[str] = None,
        profile_key: Optional[str] = None,
    ) -> PostResponse | PostError:
        """
        Create a post across multiple social media platforms.

        Args:
            post: The post text to be published
            platforms: List of platforms to post to (e.g. [SocialPlatform.TWITTER, SocialPlatform.FACEBOOK])
            media_urls: Optional list of media URLs to include
            is_video: Whether the media is a video
            schedule_date: UTC datetime for scheduling (YYYY-MM-DDThh:mm:ssZ)
            first_comment: Configuration for the first comment
            disable_comments: Whether to disable comments
            shorten_links: Whether to shorten links
            auto_schedule: Configuration for automatic scheduling
            auto_repost: Configuration for automatic reposting
            auto_hashtag: Configuration for automatic hashtags
            unsplash: Unsplash image configuration
            bluesky_options: Bluesky-specific options
            facebook_options: Facebook-specific options
            gmb_options: Google Business Profile options
            instagram_options: Instagram-specific options
            linkedin_options: LinkedIn-specific options
            pinterest_options: Pinterest-specific options
            reddit_options: Reddit-specific options
            telegram_options: Telegram-specific options
            threads_options: Threads-specific options
            tiktok_options: TikTok-specific options
            twitter_options: Twitter-specific options
            youtube_options: YouTube-specific options
            requires_approval: Whether to enable the approval workflow
            random_post: Whether to generate random post text
            random_media_url: Whether to generate random media
            idempotency_key: Unique ID for the post
            notes: Additional notes for the post
            profile_key: Profile Key of the user profile to post on behalf of

        Returns:
            PostResponse object containing the post details and status

        Raises:
            AyrshareAPIException: If the API request fails
        """
        payload: Dict[str, Any] = {
            "post": post,
            "platforms": [p.value for p in platforms],
        }

        # Add optional parameters if provided
        if media_urls:
            payload["mediaUrls"] = media_urls
        if is_video is not None:
            payload["isVideo"] = is_video
        if schedule_date:
            payload["scheduleDate"] = schedule_date
        if first_comment:
            first_comment_dict = first_comment.__dict__.copy()
            if first_comment.platforms:
                first_comment_dict["platforms"] = [
                    p.value for p in first_comment.platforms
                ]
            payload["firstComment"] = first_comment_dict
        if disable_comments is not None:
            payload["disableComments"] = disable_comments
        if shorten_links is not None:
            payload["shortenLinks"] = shorten_links
        if auto_schedule:
            auto_schedule_dict = auto_schedule.__dict__.copy()
            if auto_schedule.platforms:
                auto_schedule_dict["platforms"] = [
                    p.value for p in auto_schedule.platforms
                ]
            payload["autoSchedule"] = auto_schedule_dict
        if auto_repost:
            auto_repost_dict = auto_repost.__dict__.copy()
            if auto_repost.platforms:
                auto_repost_dict["platforms"] = [p.value for p in auto_repost.platforms]
            payload["autoRepost"] = auto_repost_dict
        if auto_hashtag:
            payload["autoHashtag"] = (
                auto_hashtag.__dict__
                if isinstance(auto_hashtag, AutoHashtag)
                else auto_hashtag
            )
        if unsplash:
            payload["unsplash"] = unsplash
        if bluesky_options:
            payload["blueskyOptions"] = bluesky_options
        if facebook_options:
            payload["faceBookOptions"] = facebook_options
        if gmb_options:
            payload["gmbOptions"] = gmb_options
        if instagram_options:
            payload["instagramOptions"] = instagram_options
        if linkedin_options:
            payload["linkedInOptions"] = linkedin_options
        if pinterest_options:
            payload["pinterestOptions"] = pinterest_options
        if reddit_options:
            payload["redditOptions"] = reddit_options
        if telegram_options:
            payload["telegramOptions"] = telegram_options
        if threads_options:
            payload["threadsOptions"] = threads_options
        if tiktok_options:
            payload["tikTokOptions"] = tiktok_options
        if twitter_options:
            payload["twitterOptions"] = twitter_options
        if youtube_options:
            payload["youTubeOptions"] = youtube_options
        if requires_approval is not None:
            payload["requiresApproval"] = requires_approval
        if random_post is not None:
            payload["randomPost"] = random_post
        if random_media_url is not None:
            payload["randomMediaUrl"] = random_media_url
        if idempotency_key:
            payload["idempotencyKey"] = idempotency_key
        if notes:
            payload["notes"] = notes

        # Copy the shared headers so the Profile-Key does not leak into later requests.
        headers = self.headers.copy()
        if profile_key:
            headers["Profile-Key"] = profile_key

        response = self._requests.post(
            self.POST_ENDPOINT, json=payload, headers=headers
        )

        if not response.ok:
            try:
                error_data = response.json()
                error_message = error_data.get("message", response.text)
                error_code = error_data.get("code", response.status_code)
                error_details = error_data.get("details", "")
                logger.error(error_data)
                return PostError(
                    code=error_code,
                    message=error_message,
                    details=error_details,
                )
            except json.JSONDecodeError:
                error_message = response.text

            raise AyrshareAPIException(
                f"Ayrshare API request failed ({response.status_code}): {error_message}",
                response.status_code,
            )

        response_data = response.json()
        if response_data.get("status") != "success":
            raise AyrshareAPIException(
                f"Ayrshare API returned error: {response_data.get('message', 'Unknown error')}",
                response.status_code,
            )

        # Return the first post from the response
        return PostResponse(**response_data["posts"][0])
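A minimal usage sketch of `create_post`, assuming the client is constructed as above; the profile key and media URL below are placeholders, and the fields read off the result are assumed to mirror the API response.

```python
# Hypothetical usage sketch; the profile key and media URL are placeholders.
client = AyrshareClient()

result = client.create_post(
    post="Launching our new docs today!",
    platforms=[SocialPlatform.TWITTER, SocialPlatform.LINKEDIN],
    media_urls=["https://example.com/launch.png"],
    schedule_date="2025-01-15T09:00:00Z",  # UTC, YYYY-MM-DDThh:mm:ssZ
    shorten_links=True,
    profile_key="PROFILE-KEY-PLACEHOLDER",
)

if isinstance(result, PostError):
    # API-level failures are returned as PostError rather than raised.
    print(f"Post failed ({result.code}): {result.message}")
else:
    # PostResponse is assumed to carry a status field mirroring the API response.
    print(f"Post created with status: {result.status}")
```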
@@ -1,531 +0,0 @@
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import List, Optional
|
||||
|
||||
from pydantic import BaseModel, Field, SecretStr
|
||||
|
||||
from backend.blocks.ayrshare._api import (
|
||||
AyrshareClient,
|
||||
PostError,
|
||||
PostResponse,
|
||||
SocialPlatform,
|
||||
)
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema, BlockType
|
||||
from backend.data.model import SchemaField
|
||||
from backend.integrations.credentials_store import IntegrationCredentialsStore
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
creads_store = IntegrationCredentialsStore()
|
||||
|
||||
|
||||
class RequestOutput(BaseModel):
|
||||
"""Base output model for Ayrshare social media posts."""
|
||||
|
||||
status: str = Field(..., description="Status of the post")
|
||||
id: str = Field(..., description="ID of the post")
|
||||
refId: str = Field(..., description="Reference ID of the post")
|
||||
profileTitle: str = Field(..., description="Title of the profile")
|
||||
post: str = Field(..., description="The post text")
|
||||
postIds: Optional[List[dict]] = Field(
|
||||
description="IDs of the posts on each platform"
|
||||
)
|
||||
scheduleDate: Optional[str] = Field(description="Scheduled date of the post")
|
||||
errors: Optional[List[str]] = Field(description="Any errors that occurred")
|
||||
|
||||
|
||||
class AyrsharePostBlockBase(Block):
|
||||
"""Base class for Ayrshare social media posting blocks."""
|
||||
|
||||
class Input(BlockSchema):
|
||||
"""Base input model for Ayrshare social media posts."""
|
||||
|
||||
post: str = SchemaField(
|
||||
description="The post text to be published", default="", advanced=False
|
||||
)
|
||||
media_urls: List[str] = SchemaField(
|
||||
description="Optional list of media URLs to include. Set is_video in advanced settings to true if you want to upload videos.",
|
||||
default_factory=list,
|
||||
advanced=False,
|
||||
)
|
||||
is_video: bool = SchemaField(
|
||||
description="Whether the media is a video", default=False, advanced=True
|
||||
)
|
||||
schedule_date: Optional[datetime] = SchemaField(
|
||||
description="UTC datetime for scheduling (YYYY-MM-DDThh:mm:ssZ)",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
disable_comments: bool = SchemaField(
|
||||
description="Whether to disable comments", default=False, advanced=True
|
||||
)
|
||||
shorten_links: bool = SchemaField(
|
||||
description="Whether to shorten links", default=False, advanced=True
|
||||
)
|
||||
|
||||
unsplash: Optional[str] = SchemaField(
|
||||
description="Unsplash image configuration", default=None, advanced=True
|
||||
)
|
||||
requires_approval: bool = SchemaField(
|
||||
description="Whether to enable approval workflow",
|
||||
default=False,
|
||||
advanced=True,
|
||||
)
|
||||
random_post: bool = SchemaField(
|
||||
description="Whether to generate random post text",
|
||||
default=False,
|
||||
advanced=True,
|
||||
)
|
||||
random_media_url: bool = SchemaField(
|
||||
description="Whether to generate random media", default=False, advanced=True
|
||||
)
|
||||
notes: Optional[str] = SchemaField(
|
||||
description="Additional notes for the post", default=None, advanced=True
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
post_result: RequestOutput = SchemaField(description="The result of the post")
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
id="b3a7b3b9-5169-410a-9d5c-fd625460fb14",
|
||||
description="Ayrshare Post",
|
||||
):
|
||||
super().__init__(
|
||||
# The unique identifier for the block, this value will be persisted in the DB.
|
||||
# It should be unique and constant across the application run.
|
||||
# Use the UUID format for the ID.
|
||||
id=id,
|
||||
# The description of the block, explaining what the block does.
|
||||
description=description,
|
||||
# The set of categories that the block belongs to.
|
||||
# Each category is an instance of BlockCategory Enum.
|
||||
categories={BlockCategory.SOCIAL},
|
||||
# The type of block, this is used to determine the block type in the UI.
|
||||
block_type=BlockType.AYRSHARE,
|
||||
# The schema, defined as a Pydantic model, for the input data.
|
||||
input_schema=AyrsharePostBlockBase.Input,
|
||||
# The schema, defined as a Pydantic model, for the output data.
|
||||
output_schema=AyrsharePostBlockBase.Output,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def create_client():
|
||||
return AyrshareClient()
|
||||
|
||||
def _create_post(
|
||||
self,
|
||||
input_data: "AyrsharePostBlockBase.Input",
|
||||
platforms: List[SocialPlatform],
|
||||
profile_key: Optional[str] = None,
|
||||
) -> PostResponse | PostError:
|
||||
client = self.create_client()
|
||||
"""Create a post on the specified platforms."""
|
||||
iso_date = (
|
||||
input_data.schedule_date.isoformat() if input_data.schedule_date else None
|
||||
)
|
||||
response = client.create_post(
|
||||
post=input_data.post,
|
||||
platforms=platforms,
|
||||
media_urls=input_data.media_urls,
|
||||
is_video=input_data.is_video,
|
||||
schedule_date=iso_date,
|
||||
disable_comments=input_data.disable_comments,
|
||||
shorten_links=input_data.shorten_links,
|
||||
unsplash=input_data.unsplash,
|
||||
requires_approval=input_data.requires_approval,
|
||||
random_post=input_data.random_post,
|
||||
random_media_url=input_data.random_media_url,
|
||||
notes=input_data.notes,
|
||||
profile_key=profile_key,
|
||||
)
|
||||
return response
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: "AyrsharePostBlockBase.Input",
|
||||
*,
|
||||
profile_key: SecretStr,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
"""Run the block."""
|
||||
platforms = [SocialPlatform.FACEBOOK]
|
||||
|
||||
if not profile_key:
|
||||
yield "error", "Please Link a social account via Ayrshare"
|
||||
return
|
||||
|
||||
post_result = self._create_post(
|
||||
input_data, platforms=platforms, profile_key=profile_key.get_secret_value()
|
||||
)
|
||||
if isinstance(post_result, PostError):
|
||||
yield "error", post_result.message
|
||||
return
|
||||
yield "post_result", post_result
|
||||
|
||||
|
||||
class PostToFacebookBlock(AyrsharePostBlockBase):
|
||||
"""Block for posting to Facebook."""
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="3352f512-3524-49ed-a08f-003042da2fc1",
|
||||
description="Post to Facebook using Ayrshare",
|
||||
)
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: AyrsharePostBlockBase.Input,
|
||||
*,
|
||||
profile_key: SecretStr,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
"""Post to Facebook."""
|
||||
if not profile_key:
|
||||
yield "error", "Please Link a social account via Ayrshare"
|
||||
return
|
||||
|
||||
post_result = self._create_post(
|
||||
input_data,
|
||||
[SocialPlatform.FACEBOOK],
|
||||
profile_key=profile_key.get_secret_value(),
|
||||
)
|
||||
if isinstance(post_result, PostError):
|
||||
yield "error", post_result.message
|
||||
return
|
||||
yield "post_result", post_result
|
||||
|
||||
|
||||
class PostToXBlock(AyrsharePostBlockBase):
|
||||
"""Block for posting to X / Twitter."""
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="9e8f844e-b4a5-4b25-80f2-9e1dd7d67625",
|
||||
description="Post to X / Twitter using Ayrshare",
|
||||
)
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: AyrsharePostBlockBase.Input,
|
||||
*,
|
||||
profile_key: SecretStr,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
"""Post to Twitter."""
|
||||
if not profile_key:
|
||||
yield "error", "Please Link a social account via Ayrshare"
|
||||
return
|
||||
|
||||
post_result = self._create_post(
|
||||
input_data,
|
||||
[SocialPlatform.TWITTER],
|
||||
profile_key=profile_key.get_secret_value(),
|
||||
)
|
||||
if isinstance(post_result, PostError):
|
||||
yield "error", post_result.message
|
||||
return
|
||||
yield "post_result", post_result
|
||||
|
||||
|
||||
class PostToLinkedInBlock(AyrsharePostBlockBase):
|
||||
"""Block for posting to LinkedIn."""
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="589af4e4-507f-42fd-b9ac-a67ecef25811",
|
||||
description="Post to LinkedIn using Ayrshare",
|
||||
)
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: AyrsharePostBlockBase.Input,
|
||||
*,
|
||||
profile_key: SecretStr,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
"""Post to LinkedIn."""
|
||||
if not profile_key:
|
||||
yield "error", "Please Link a social account via Ayrshare"
|
||||
return
|
||||
|
||||
post_result = self._create_post(
|
||||
input_data,
|
||||
[SocialPlatform.LINKEDIN],
|
||||
profile_key=profile_key.get_secret_value(),
|
||||
)
|
||||
if isinstance(post_result, PostError):
|
||||
yield "error", post_result.message
|
||||
return
|
||||
yield "post_result", post_result
|
||||
|
||||
|
||||
class PostToInstagramBlock(AyrsharePostBlockBase):
|
||||
"""Block for posting to Instagram."""
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="89b02b96-a7cb-46f4-9900-c48b32fe1552",
|
||||
description="Post to Instagram using Ayrshare",
|
||||
)
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: AyrsharePostBlockBase.Input,
|
||||
*,
|
||||
profile_key: SecretStr,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
"""Post to Instagram."""
|
||||
if not profile_key:
|
||||
yield "error", "Please Link a social account via Ayrshare"
|
||||
return
|
||||
|
||||
post_result = self._create_post(
|
||||
input_data,
|
||||
[SocialPlatform.INSTAGRAM],
|
||||
profile_key=profile_key.get_secret_value(),
|
||||
)
|
||||
if isinstance(post_result, PostError):
|
||||
yield "error", post_result.message
|
||||
return
|
||||
yield "post_result", post_result
|
||||
|
||||
|
||||
class PostToYouTubeBlock(AyrsharePostBlockBase):
|
||||
"""Block for posting to YouTube."""
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="0082d712-ff1b-4c3d-8a8d-6c7721883b83",
|
||||
description="Post to YouTube using Ayrshare",
|
||||
)
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: AyrsharePostBlockBase.Input,
|
||||
*,
|
||||
profile_key: SecretStr,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
"""Post to YouTube."""
|
||||
if not profile_key:
|
||||
yield "error", "Please Link a social account via Ayrshare"
|
||||
return
|
||||
|
||||
post_result = self._create_post(
|
||||
input_data,
|
||||
[SocialPlatform.YOUTUBE],
|
||||
profile_key=profile_key.get_secret_value(),
|
||||
)
|
||||
if isinstance(post_result, PostError):
|
||||
yield "error", post_result.message
|
||||
return
|
||||
yield "post_result", post_result
|
||||
|
||||
|
||||
class PostToRedditBlock(AyrsharePostBlockBase):
|
||||
"""Block for posting to Reddit."""
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="c7733580-3c72-483e-8e47-a8d58754d853",
|
||||
description="Post to Reddit using Ayrshare",
|
||||
)
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: AyrsharePostBlockBase.Input,
|
||||
*,
|
||||
profile_key: SecretStr,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
"""Post to Reddit."""
|
||||
if not profile_key:
|
||||
yield "error", "Please Link a social account via Ayrshare"
|
||||
return
|
||||
|
||||
post_result = self._create_post(
|
||||
input_data,
|
||||
[SocialPlatform.REDDIT],
|
||||
profile_key=profile_key.get_secret_value(),
|
||||
)
|
||||
if isinstance(post_result, PostError):
|
||||
yield "error", post_result.message
|
||||
return
|
||||
yield "post_result", post_result
|
||||
|
||||
|
||||
class PostToTelegramBlock(AyrsharePostBlockBase):
|
||||
"""Block for posting to Telegram."""
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="47bc74eb-4af2-452c-b933-af377c7287df",
|
||||
description="Post to Telegram using Ayrshare",
|
||||
)
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: AyrsharePostBlockBase.Input,
|
||||
*,
|
||||
profile_key: SecretStr,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
"""Post to Telegram."""
|
||||
if not profile_key:
|
||||
yield "error", "Please Link a social account via Ayrshare"
|
||||
return
|
||||
|
||||
post_result = self._create_post(
|
||||
input_data,
|
||||
[SocialPlatform.TELEGRAM],
|
||||
profile_key=profile_key.get_secret_value(),
|
||||
)
|
||||
if isinstance(post_result, PostError):
|
||||
yield "error", post_result.message
|
||||
return
|
||||
yield "post_result", post_result
|
||||
|
||||
|
||||
class PostToGMBBlock(AyrsharePostBlockBase):
|
||||
"""Block for posting to Google My Business."""
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="2c38c783-c484-4503-9280-ef5d1d345a7e",
|
||||
description="Post to Google My Business using Ayrshare",
|
||||
)
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: AyrsharePostBlockBase.Input,
|
||||
*,
|
||||
profile_key: SecretStr,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
"""Post to Google My Business."""
|
||||
if not profile_key:
|
||||
yield "error", "Please Link a social account via Ayrshare"
|
||||
return
|
||||
|
||||
post_result = self._create_post(
|
||||
input_data,
|
||||
[SocialPlatform.GMB],
|
||||
profile_key=profile_key.get_secret_value(),
|
||||
)
|
||||
if isinstance(post_result, PostError):
|
||||
yield "error", post_result.message
|
||||
return
|
||||
yield "post_result", post_result
|
||||
|
||||
|
||||
class PostToPinterestBlock(AyrsharePostBlockBase):
|
||||
"""Block for posting to Pinterest."""
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="3ca46e05-dbaa-4afb-9e95-5a429c4177e6",
|
||||
description="Post to Pinterest using Ayrshare",
|
||||
)
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: AyrsharePostBlockBase.Input,
|
||||
*,
|
||||
profile_key: SecretStr,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
"""Post to Pinterest."""
|
||||
if not profile_key:
|
||||
yield "error", "Please Link a social account via Ayrshare"
|
||||
return
|
||||
|
||||
post_result = self._create_post(
|
||||
input_data,
|
||||
[SocialPlatform.PINTEREST],
|
||||
profile_key=profile_key.get_secret_value(),
|
||||
)
|
||||
if isinstance(post_result, PostError):
|
||||
yield "error", post_result.message
|
||||
return
|
||||
yield "post_result", post_result
|
||||
|
||||
|
||||
class PostToTikTokBlock(AyrsharePostBlockBase):
|
||||
"""Block for posting to TikTok."""
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="7faf4b27-96b0-4f05-bf64-e0de54ae74e1",
|
||||
description="Post to TikTok using Ayrshare",
|
||||
)
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: AyrsharePostBlockBase.Input,
|
||||
*,
|
||||
profile_key: SecretStr,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
"""Post to TikTok."""
|
||||
if not profile_key:
|
||||
yield "error", "Please Link a social account via Ayrshare"
|
||||
return
|
||||
|
||||
post_result = self._create_post(
|
||||
input_data,
|
||||
[SocialPlatform.TIKTOK],
|
||||
profile_key=profile_key.get_secret_value(),
|
||||
)
|
||||
if isinstance(post_result, PostError):
|
||||
yield "error", post_result.message
|
||||
return
|
||||
yield "post_result", post_result
|
||||
|
||||
|
||||
class PostToBlueskyBlock(AyrsharePostBlockBase):
|
||||
"""Block for posting to Bluesky."""
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="cbd52c2a-06d2-43ed-9560-6576cc163283",
|
||||
description="Post to Bluesky using Ayrshare",
|
||||
)
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: AyrsharePostBlockBase.Input,
|
||||
*,
|
||||
profile_key: SecretStr,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
"""Post to Bluesky."""
|
||||
if not profile_key:
|
||||
yield "error", "Please Link a social account via Ayrshare"
|
||||
return
|
||||
|
||||
post_result = self._create_post(
|
||||
input_data,
|
||||
[SocialPlatform.BLUESKY],
|
||||
profile_key=profile_key.get_secret_value(),
|
||||
)
|
||||
if isinstance(post_result, PostError):
|
||||
yield "error", post_result.message
|
||||
return
|
||||
yield "post_result", post_result
|
||||
|
||||
|
||||
AYRSHARE_NODE_IDS = [
|
||||
PostToBlueskyBlock().id,
|
||||
PostToFacebookBlock().id,
|
||||
PostToXBlock().id,
|
||||
PostToLinkedInBlock().id,
|
||||
PostToInstagramBlock().id,
|
||||
PostToYouTubeBlock().id,
|
||||
PostToRedditBlock().id,
|
||||
PostToTelegramBlock().id,
|
||||
PostToGMBBlock().id,
|
||||
PostToPinterestBlock().id,
|
||||
PostToTikTokBlock().id,
|
||||
]
|
||||
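The per-platform blocks above all follow the same shape, so supporting an additional network is mostly boilerplate. As a hedged sketch (the block ID below is a placeholder, and `SocialPlatform.THREADS` is assumed to exist in the platform enum), a Threads block would look like this; its `id` would also need to be appended to `AYRSHARE_NODE_IDS`:

```python
class PostToThreadsBlock(AyrsharePostBlockBase):
    """Hypothetical block for posting to Threads, following the same pattern."""

    def __init__(self):
        super().__init__(
            id="00000000-0000-4000-8000-000000000000",  # placeholder UUID, not a real block ID
            description="Post to Threads using Ayrshare",
        )

    def run(
        self,
        input_data: AyrsharePostBlockBase.Input,
        *,
        profile_key: SecretStr,
        **kwargs,
    ) -> BlockOutput:
        """Post to Threads."""
        if not profile_key:
            yield "error", "Please Link a social account via Ayrshare"
            return

        post_result = self._create_post(
            input_data,
            [SocialPlatform.THREADS],  # assumes the enum defines THREADS
            profile_key=profile_key.get_secret_value(),
        )
        if isinstance(post_result, PostError):
            yield "error", post_result.message
            return
        yield "post_result", post_result
```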
@@ -4,19 +4,22 @@ from typing import Any, List
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema, BlockType
|
||||
from backend.data.model import SchemaField
|
||||
from backend.util import json
|
||||
from backend.util.file import store_media_file
|
||||
from backend.util.file import MediaFile, store_media_file
|
||||
from backend.util.mock import MockObject
|
||||
from backend.util.type import MediaFileType, convert
|
||||
from backend.util.text import TextFormatter
|
||||
from backend.util.type import convert
|
||||
|
||||
formatter = TextFormatter()
|
||||
|
||||
|
||||
class FileStoreBlock(Block):
|
||||
class Input(BlockSchema):
|
||||
file_in: MediaFileType = SchemaField(
|
||||
file_in: MediaFile = SchemaField(
|
||||
description="The file to store in the temporary directory, it can be a URL, data URI, or local path."
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
file_out: MediaFileType = SchemaField(
|
||||
file_out: MediaFile = SchemaField(
|
||||
description="The relative path to the stored file in the temporary directory."
|
||||
)
|
||||
|
||||
@@ -90,10 +93,9 @@ class StoreValueBlock(Block):
|
||||
|
||||
class PrintToConsoleBlock(Block):
|
||||
class Input(BlockSchema):
|
||||
text: Any = SchemaField(description="The data to print to the console.")
|
||||
text: str = SchemaField(description="The text to print to the console.")
|
||||
|
||||
class Output(BlockSchema):
|
||||
output: Any = SchemaField(description="The data printed to the console.")
|
||||
status: str = SchemaField(description="The status of the print operation.")
|
||||
|
||||
def __init__(self):
|
||||
@@ -104,14 +106,11 @@ class PrintToConsoleBlock(Block):
|
||||
input_schema=PrintToConsoleBlock.Input,
|
||||
output_schema=PrintToConsoleBlock.Output,
|
||||
test_input={"text": "Hello, World!"},
|
||||
test_output=[
|
||||
("output", "Hello, World!"),
|
||||
("status", "printed"),
|
||||
],
|
||||
test_output=("status", "printed"),
|
||||
)
|
||||
|
||||
def run(self, input_data: Input, **kwargs) -> BlockOutput:
|
||||
yield "output", input_data.text
|
||||
print(">>>>> Print: ", input_data.text)
|
||||
yield "status", "printed"
|
||||
|
||||
|
||||
@@ -175,10 +174,192 @@ class FindInDictionaryBlock(Block):
|
||||
yield "missing", input_data.input
|
||||
|
||||
|
||||
class AgentInputBlock(Block):
|
||||
"""
|
||||
This block is used to provide input to the graph.
|
||||
|
||||
It takes in a value, name, description, default values list and bool to limit selection to default values.
|
||||
|
||||
It Outputs the value passed as input.
|
||||
"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
name: str = SchemaField(description="The name of the input.")
|
||||
value: Any = SchemaField(
|
||||
description="The value to be passed as input.",
|
||||
default=None,
|
||||
)
|
||||
title: str | None = SchemaField(
|
||||
description="The title of the input.", default=None, advanced=True
|
||||
)
|
||||
description: str | None = SchemaField(
|
||||
description="The description of the input.",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
placeholder_values: List[Any] = SchemaField(
|
||||
description="The placeholder values to be passed as input.",
|
||||
default=[],
|
||||
advanced=True,
|
||||
)
|
||||
limit_to_placeholder_values: bool = SchemaField(
|
||||
description="Whether to limit the selection to placeholder values.",
|
||||
default=False,
|
||||
advanced=True,
|
||||
)
|
||||
advanced: bool = SchemaField(
|
||||
description="Whether to show the input in the advanced section, if the field is not required.",
|
||||
default=False,
|
||||
advanced=True,
|
||||
)
|
||||
secret: bool = SchemaField(
|
||||
description="Whether the input should be treated as a secret.",
|
||||
default=False,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
result: Any = SchemaField(description="The value passed as input.")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="c0a8e994-ebf1-4a9c-a4d8-89d09c86741b",
|
||||
description="This block is used to provide input to the graph.",
|
||||
input_schema=AgentInputBlock.Input,
|
||||
output_schema=AgentInputBlock.Output,
|
||||
test_input=[
|
||||
{
|
||||
"value": "Hello, World!",
|
||||
"name": "input_1",
|
||||
"description": "This is a test input.",
|
||||
"placeholder_values": [],
|
||||
"limit_to_placeholder_values": False,
|
||||
},
|
||||
{
|
||||
"value": "Hello, World!",
|
||||
"name": "input_2",
|
||||
"description": "This is a test input.",
|
||||
"placeholder_values": ["Hello, World!"],
|
||||
"limit_to_placeholder_values": True,
|
||||
},
|
||||
],
|
||||
test_output=[
|
||||
("result", "Hello, World!"),
|
||||
("result", "Hello, World!"),
|
||||
],
|
||||
categories={BlockCategory.INPUT, BlockCategory.BASIC},
|
||||
block_type=BlockType.INPUT,
|
||||
static_output=True,
|
||||
)
|
||||
|
||||
def run(self, input_data: Input, **kwargs) -> BlockOutput:
|
||||
yield "result", input_data.value
|
||||
|
||||
|
||||
class AgentOutputBlock(Block):
|
||||
"""
|
||||
Records the output of the graph for users to see.
|
||||
|
||||
Behavior:
|
||||
If `format` is provided and the `value` is of a type that can be formatted,
|
||||
the block attempts to format the recorded_value using the `format`.
|
||||
If formatting fails or no `format` is provided, the raw `value` is output.
|
||||
"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
value: Any = SchemaField(
|
||||
description="The value to be recorded as output.",
|
||||
default=None,
|
||||
advanced=False,
|
||||
)
|
||||
name: str = SchemaField(description="The name of the output.")
|
||||
title: str | None = SchemaField(
|
||||
description="The title of the output.",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
description: str | None = SchemaField(
|
||||
description="The description of the output.",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
format: str = SchemaField(
|
||||
description="The format string to be used to format the recorded_value. Use Jinja2 syntax.",
|
||||
default="",
|
||||
advanced=True,
|
||||
)
|
||||
advanced: bool = SchemaField(
|
||||
description="Whether to treat the output as advanced.",
|
||||
default=False,
|
||||
advanced=True,
|
||||
)
|
||||
secret: bool = SchemaField(
|
||||
description="Whether the output should be treated as a secret.",
|
||||
default=False,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
output: Any = SchemaField(description="The value recorded as output.")
|
||||
name: Any = SchemaField(description="The name of the value recorded as output.")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="363ae599-353e-4804-937e-b2ee3cef3da4",
|
||||
description="Stores the output of the graph for users to see.",
|
||||
input_schema=AgentOutputBlock.Input,
|
||||
output_schema=AgentOutputBlock.Output,
|
||||
test_input=[
|
||||
{
|
||||
"value": "Hello, World!",
|
||||
"name": "output_1",
|
||||
"description": "This is a test output.",
|
||||
"format": "{{ output_1 }}!!",
|
||||
},
|
||||
{
|
||||
"value": "42",
|
||||
"name": "output_2",
|
||||
"description": "This is another test output.",
|
||||
"format": "{{ output_2 }}",
|
||||
},
|
||||
{
|
||||
"value": MockObject(value="!!", key="key"),
|
||||
"name": "output_3",
|
||||
"description": "This is a test output with a mock object.",
|
||||
"format": "{{ output_3 }}",
|
||||
},
|
||||
],
|
||||
test_output=[
|
||||
("output", "Hello, World!!!"),
|
||||
("output", "42"),
|
||||
("output", MockObject(value="!!", key="key")),
|
||||
],
|
||||
categories={BlockCategory.OUTPUT, BlockCategory.BASIC},
|
||||
block_type=BlockType.OUTPUT,
|
||||
static_output=True,
|
||||
)
|
||||
|
||||
def run(self, input_data: Input, **kwargs) -> BlockOutput:
|
||||
"""
|
||||
Attempts to format the recorded_value using the fmt_string if provided.
|
||||
If formatting fails or no fmt_string is given, returns the original recorded_value.
|
||||
"""
|
||||
if input_data.format:
|
||||
try:
|
||||
yield "output", formatter.format_string(
|
||||
input_data.format, {input_data.name: input_data.value}
|
||||
)
|
||||
except Exception as e:
|
||||
yield "output", f"Error: {e}, {input_data.value}"
|
||||
else:
|
||||
yield "output", input_data.value
|
||||
yield "name", input_data.name
|
||||
|
||||
|
||||
class AddToDictionaryBlock(Block):
|
||||
class Input(BlockSchema):
|
||||
dictionary: dict[Any, Any] = SchemaField(
|
||||
default_factory=dict,
|
||||
default={},
|
||||
description="The dictionary to add the entry to. If not provided, a new dictionary will be created.",
|
||||
)
|
||||
key: str = SchemaField(
|
||||
@@ -194,7 +375,7 @@ class AddToDictionaryBlock(Block):
|
||||
advanced=False,
|
||||
)
|
||||
entries: dict[Any, Any] = SchemaField(
|
||||
default_factory=dict,
|
||||
default={},
|
||||
description="The entries to add to the dictionary. This is the batch version of the `key` and `value` fields.",
|
||||
advanced=True,
|
||||
)
|
||||
@@ -256,7 +437,7 @@ class AddToDictionaryBlock(Block):
|
||||
class AddToListBlock(Block):
|
||||
class Input(BlockSchema):
|
||||
list: List[Any] = SchemaField(
|
||||
default_factory=list,
|
||||
default=[],
|
||||
advanced=False,
|
||||
description="The list to add the entry to. If not provided, a new list will be created.",
|
||||
)
|
||||
@@ -266,7 +447,7 @@ class AddToListBlock(Block):
|
||||
default=None,
|
||||
)
|
||||
entries: List[Any] = SchemaField(
|
||||
default_factory=lambda: list(),
|
||||
default=[],
|
||||
description="The entries to add to the list. This is the batch version of the `entry` field.",
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
@@ -55,7 +55,7 @@ class CodeExecutionBlock(Block):
|
||||
"These commands are executed with `sh`, in the foreground."
|
||||
),
|
||||
placeholder="pip install cowsay",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
advanced=False,
|
||||
)
|
||||
|
||||
@@ -207,7 +207,7 @@ class InstantiationBlock(Block):
|
||||
"These commands are executed with `sh`, in the foreground."
|
||||
),
|
||||
placeholder="pip install cowsay",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
advanced=False,
|
||||
)
|
||||
|
||||
|
||||
@@ -8,7 +8,6 @@ from backend.data.block import (
|
||||
BlockSchema,
|
||||
)
|
||||
from backend.data.model import SchemaField
|
||||
from backend.integrations.providers import ProviderName
|
||||
from backend.integrations.webhooks.compass import CompassWebhookType
|
||||
|
||||
|
||||
@@ -43,7 +42,7 @@ class CompassAITriggerBlock(Block):
|
||||
input_schema=CompassAITriggerBlock.Input,
|
||||
output_schema=CompassAITriggerBlock.Output,
|
||||
webhook_config=BlockManualWebhookConfig(
|
||||
provider=ProviderName.COMPASS,
|
||||
provider="compass",
|
||||
webhook_type=CompassWebhookType.TRANSCRIPTION,
|
||||
),
|
||||
test_input=[
|
||||
|
||||
@@ -34,7 +34,7 @@ class ReadCsvBlock(Block):
|
||||
)
|
||||
skip_columns: list[str] = SchemaField(
|
||||
description="The columns to skip from the start of the row",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
|
||||
@@ -49,7 +49,7 @@ class ExaContentsBlock(Block):
|
||||
class Output(BlockSchema):
|
||||
results: list = SchemaField(
|
||||
description="List of document contents",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
)
|
||||
error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
|
||||
@@ -38,11 +38,11 @@ class ExaSearchBlock(Block):
|
||||
)
|
||||
include_domains: List[str] = SchemaField(
|
||||
description="Domains to include in search",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
)
|
||||
exclude_domains: List[str] = SchemaField(
|
||||
description="Domains to exclude from search",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
advanced=True,
|
||||
)
|
||||
start_crawl_date: datetime = SchemaField(
|
||||
@@ -59,12 +59,12 @@ class ExaSearchBlock(Block):
|
||||
)
|
||||
include_text: List[str] = SchemaField(
|
||||
description="Text patterns to include",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
advanced=True,
|
||||
)
|
||||
exclude_text: List[str] = SchemaField(
|
||||
description="Text patterns to exclude",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
advanced=True,
|
||||
)
|
||||
contents: ContentSettings = SchemaField(
|
||||
@@ -76,7 +76,7 @@ class ExaSearchBlock(Block):
|
||||
class Output(BlockSchema):
|
||||
results: list = SchemaField(
|
||||
description="List of search results",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
)
|
||||
|
||||
def __init__(self):
|
||||
|
||||
@@ -26,12 +26,12 @@ class ExaFindSimilarBlock(Block):
|
||||
)
|
||||
include_domains: List[str] = SchemaField(
|
||||
description="Domains to include in search",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
advanced=True,
|
||||
)
|
||||
exclude_domains: List[str] = SchemaField(
|
||||
description="Domains to exclude from search",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
advanced=True,
|
||||
)
|
||||
start_crawl_date: datetime = SchemaField(
|
||||
@@ -48,12 +48,12 @@ class ExaFindSimilarBlock(Block):
|
||||
)
|
||||
include_text: List[str] = SchemaField(
|
||||
description="Text patterns to include (max 1 string, up to 5 words)",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
advanced=True,
|
||||
)
|
||||
exclude_text: List[str] = SchemaField(
|
||||
description="Text patterns to exclude (max 1 string, up to 5 words)",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
advanced=True,
|
||||
)
|
||||
contents: ContentSettings = SchemaField(
|
||||
@@ -65,7 +65,7 @@ class ExaFindSimilarBlock(Block):
|
||||
class Output(BlockSchema):
|
||||
results: List[Any] = SchemaField(
|
||||
description="List of similar documents with title, URL, published date, author, and score",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
)
|
||||
|
||||
def __init__(self):
|
||||
|
||||
@@ -42,7 +42,7 @@ class AIVideoGeneratorBlock(Block):
|
||||
description="Error message if video generation failed."
|
||||
)
|
||||
logs: list[str] = SchemaField(
|
||||
description="Generation progress logs.",
|
||||
description="Generation progress logs.", optional=True
|
||||
)
|
||||
|
||||
def __init__(self):
|
||||
|
||||
@@ -1,51 +0,0 @@
|
||||
from backend.data.block import (
|
||||
Block,
|
||||
BlockCategory,
|
||||
BlockManualWebhookConfig,
|
||||
BlockOutput,
|
||||
BlockSchema,
|
||||
)
|
||||
from backend.data.model import SchemaField
|
||||
from backend.integrations.providers import ProviderName
|
||||
from backend.integrations.webhooks.generic import GenericWebhookType
|
||||
|
||||
|
||||
class GenericWebhookTriggerBlock(Block):
|
||||
class Input(BlockSchema):
|
||||
payload: dict = SchemaField(hidden=True, default_factory=dict)
|
||||
constants: dict = SchemaField(
|
||||
description="The constants to be set when the block is put on the graph",
|
||||
default_factory=dict,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
payload: dict = SchemaField(
|
||||
description="The complete webhook payload that was received from the generic webhook."
|
||||
)
|
||||
constants: dict = SchemaField(
|
||||
description="The constants to be set when the block is put on the graph"
|
||||
)
|
||||
|
||||
example_payload = {"message": "Hello, World!"}
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="8fa8c167-2002-47ce-aba8-97572fc5d387",
|
||||
description="This block will output the contents of the generic input for the webhook.",
|
||||
categories={BlockCategory.INPUT},
|
||||
input_schema=GenericWebhookTriggerBlock.Input,
|
||||
output_schema=GenericWebhookTriggerBlock.Output,
|
||||
webhook_config=BlockManualWebhookConfig(
|
||||
provider=ProviderName.GENERIC_WEBHOOK,
|
||||
webhook_type=GenericWebhookType.PLAIN,
|
||||
),
|
||||
test_input={"constants": {"key": "value"}, "payload": self.example_payload},
|
||||
test_output=[
|
||||
("constants", {"key": "value"}),
|
||||
("payload", self.example_payload),
|
||||
],
|
||||
)
|
||||
|
||||
def run(self, input_data: Input, **kwargs) -> BlockOutput:
|
||||
yield "constants", input_data.constants
|
||||
yield "payload", input_data.payload
|
||||
@@ -12,7 +12,6 @@ from backend.data.block import (
|
||||
BlockWebhookConfig,
|
||||
)
|
||||
from backend.data.model import SchemaField
|
||||
from backend.integrations.providers import ProviderName
|
||||
|
||||
from ._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
@@ -37,7 +36,7 @@ class GitHubTriggerBase:
|
||||
placeholder="{owner}/{repo}",
|
||||
)
|
||||
# --8<-- [start:example-payload-field]
|
||||
payload: dict = SchemaField(hidden=True, default_factory=dict)
|
||||
payload: dict = SchemaField(hidden=True, default={})
|
||||
# --8<-- [end:example-payload-field]
|
||||
|
||||
class Output(BlockSchema):
|
||||
@@ -124,7 +123,7 @@ class GithubPullRequestTriggerBlock(GitHubTriggerBase, Block):
|
||||
output_schema=GithubPullRequestTriggerBlock.Output,
|
||||
# --8<-- [start:example-webhook_config]
|
||||
webhook_config=BlockWebhookConfig(
|
||||
provider=ProviderName.GITHUB,
|
||||
provider="github",
|
||||
webhook_type=GithubWebhookType.REPO,
|
||||
resource_format="{repo}",
|
||||
event_filter_input="events",
|
||||
|
||||
@@ -1,598 +0,0 @@
|
||||
import enum
|
||||
import uuid
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import Literal
|
||||
|
||||
from google.oauth2.credentials import Credentials
|
||||
from googleapiclient.discovery import build
|
||||
from pydantic import BaseModel
|
||||
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
from backend.util.settings import AppEnvironment, Settings
|
||||
|
||||
from ._auth import (
|
||||
GOOGLE_OAUTH_IS_CONFIGURED,
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
GoogleCredentials,
|
||||
GoogleCredentialsField,
|
||||
GoogleCredentialsInput,
|
||||
)
|
||||
|
||||
|
||||
class CalendarEvent(BaseModel):
|
||||
"""Structured representation of a Google Calendar event."""
|
||||
|
||||
id: str
|
||||
title: str
|
||||
start_time: str
|
||||
end_time: str
|
||||
is_all_day: bool
|
||||
location: str | None
|
||||
description: str | None
|
||||
organizer: str | None
|
||||
attendees: list[str]
|
||||
has_video_call: bool
|
||||
video_link: str | None
|
||||
calendar_link: str
|
||||
is_recurring: bool
|
||||
|
||||
|
||||
class GoogleCalendarReadEventsBlock(Block):
|
||||
class Input(BlockSchema):
|
||||
credentials: GoogleCredentialsInput = GoogleCredentialsField(
|
||||
["https://www.googleapis.com/auth/calendar.readonly"]
|
||||
)
|
||||
calendar_id: str = SchemaField(
|
||||
description="Calendar ID (use 'primary' for your main calendar)",
|
||||
default="primary",
|
||||
)
|
||||
max_events: int = SchemaField(
|
||||
description="Maximum number of events to retrieve", default=10
|
||||
)
|
||||
start_time: datetime = SchemaField(
|
||||
description="Retrieve events starting from this time",
|
||||
default_factory=lambda: datetime.now(tz=timezone.utc),
|
||||
)
|
||||
time_range_days: int = SchemaField(
|
||||
description="Number of days to look ahead for events", default=30
|
||||
)
|
||||
search_term: str | None = SchemaField(
|
||||
description="Optional search term to filter events by", default=None
|
||||
)
|
||||
|
||||
page_token: str | None = SchemaField(
|
||||
description="Page token from previous request to get the next batch of events. You can use this if you have lots of events you want to process in a loop",
|
||||
default=None,
|
||||
)
|
||||
include_declined_events: bool = SchemaField(
|
||||
description="Include events you've declined", default=False
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
events: list[CalendarEvent] = SchemaField(
|
||||
description="List of calendar events in the requested time range",
|
||||
default_factory=list,
|
||||
)
|
||||
event: CalendarEvent = SchemaField(
|
||||
description="One of the calendar events in the requested time range"
|
||||
)
|
||||
next_page_token: str | None = SchemaField(
|
||||
description="Token for retrieving the next page of events if more exist",
|
||||
default=None,
|
||||
)
|
||||
error: str = SchemaField(
|
||||
description="Error message if the request failed",
|
||||
)
|
||||
|
||||
def __init__(self):
|
||||
settings = Settings()
|
||||
|
||||
# Create realistic test data for events
|
||||
test_now = datetime.now(tz=timezone.utc)
|
||||
test_tomorrow = test_now + timedelta(days=1)
|
||||
|
||||
test_event_dict = {
|
||||
"id": "event1id",
|
||||
"title": "Team Meeting",
|
||||
"start_time": test_tomorrow.strftime("%Y-%m-%d %H:%M"),
|
||||
"end_time": (test_tomorrow + timedelta(hours=1)).strftime("%Y-%m-%d %H:%M"),
|
||||
"is_all_day": False,
|
||||
"location": "Conference Room A",
|
||||
"description": "Weekly team sync",
|
||||
"organizer": "manager@example.com",
|
||||
"attendees": ["colleague1@example.com", "colleague2@example.com"],
|
||||
"has_video_call": True,
|
||||
"video_link": "https://meet.google.com/abc-defg-hij",
|
||||
"calendar_link": "https://calendar.google.com/calendar/event?eid=event1id",
|
||||
"is_recurring": True,
|
||||
}
|
||||
|
||||
super().__init__(
|
||||
id="80bc3ed1-e9a4-449e-8163-a8fc86f74f6a",
|
||||
description="Retrieves upcoming events from a Google Calendar with filtering options",
|
||||
categories={BlockCategory.PRODUCTIVITY, BlockCategory.DATA},
|
||||
input_schema=GoogleCalendarReadEventsBlock.Input,
|
||||
output_schema=GoogleCalendarReadEventsBlock.Output,
|
||||
disabled=not GOOGLE_OAUTH_IS_CONFIGURED
|
||||
or settings.config.app_env == AppEnvironment.PRODUCTION,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"calendar_id": "primary",
|
||||
"max_events": 5,
|
||||
"start_time": test_now.isoformat(),
|
||||
"time_range_days": 7,
|
||||
"search_term": None,
|
||||
"include_declined_events": False,
|
||||
"page_token": None,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("event", test_event_dict),
|
||||
("events", [test_event_dict]),
|
||||
],
|
||||
test_mock={
|
||||
"_read_calendar": lambda *args, **kwargs: {
|
||||
"items": [
|
||||
{
|
||||
"id": "event1id",
|
||||
"summary": "Team Meeting",
|
||||
"start": {
|
||||
"dateTime": test_tomorrow.isoformat(),
|
||||
"timeZone": "UTC",
|
||||
},
|
||||
"end": {
|
||||
"dateTime": (
|
||||
test_tomorrow + timedelta(hours=1)
|
||||
).isoformat(),
|
||||
"timeZone": "UTC",
|
||||
},
|
||||
"location": "Conference Room A",
|
||||
"description": "Weekly team sync",
|
||||
"organizer": {"email": "manager@example.com"},
|
||||
"attendees": [
|
||||
{"email": "colleague1@example.com"},
|
||||
{"email": "colleague2@example.com"},
|
||||
],
|
||||
"conferenceData": {
|
||||
"conferenceUrl": "https://meet.google.com/abc-defg-hij"
|
||||
},
|
||||
"htmlLink": "https://calendar.google.com/calendar/event?eid=event1id",
|
||||
"recurrence": ["RRULE:FREQ=WEEKLY;COUNT=10"],
|
||||
}
|
||||
],
|
||||
"nextPageToken": None,
|
||||
},
|
||||
"_format_events": lambda *args, **kwargs: [test_event_dict],
|
||||
},
|
||||
)
|
||||
|
||||
def run(
|
||||
self, input_data: Input, *, credentials: GoogleCredentials, **kwargs
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
service = self._build_service(credentials, **kwargs)
|
||||
|
||||
# Calculate end time based on start time and time range
|
||||
end_time = input_data.start_time + timedelta(
|
||||
days=input_data.time_range_days
|
||||
)
|
||||
|
||||
# Call Google Calendar API
|
||||
result = self._read_calendar(
|
||||
service=service,
|
||||
calendarId=input_data.calendar_id,
|
||||
time_min=input_data.start_time.isoformat(),
|
||||
time_max=end_time.isoformat(),
|
||||
max_results=input_data.max_events,
|
||||
single_events=True,
|
||||
search_term=input_data.search_term,
|
||||
show_deleted=False,
|
||||
show_hidden=input_data.include_declined_events,
|
||||
page_token=input_data.page_token,
|
||||
)
|
||||
|
||||
# Format events into a user-friendly structure
|
||||
formatted_events = self._format_events(result.get("items", []))
|
||||
|
||||
# Include next page token if available
|
||||
if next_page_token := result.get("nextPageToken"):
|
||||
yield "next_page_token", next_page_token
|
||||
|
||||
for event in formatted_events:
|
||||
yield "event", event
|
||||
|
||||
yield "events", formatted_events
|
||||
|
||||
except Exception as e:
|
||||
yield "error", str(e)
|
||||
|
||||
@staticmethod
|
||||
def _build_service(credentials: GoogleCredentials, **kwargs):
|
||||
creds = Credentials(
|
||||
token=(
|
||||
credentials.access_token.get_secret_value()
|
||||
if credentials.access_token
|
||||
else None
|
||||
),
|
||||
refresh_token=(
|
||||
credentials.refresh_token.get_secret_value()
|
||||
if credentials.refresh_token
|
||||
else None
|
||||
),
|
||||
token_uri="https://oauth2.googleapis.com/token",
|
||||
client_id=Settings().secrets.google_client_id,
|
||||
client_secret=Settings().secrets.google_client_secret,
|
||||
scopes=credentials.scopes,
|
||||
)
|
||||
return build("calendar", "v3", credentials=creds)
|
||||
|
||||
def _read_calendar(
|
||||
self,
|
||||
service,
|
||||
calendarId: str,
|
||||
time_min: str,
|
||||
time_max: str,
|
||||
max_results: int,
|
||||
single_events: bool,
|
||||
search_term: str | None = None,
|
||||
show_deleted: bool = False,
|
||||
show_hidden: bool = False,
|
||||
page_token: str | None = None,
|
||||
) -> dict:
|
||||
"""Read calendar events with optional filtering."""
|
||||
calendar = service.events()
|
||||
|
||||
# Build query parameters
|
||||
params = {
|
||||
"calendarId": calendarId,
|
||||
"timeMin": time_min,
|
||||
"timeMax": time_max,
|
||||
"maxResults": max_results,
|
||||
"singleEvents": single_events,
|
||||
"orderBy": "startTime",
|
||||
"showDeleted": show_deleted,
|
||||
"showHiddenInvitations": show_hidden,
|
||||
**({"pageToken": page_token} if page_token else {}),
|
||||
}
|
||||
|
||||
# Add search term if provided
|
||||
if search_term:
|
||||
params["q"] = search_term
|
||||
|
||||
result = calendar.list(**params).execute()
|
||||
return result
|
||||
|
||||
def _format_events(self, events: list[dict]) -> list[CalendarEvent]:
|
||||
"""Format Google Calendar API events into user-friendly structure."""
|
||||
formatted_events = []
|
||||
|
||||
for event in events:
|
||||
# Determine if all-day event
|
||||
is_all_day = "date" in event.get("start", {})
|
||||
|
||||
# Format start and end times
|
||||
if is_all_day:
|
||||
start_time = event.get("start", {}).get("date", "")
|
||||
end_time = event.get("end", {}).get("date", "")
|
||||
else:
|
||||
# Convert ISO format to more readable format
|
||||
start_datetime = datetime.fromisoformat(
|
||||
event.get("start", {}).get("dateTime", "").replace("Z", "+00:00")
|
||||
)
|
||||
end_datetime = datetime.fromisoformat(
|
||||
event.get("end", {}).get("dateTime", "").replace("Z", "+00:00")
|
||||
)
|
||||
start_time = start_datetime.strftime("%Y-%m-%d %H:%M")
|
||||
end_time = end_datetime.strftime("%Y-%m-%d %H:%M")
|
||||
|
||||
# Extract attendees
|
||||
attendees = []
|
||||
for attendee in event.get("attendees", []):
|
||||
if email := attendee.get("email"):
|
||||
attendees.append(email)
|
||||
|
||||
# Check for video call link
|
||||
has_video_call = False
|
||||
video_link = None
|
||||
if conf_data := event.get("conferenceData"):
|
||||
if conf_url := conf_data.get("conferenceUrl"):
|
||||
has_video_call = True
|
||||
video_link = conf_url
|
||||
elif entry_points := conf_data.get("entryPoints", []):
|
||||
for entry in entry_points:
|
||||
if entry.get("entryPointType") == "video":
|
||||
has_video_call = True
|
||||
video_link = entry.get("uri")
|
||||
break
|
||||
|
||||
# Create formatted event
|
||||
formatted_event = CalendarEvent(
|
||||
id=event.get("id", ""),
|
||||
title=event.get("summary", "Untitled Event"),
|
||||
start_time=start_time,
|
||||
end_time=end_time,
|
||||
is_all_day=is_all_day,
|
||||
location=event.get("location"),
|
||||
description=event.get("description"),
|
||||
organizer=event.get("organizer", {}).get("email"),
|
||||
attendees=attendees,
|
||||
has_video_call=has_video_call,
|
||||
video_link=video_link,
|
||||
calendar_link=event.get("htmlLink", ""),
|
||||
is_recurring=bool(event.get("recurrence")),
|
||||
)
|
||||
|
||||
formatted_events.append(formatted_event)
|
||||
|
||||
return formatted_events
|
||||
|
||||
|
||||
class ReminderPreset(enum.Enum):
    """Common reminder times before an event."""

    TEN_MINUTES = 10
    THIRTY_MINUTES = 30
    ONE_HOUR = 60
    ONE_DAY = 1440  # 24 hours in minutes


class RecurrenceFrequency(enum.Enum):
    """Frequency options for recurring events."""

    DAILY = "DAILY"
    WEEKLY = "WEEKLY"
    MONTHLY = "MONTHLY"
    YEARLY = "YEARLY"


class ExactTiming(BaseModel):
    """Model for specifying start and end times."""

    discriminator: Literal["exact_timing"]
    start_datetime: datetime
    end_datetime: datetime


class DurationTiming(BaseModel):
    """Model for specifying start time and duration."""

    discriminator: Literal["duration_timing"]
    start_datetime: datetime
    duration_minutes: int


class OneTimeEvent(BaseModel):
    """Model for a one-time event."""

    discriminator: Literal["one_time"]


class RecurringEvent(BaseModel):
    """Model for a recurring event."""

    discriminator: Literal["recurring"]
    frequency: RecurrenceFrequency
    count: int


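# The two timing models form a discriminated union keyed on `discriminator`;
# a rough sketch of how the create block resolves them into concrete start/end
# datetimes (the same arithmetic as in run() below):
_example_timing = DurationTiming(
    discriminator="duration_timing",
    start_datetime=datetime(2025, 3, 19, 9, 30),
    duration_minutes=45,
)
_example_start = _example_timing.start_datetime
_example_end = _example_start + timedelta(minutes=_example_timing.duration_minutes)
# _example_end == datetime(2025, 3, 19, 10, 15); an ExactTiming instance would
# instead carry its own end_datetime.

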
class GoogleCalendarCreateEventBlock(Block):
    class Input(BlockSchema):
        credentials: GoogleCredentialsInput = GoogleCredentialsField(
            ["https://www.googleapis.com/auth/calendar"]
        )
        # Event Details
        event_title: str = SchemaField(description="Title of the event")
        location: str | None = SchemaField(
            description="Location of the event", default=None
        )
        description: str | None = SchemaField(
            description="Description of the event", default=None
        )

        # Timing
        timing: ExactTiming | DurationTiming = SchemaField(
            discriminator="discriminator",
            advanced=False,
            description="Specify when the event starts and ends",
            default_factory=lambda: DurationTiming(
                discriminator="duration_timing",
                start_datetime=datetime.now().replace(microsecond=0, second=0, minute=0)
                + timedelta(hours=1),
                duration_minutes=60,
            ),
        )

        # Calendar selection
        calendar_id: str = SchemaField(
            description="Calendar ID (use 'primary' for your main calendar)",
            default="primary",
        )

        # Guests
        guest_emails: list[str] = SchemaField(
            description="Email addresses of guests to invite", default_factory=list
        )
        send_notifications: bool = SchemaField(
            description="Send email notifications to guests", default=True
        )

        # Extras
        add_google_meet: bool = SchemaField(
            description="Include a Google Meet video conference link", default=False
        )
        recurrence: OneTimeEvent | RecurringEvent = SchemaField(
            discriminator="discriminator",
            description="Whether the event repeats",
            default_factory=lambda: OneTimeEvent(discriminator="one_time"),
        )
        reminder_minutes: list[ReminderPreset] = SchemaField(
            description="When to send reminders before the event",
            default_factory=lambda: [ReminderPreset.TEN_MINUTES],
        )

    class Output(BlockSchema):
        event_id: str = SchemaField(description="ID of the created event")
        event_link: str = SchemaField(
            description="Link to view the event in Google Calendar"
        )
        error: str = SchemaField(description="Error message if event creation failed")

    def __init__(self):
        settings = Settings()

        super().__init__(
            id="ed2ec950-fbff-4204-94c0-023fb1d625e0",
            description="This block creates a new event in Google Calendar with customizable parameters.",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=GoogleCalendarCreateEventBlock.Input,
            output_schema=GoogleCalendarCreateEventBlock.Output,
            disabled=not GOOGLE_OAUTH_IS_CONFIGURED
            or settings.config.app_env == AppEnvironment.PRODUCTION,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "event_title": "Team Meeting",
                "location": "Conference Room A",
                "description": "Weekly team sync-up",
                "calendar_id": "primary",
                "guest_emails": ["colleague1@example.com", "colleague2@example.com"],
                "add_google_meet": True,
                "send_notifications": True,
                "reminder_minutes": [
                    ReminderPreset.TEN_MINUTES.value,
                    ReminderPreset.ONE_HOUR.value,
                ],
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("event_id", "abc123event_id"),
                ("event_link", "https://calendar.google.com/calendar/event?eid=abc123"),
            ],
            test_mock={
                "_create_event": lambda *args, **kwargs: {
                    "id": "abc123event_id",
                    "htmlLink": "https://calendar.google.com/calendar/event?eid=abc123",
                }
            },
        )

    def run(
        self, input_data: Input, *, credentials: GoogleCredentials, **kwargs
    ) -> BlockOutput:
        try:
            service = self._build_service(credentials, **kwargs)

            # Get start and end times based on the timing option
            if input_data.timing.discriminator == "exact_timing":
                start_datetime = input_data.timing.start_datetime
                end_datetime = input_data.timing.end_datetime
            else:  # duration_timing
                start_datetime = input_data.timing.start_datetime
                end_datetime = start_datetime + timedelta(
                    minutes=input_data.timing.duration_minutes
                )

            # Format datetimes for Google Calendar API
            start_time_str = start_datetime.isoformat()
            end_time_str = end_datetime.isoformat()

            # Build the event body
            event_body = {
                "summary": input_data.event_title,
                "start": {"dateTime": start_time_str},
                "end": {"dateTime": end_time_str},
            }

            # Add optional fields
            if input_data.location:
                event_body["location"] = input_data.location

            if input_data.description:
                event_body["description"] = input_data.description

            # Add guests
            if input_data.guest_emails:
                event_body["attendees"] = [
                    {"email": email} for email in input_data.guest_emails
                ]

            # Add reminders
            if input_data.reminder_minutes:
                event_body["reminders"] = {
                    "useDefault": False,
                    "overrides": [
                        {"method": "popup", "minutes": reminder.value}
                        for reminder in input_data.reminder_minutes
                    ],
                }

            # Add Google Meet
            if input_data.add_google_meet:
                event_body["conferenceData"] = {
                    "createRequest": {
                        "requestId": f"meet-{uuid.uuid4()}",
                        "conferenceSolutionKey": {"type": "hangoutsMeet"},
                    }
                }

            # Add recurrence
            if input_data.recurrence.discriminator == "recurring":
                rule = f"RRULE:FREQ={input_data.recurrence.frequency.value}"
                rule += f";COUNT={input_data.recurrence.count}"
                event_body["recurrence"] = [rule]

            # Create the event
            result = self._create_event(
                service=service,
                calendar_id=input_data.calendar_id,
                event_body=event_body,
                send_notifications=input_data.send_notifications,
                conference_data_version=1 if input_data.add_google_meet else 0,
            )

            yield "event_id", result.get("id", "")
            yield "event_link", result.get("htmlLink", "")
        except Exception as e:
            yield "error", str(e)

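    # For illustration, a weekly recurring invite with a Meet link and a 10-minute
    # popup reminder would leave run() with an event body roughly like this
    # (requestId shown as a placeholder for the generated UUID):
    #
    #   {
    #       "summary": "Team Meeting",
    #       "start": {"dateTime": "2025-03-19T09:30:00"},
    #       "end": {"dateTime": "2025-03-19T10:30:00"},
    #       "attendees": [{"email": "colleague1@example.com"}],
    #       "reminders": {"useDefault": False,
    #                     "overrides": [{"method": "popup", "minutes": 10}]},
    #       "conferenceData": {
    #           "createRequest": {
    #               "requestId": "meet-<uuid4>",
    #               "conferenceSolutionKey": {"type": "hangoutsMeet"},
    #           }
    #       },
    #       "recurrence": ["RRULE:FREQ=WEEKLY;COUNT=8"],
    #   }
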
    @staticmethod
    def _build_service(credentials: GoogleCredentials, **kwargs):
        creds = Credentials(
            token=(
                credentials.access_token.get_secret_value()
                if credentials.access_token
                else None
            ),
            refresh_token=(
                credentials.refresh_token.get_secret_value()
                if credentials.refresh_token
                else None
            ),
            token_uri="https://oauth2.googleapis.com/token",
            client_id=Settings().secrets.google_client_id,
            client_secret=Settings().secrets.google_client_secret,
            scopes=credentials.scopes,
        )
        return build("calendar", "v3", credentials=creds)

    def _create_event(
        self,
        service,
        calendar_id: str,
        event_body: dict,
        send_notifications: bool = False,
        conference_data_version: int = 0,
    ) -> dict:
        """Create a new event in Google Calendar."""
        calendar = service.events()

        # Make the API call
        result = calendar.insert(
            calendarId=calendar_id,
            body=event_body,
            sendNotifications=send_notifications,
            conferenceDataVersion=conference_data_version,
        ).execute()

        return result
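# The insert call resolves to a plain event resource dict; the block only reads
# "id" and "htmlLink" from it (mirroring the test mock above), e.g.:
_SAMPLE_INSERT_RESPONSE = {
    "id": "abc123event_id",
    "htmlLink": "https://calendar.google.com/calendar/event?eid=abc123",
    # the live API returns many more fields (status, creator, conference data, ...)
}
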
@@ -3,7 +3,7 @@ from googleapiclient.discovery import build
|
||||
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
from backend.util.settings import AppEnvironment, Settings
|
||||
from backend.util.settings import Settings
|
||||
|
||||
from ._auth import (
|
||||
GOOGLE_OAUTH_IS_CONFIGURED,
|
||||
@@ -36,15 +36,13 @@ class GoogleSheetsReadBlock(Block):
|
||||
)
|
||||
|
||||
def __init__(self):
|
||||
settings = Settings()
|
||||
super().__init__(
|
||||
id="5724e902-3635-47e9-a108-aaa0263a4988",
|
||||
description="This block reads data from a Google Sheets spreadsheet.",
|
||||
categories={BlockCategory.DATA},
|
||||
input_schema=GoogleSheetsReadBlock.Input,
|
||||
output_schema=GoogleSheetsReadBlock.Output,
|
||||
disabled=not GOOGLE_OAUTH_IS_CONFIGURED
|
||||
or settings.config.app_env == AppEnvironment.PRODUCTION,
|
||||
disabled=not GOOGLE_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"spreadsheet_id": "1BxiMVs0XRA5nFMdKvBdBZjgmUUqptlbs74OgvE2upms",
|
||||
"range": "Sheet1!A1:B2",
|
||||
|
||||
@@ -1,14 +1,12 @@
|
||||
import json
|
||||
import logging
|
||||
from enum import Enum
|
||||
from io import BufferedReader
|
||||
from typing import Any
|
||||
|
||||
from requests.exceptions import HTTPError, RequestException
|
||||
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
from backend.util.file import MediaFileType, get_exec_file_path, store_media_file
|
||||
from backend.util.request import requests
|
||||
|
||||
logger = logging.getLogger(name=__name__)
|
||||
@@ -36,7 +34,7 @@ class SendWebRequestBlock(Block):
|
||||
)
|
||||
headers: dict[str, str] = SchemaField(
|
||||
description="The headers to include in the request",
|
||||
default_factory=dict,
|
||||
default={},
|
||||
)
|
||||
json_format: bool = SchemaField(
|
||||
title="JSON format",
|
||||
@@ -47,10 +45,6 @@ class SendWebRequestBlock(Block):
|
||||
description="The body of the request",
|
||||
default=None,
|
||||
)
|
||||
files: dict[str, MediaFileType] = SchemaField(
|
||||
description="File fields mapping to MediaFileType for multipart upload",
|
||||
default_factory=dict,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
response: object = SchemaField(description="The response from the server")
|
||||
@@ -67,7 +61,7 @@ class SendWebRequestBlock(Block):
|
||||
output_schema=SendWebRequestBlock.Output,
|
||||
)
|
||||
|
||||
def run(self, input_data: Input, *, graph_exec_id: str, **kwargs) -> BlockOutput:
|
||||
def run(self, input_data: Input, **kwargs) -> BlockOutput:
|
||||
body = input_data.body
|
||||
|
||||
if input_data.json_format:
|
||||
@@ -80,43 +74,15 @@ class SendWebRequestBlock(Block):
|
||||
# we should send it as plain text instead
|
||||
input_data.json_format = False
|
||||
|
||||
# Prepare files for multipart upload using store_media_file
|
||||
files: dict[str, BufferedReader] = {}
|
||||
if input_data.files:
|
||||
for field_name, media in input_data.files.items():
|
||||
try:
|
||||
rel_path = store_media_file(
|
||||
graph_exec_id, media, return_content=False
|
||||
)
|
||||
abs_path = get_exec_file_path(graph_exec_id, rel_path)
|
||||
files[field_name] = open(abs_path, "rb")
|
||||
except Exception as e:
|
||||
yield "error", f"Failed to prepare file '{field_name}': {e}"
|
||||
for f in files.values():
|
||||
try:
|
||||
f.close()
|
||||
except Exception:
|
||||
pass
|
||||
return
|
||||
|
||||
try:
|
||||
response = requests.request(
|
||||
input_data.method.value,
|
||||
input_data.url,
|
||||
headers=input_data.headers,
|
||||
files=files if files else None,
|
||||
json=body if input_data.json_format else None,
|
||||
data=body if not input_data.json_format else None,
|
||||
)
|
||||
|
||||
if input_data.json_format:
|
||||
if response.status_code == 204 or not response.content.strip():
|
||||
result = None
|
||||
else:
|
||||
result = response.json()
|
||||
else:
|
||||
result = response.text
|
||||
|
||||
result = response.json() if input_data.json_format else response.text
|
||||
yield "response", result
|
||||
|
||||
except HTTPError as e:
|
||||
@@ -145,11 +111,3 @@ class SendWebRequestBlock(Block):
|
||||
except Exception as e:
|
||||
# Catch any other unexpected exceptions
|
||||
yield "error", str(e)
|
||||
|
||||
finally:
|
||||
# ensure cleanup of file handles
|
||||
for f in files.values():
|
||||
try:
|
||||
f.close()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
@@ -15,8 +15,7 @@ class HubSpotCompanyBlock(Block):
|
||||
description="Operation to perform (create, update, get)", default="get"
|
||||
)
|
||||
company_data: dict = SchemaField(
|
||||
description="Company data for create/update operations",
|
||||
default_factory=dict,
|
||||
description="Company data for create/update operations", default={}
|
||||
)
|
||||
domain: str = SchemaField(
|
||||
description="Company domain for get/update operations", default=""
|
||||
|
||||
@@ -15,8 +15,7 @@ class HubSpotContactBlock(Block):
|
||||
description="Operation to perform (create, update, get)", default="get"
|
||||
)
|
||||
contact_data: dict = SchemaField(
|
||||
description="Contact data for create/update operations",
|
||||
default_factory=dict,
|
||||
description="Contact data for create/update operations", default={}
|
||||
)
|
||||
email: str = SchemaField(
|
||||
description="Email address for get/update operations", default=""
|
||||
|
||||
@@ -19,7 +19,7 @@ class HubSpotEngagementBlock(Block):
|
||||
)
|
||||
email_data: dict = SchemaField(
|
||||
description="Email data including recipient, subject, content",
|
||||
default_factory=dict,
|
||||
default={},
|
||||
)
|
||||
contact_id: str = SchemaField(
|
||||
description="Contact ID for engagement tracking", default=""
|
||||
@@ -27,6 +27,7 @@ class HubSpotEngagementBlock(Block):
|
||||
timeframe_days: int = SchemaField(
|
||||
description="Number of days to look back for engagement",
|
||||
default=30,
|
||||
optional=True,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
|
||||
@@ -1,556 +0,0 @@
|
||||
import copy
|
||||
from datetime import date, time
|
||||
from typing import Any, Optional
|
||||
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema, BlockType
|
||||
from backend.data.model import SchemaField
|
||||
from backend.util.file import store_media_file
|
||||
from backend.util.mock import MockObject
|
||||
from backend.util.settings import Config
|
||||
from backend.util.text import TextFormatter
|
||||
from backend.util.type import LongTextType, MediaFileType, ShortTextType
|
||||
|
||||
formatter = TextFormatter()
|
||||
config = Config()
|
||||
|
||||
|
||||
class AgentInputBlock(Block):
|
||||
"""
|
||||
This block is used to provide input to the graph.
|
||||
|
||||
It takes in a value, name, description, a list of default values, and a bool to limit selection to the default values.

It outputs the value passed as input.
|
||||
"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
name: str = SchemaField(description="The name of the input.")
|
||||
value: Any = SchemaField(
|
||||
description="The value to be passed as input.",
|
||||
default=None,
|
||||
)
|
||||
title: str | None = SchemaField(
|
||||
description="The title of the input.", default=None, advanced=True
|
||||
)
|
||||
description: str | None = SchemaField(
|
||||
description="The description of the input.",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
placeholder_values: list = SchemaField(
|
||||
description="The placeholder values to be passed as input.",
|
||||
default_factory=list,
|
||||
advanced=True,
|
||||
hidden=True,
|
||||
)
|
||||
advanced: bool = SchemaField(
|
||||
description="Whether to show the input in the advanced section, if the field is not required.",
|
||||
default=False,
|
||||
advanced=True,
|
||||
)
|
||||
secret: bool = SchemaField(
|
||||
description="Whether the input should be treated as a secret.",
|
||||
default=False,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
def generate_schema(self):
|
||||
schema = copy.deepcopy(self.get_field_schema("value"))
|
||||
if possible_values := self.placeholder_values:
|
||||
schema["enum"] = possible_values
|
||||
return schema
|
||||
|
||||
class Output(BlockSchema):
|
||||
result: Any = SchemaField(description="The value passed as input.")
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
super().__init__(
|
||||
**{
|
||||
"id": "c0a8e994-ebf1-4a9c-a4d8-89d09c86741b",
|
||||
"description": "Base block for user inputs.",
|
||||
"input_schema": AgentInputBlock.Input,
|
||||
"output_schema": AgentInputBlock.Output,
|
||||
"test_input": [
|
||||
{
|
||||
"value": "Hello, World!",
|
||||
"name": "input_1",
|
||||
"description": "Example test input.",
|
||||
"placeholder_values": [],
|
||||
},
|
||||
{
|
||||
"value": "Hello, World!",
|
||||
"name": "input_2",
|
||||
"description": "Example test input with placeholders.",
|
||||
"placeholder_values": ["Hello, World!"],
|
||||
},
|
||||
],
|
||||
"test_output": [
|
||||
("result", "Hello, World!"),
|
||||
("result", "Hello, World!"),
|
||||
],
|
||||
"categories": {BlockCategory.INPUT, BlockCategory.BASIC},
|
||||
"block_type": BlockType.INPUT,
|
||||
"static_output": True,
|
||||
**kwargs,
|
||||
}
|
||||
)
|
||||
|
||||
def run(self, input_data: Input, *args, **kwargs) -> BlockOutput:
|
||||
if input_data.value is not None:
|
||||
yield "result", input_data.value
|
||||
|
||||
|
||||
class AgentOutputBlock(Block):
|
||||
"""
|
||||
Records the output of the graph for users to see.
|
||||
|
||||
Behavior:
|
||||
If `format` is provided and the `value` is of a type that can be formatted,
|
||||
the block attempts to format the recorded_value using the `format`.
|
||||
If formatting fails or no `format` is provided, the raw `value` is output.
|
||||
"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
value: Any = SchemaField(
|
||||
description="The value to be recorded as output.",
|
||||
default=None,
|
||||
advanced=False,
|
||||
)
|
||||
name: str = SchemaField(description="The name of the output.")
|
||||
title: str | None = SchemaField(
|
||||
description="The title of the output.",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
description: str | None = SchemaField(
|
||||
description="The description of the output.",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
format: str = SchemaField(
|
||||
description="The format string to be used to format the recorded_value. Use Jinja2 syntax.",
|
||||
default="",
|
||||
advanced=True,
|
||||
)
|
||||
advanced: bool = SchemaField(
|
||||
description="Whether to treat the output as advanced.",
|
||||
default=False,
|
||||
advanced=True,
|
||||
)
|
||||
secret: bool = SchemaField(
|
||||
description="Whether the output should be treated as a secret.",
|
||||
default=False,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
def generate_schema(self):
|
||||
return self.get_field_schema("value")
|
||||
|
||||
class Output(BlockSchema):
|
||||
output: Any = SchemaField(description="The value recorded as output.")
|
||||
name: Any = SchemaField(description="The name of the value recorded as output.")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="363ae599-353e-4804-937e-b2ee3cef3da4",
|
||||
description="Stores the output of the graph for users to see.",
|
||||
input_schema=AgentOutputBlock.Input,
|
||||
output_schema=AgentOutputBlock.Output,
|
||||
test_input=[
|
||||
{
|
||||
"value": "Hello, World!",
|
||||
"name": "output_1",
|
||||
"description": "This is a test output.",
|
||||
"format": "{{ output_1 }}!!",
|
||||
},
|
||||
{
|
||||
"value": "42",
|
||||
"name": "output_2",
|
||||
"description": "This is another test output.",
|
||||
"format": "{{ output_2 }}",
|
||||
},
|
||||
{
|
||||
"value": MockObject(value="!!", key="key"),
|
||||
"name": "output_3",
|
||||
"description": "This is a test output with a mock object.",
|
||||
"format": "{{ output_3 }}",
|
||||
},
|
||||
],
|
||||
test_output=[
|
||||
("output", "Hello, World!!!"),
|
||||
("output", "42"),
|
||||
("output", MockObject(value="!!", key="key")),
|
||||
],
|
||||
categories={BlockCategory.OUTPUT, BlockCategory.BASIC},
|
||||
block_type=BlockType.OUTPUT,
|
||||
static_output=True,
|
||||
)
|
||||
|
||||
    def run(self, input_data: Input, *args, **kwargs) -> BlockOutput:
        """
        Attempts to format the recorded_value using the fmt_string if provided.
        If formatting fails or no fmt_string is given, returns the original recorded_value.
        """
        if input_data.format:
            try:
                yield "output", formatter.format_string(
                    input_data.format, {input_data.name: input_data.value}
                )
            except Exception as e:
                yield "output", f"Error: {e}, {input_data.value}"
        else:
            yield "output", input_data.value
        yield "name", input_data.name


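# The format string is rendered with Jinja2, binding the output's own name as the
# template variable; mirroring the block's test data above:
_formatted_example = formatter.format_string(
    "{{ output_1 }}!!", {"output_1": "Hello, World!"}
)
# _formatted_example == "Hello, World!!!"

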
class AgentShortTextInputBlock(AgentInputBlock):
|
||||
class Input(AgentInputBlock.Input):
|
||||
value: Optional[ShortTextType] = SchemaField(
|
||||
description="Short text input.",
|
||||
default=None,
|
||||
advanced=False,
|
||||
title="Default Value",
|
||||
)
|
||||
|
||||
class Output(AgentInputBlock.Output):
|
||||
result: str = SchemaField(description="Short text result.")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="7fcd3bcb-8e1b-4e69-903d-32d3d4a92158",
|
||||
description="Block for short text input (single-line).",
|
||||
disabled=not config.enable_agent_input_subtype_blocks,
|
||||
input_schema=AgentShortTextInputBlock.Input,
|
||||
output_schema=AgentShortTextInputBlock.Output,
|
||||
test_input=[
|
||||
{
|
||||
"value": "Hello",
|
||||
"name": "short_text_1",
|
||||
"description": "Short text example 1",
|
||||
"placeholder_values": [],
|
||||
},
|
||||
{
|
||||
"value": "Quick test",
|
||||
"name": "short_text_2",
|
||||
"description": "Short text example 2",
|
||||
"placeholder_values": ["Quick test", "Another option"],
|
||||
},
|
||||
],
|
||||
test_output=[
|
||||
("result", "Hello"),
|
||||
("result", "Quick test"),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
class AgentLongTextInputBlock(AgentInputBlock):
|
||||
class Input(AgentInputBlock.Input):
|
||||
value: Optional[LongTextType] = SchemaField(
|
||||
description="Long text input (potentially multi-line).",
|
||||
default=None,
|
||||
advanced=False,
|
||||
title="Default Value",
|
||||
)
|
||||
|
||||
class Output(AgentInputBlock.Output):
|
||||
result: str = SchemaField(description="Long text result.")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="90a56ffb-7024-4b2b-ab50-e26c5e5ab8ba",
|
||||
description="Block for long text input (multi-line).",
|
||||
disabled=not config.enable_agent_input_subtype_blocks,
|
||||
input_schema=AgentLongTextInputBlock.Input,
|
||||
output_schema=AgentLongTextInputBlock.Output,
|
||||
test_input=[
|
||||
{
|
||||
"value": "Lorem ipsum dolor sit amet...",
|
||||
"name": "long_text_1",
|
||||
"description": "Long text example 1",
|
||||
"placeholder_values": [],
|
||||
},
|
||||
{
|
||||
"value": "Another multiline text input.",
|
||||
"name": "long_text_2",
|
||||
"description": "Long text example 2",
|
||||
"placeholder_values": ["Another multiline text input."],
|
||||
},
|
||||
],
|
||||
test_output=[
|
||||
("result", "Lorem ipsum dolor sit amet..."),
|
||||
("result", "Another multiline text input."),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
class AgentNumberInputBlock(AgentInputBlock):
|
||||
class Input(AgentInputBlock.Input):
|
||||
value: Optional[int] = SchemaField(
|
||||
description="Number input.",
|
||||
default=None,
|
||||
advanced=False,
|
||||
title="Default Value",
|
||||
)
|
||||
|
||||
class Output(AgentInputBlock.Output):
|
||||
result: int = SchemaField(description="Number result.")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="96dae2bb-97a2-41c2-bd2f-13a3b5a8ea98",
|
||||
description="Block for number input.",
|
||||
disabled=not config.enable_agent_input_subtype_blocks,
|
||||
input_schema=AgentNumberInputBlock.Input,
|
||||
output_schema=AgentNumberInputBlock.Output,
|
||||
test_input=[
|
||||
{
|
||||
"value": 42,
|
||||
"name": "number_input_1",
|
||||
"description": "Number example 1",
|
||||
"placeholder_values": [],
|
||||
},
|
||||
{
|
||||
"value": 314,
|
||||
"name": "number_input_2",
|
||||
"description": "Number example 2",
|
||||
"placeholder_values": [314, 2718],
|
||||
},
|
||||
],
|
||||
test_output=[
|
||||
("result", 42),
|
||||
("result", 314),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
class AgentDateInputBlock(AgentInputBlock):
|
||||
class Input(AgentInputBlock.Input):
|
||||
value: Optional[date] = SchemaField(
|
||||
description="Date input (YYYY-MM-DD).",
|
||||
default=None,
|
||||
advanced=False,
|
||||
title="Default Value",
|
||||
)
|
||||
|
||||
class Output(AgentInputBlock.Output):
|
||||
result: date = SchemaField(description="Date result.")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="7e198b09-4994-47db-8b4d-952d98241817",
|
||||
description="Block for date input.",
|
||||
disabled=not config.enable_agent_input_subtype_blocks,
|
||||
input_schema=AgentDateInputBlock.Input,
|
||||
output_schema=AgentDateInputBlock.Output,
|
||||
test_input=[
|
||||
{
|
||||
# If your system can parse JSON date strings to date objects
|
||||
"value": str(date(2025, 3, 19)),
|
||||
"name": "date_input_1",
|
||||
"description": "Example date input 1",
|
||||
},
|
||||
{
|
||||
"value": str(date(2023, 12, 31)),
|
||||
"name": "date_input_2",
|
||||
"description": "Example date input 2",
|
||||
},
|
||||
],
|
||||
test_output=[
|
||||
("result", date(2025, 3, 19)),
|
||||
("result", date(2023, 12, 31)),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
class AgentTimeInputBlock(AgentInputBlock):
|
||||
class Input(AgentInputBlock.Input):
|
||||
value: Optional[time] = SchemaField(
|
||||
description="Time input (HH:MM:SS).",
|
||||
default=None,
|
||||
advanced=False,
|
||||
title="Default Value",
|
||||
)
|
||||
|
||||
class Output(AgentInputBlock.Output):
|
||||
result: time = SchemaField(description="Time result.")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="2a1c757e-86cf-4c7e-aacf-060dc382e434",
|
||||
description="Block for time input.",
|
||||
disabled=not config.enable_agent_input_subtype_blocks,
|
||||
input_schema=AgentTimeInputBlock.Input,
|
||||
output_schema=AgentTimeInputBlock.Output,
|
||||
test_input=[
|
||||
{
|
||||
"value": str(time(9, 30, 0)),
|
||||
"name": "time_input_1",
|
||||
"description": "Time example 1",
|
||||
},
|
||||
{
|
||||
"value": str(time(23, 59, 59)),
|
||||
"name": "time_input_2",
|
||||
"description": "Time example 2",
|
||||
},
|
||||
],
|
||||
test_output=[
|
||||
("result", time(9, 30, 0)),
|
||||
("result", time(23, 59, 59)),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
class AgentFileInputBlock(AgentInputBlock):
|
||||
"""
|
||||
A simplified file-upload block. In real usage, you might have a custom
|
||||
file type or handle binary data. Here, we'll store a string path as the example.
|
||||
"""
|
||||
|
||||
class Input(AgentInputBlock.Input):
|
||||
value: Optional[MediaFileType] = SchemaField(
|
||||
description="Path or reference to an uploaded file.",
|
||||
default=None,
|
||||
advanced=False,
|
||||
title="Default Value",
|
||||
)
|
||||
|
||||
class Output(AgentInputBlock.Output):
|
||||
result: str = SchemaField(description="File reference/path result.")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="95ead23f-8283-4654-aef3-10c053b74a31",
|
||||
description="Block for file upload input (string path for example).",
|
||||
disabled=not config.enable_agent_input_subtype_blocks,
|
||||
input_schema=AgentFileInputBlock.Input,
|
||||
output_schema=AgentFileInputBlock.Output,
|
||||
test_input=[
|
||||
{
|
||||
"value": "data:image/png;base64,MQ==",
|
||||
"name": "file_upload_1",
|
||||
"description": "Example file upload 1",
|
||||
},
|
||||
],
|
||||
test_output=[
|
||||
("result", str),
|
||||
],
|
||||
)
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
graph_exec_id: str,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
if not input_data.value:
|
||||
return
|
||||
|
||||
file_path = store_media_file(
|
||||
graph_exec_id=graph_exec_id,
|
||||
file=input_data.value,
|
||||
return_content=False,
|
||||
)
|
||||
yield "result", file_path
|
||||
|
||||
|
||||
class AgentDropdownInputBlock(AgentInputBlock):
|
||||
"""
|
||||
A specialized text input block that relies on placeholder_values to present a dropdown.
|
||||
"""
|
||||
|
||||
class Input(AgentInputBlock.Input):
|
||||
value: Optional[str] = SchemaField(
|
||||
description="Text selected from a dropdown.",
|
||||
default=None,
|
||||
advanced=False,
|
||||
title="Default Value",
|
||||
)
|
||||
placeholder_values: list = SchemaField(
|
||||
description="Possible values for the dropdown.",
|
||||
default_factory=list,
|
||||
advanced=False,
|
||||
title="Dropdown Options",
|
||||
)
|
||||
|
||||
class Output(AgentInputBlock.Output):
|
||||
result: str = SchemaField(description="Selected dropdown value.")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="655d6fdf-a334-421c-b733-520549c07cd1",
|
||||
description="Block for dropdown text selection.",
|
||||
disabled=not config.enable_agent_input_subtype_blocks,
|
||||
input_schema=AgentDropdownInputBlock.Input,
|
||||
output_schema=AgentDropdownInputBlock.Output,
|
||||
test_input=[
|
||||
{
|
||||
"value": "Option A",
|
||||
"name": "dropdown_1",
|
||||
"placeholder_values": ["Option A", "Option B", "Option C"],
|
||||
"description": "Dropdown example 1",
|
||||
},
|
||||
{
|
||||
"value": "Option C",
|
||||
"name": "dropdown_2",
|
||||
"placeholder_values": ["Option A", "Option B", "Option C"],
|
||||
"description": "Dropdown example 2",
|
||||
},
|
||||
],
|
||||
test_output=[
|
||||
("result", "Option A"),
|
||||
("result", "Option C"),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
class AgentToggleInputBlock(AgentInputBlock):
|
||||
class Input(AgentInputBlock.Input):
|
||||
value: bool = SchemaField(
|
||||
description="Boolean toggle input.",
|
||||
default=False,
|
||||
advanced=False,
|
||||
title="Default Value",
|
||||
)
|
||||
|
||||
class Output(AgentInputBlock.Output):
|
||||
result: bool = SchemaField(description="Boolean toggle result.")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="cbf36ab5-df4a-43b6-8a7f-f7ed8652116e",
|
||||
description="Block for boolean toggle input.",
|
||||
disabled=not config.enable_agent_input_subtype_blocks,
|
||||
input_schema=AgentToggleInputBlock.Input,
|
||||
output_schema=AgentToggleInputBlock.Output,
|
||||
test_input=[
|
||||
{
|
||||
"value": True,
|
||||
"name": "toggle_1",
|
||||
"description": "Toggle example 1",
|
||||
},
|
||||
{
|
||||
"value": False,
|
||||
"name": "toggle_2",
|
||||
"description": "Toggle example 2",
|
||||
},
|
||||
],
|
||||
test_output=[
|
||||
("result", True),
|
||||
("result", False),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
IO_BLOCK_IDs = [
|
||||
AgentInputBlock().id,
|
||||
AgentOutputBlock().id,
|
||||
AgentShortTextInputBlock().id,
|
||||
AgentLongTextInputBlock().id,
|
||||
AgentNumberInputBlock().id,
|
||||
AgentDateInputBlock().id,
|
||||
AgentTimeInputBlock().id,
|
||||
AgentFileInputBlock().id,
|
||||
AgentDropdownInputBlock().id,
|
||||
AgentToggleInputBlock().id,
|
||||
]
|
||||
@@ -11,13 +11,13 @@ class StepThroughItemsBlock(Block):
|
||||
advanced=False,
|
||||
description="The list or dictionary of items to iterate over",
|
||||
placeholder="[1, 2, 3, 4, 5] or {'key1': 'value1', 'key2': 'value2'}",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
)
|
||||
items_object: dict = SchemaField(
|
||||
advanced=False,
|
||||
description="The list or dictionary of items to iterate over",
|
||||
placeholder="[1, 2, 3, 4, 5] or {'key1': 'value1', 'key2': 'value2'}",
|
||||
default_factory=dict,
|
||||
default={},
|
||||
)
|
||||
items_str: str = SchemaField(
|
||||
advanced=False,
|
||||
|
||||
@@ -23,7 +23,7 @@ class JinaChunkingBlock(Block):
|
||||
class Output(BlockSchema):
|
||||
chunks: list = SchemaField(description="List of chunked texts")
|
||||
tokens: list = SchemaField(
|
||||
description="List of token information for each chunk",
|
||||
description="List of token information for each chunk", optional=True
|
||||
)
|
||||
|
||||
def __init__(self):
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from urllib.parse import quote
|
||||
from groq._utils._utils import quote
|
||||
|
||||
from backend.blocks.jina._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
|
||||
@@ -28,8 +28,8 @@ class LinearCreateIssueBlock(Block):
|
||||
priority: int | None = SchemaField(
|
||||
description="Priority of the issue",
|
||||
default=None,
|
||||
ge=0,
|
||||
le=4,
|
||||
minimum=0,
|
||||
maximum=4,
|
||||
)
|
||||
project_name: str | None = SchemaField(
|
||||
description="Name of the project to create the issue on",
|
||||
|
||||
@@ -4,30 +4,35 @@ from abc import ABC
|
||||
from enum import Enum, EnumMeta
|
||||
from json import JSONDecodeError
|
||||
from types import MappingProxyType
|
||||
from typing import Any, Iterable, List, Literal, NamedTuple, Optional
|
||||
from typing import TYPE_CHECKING, Any, Iterable, List, Literal, NamedTuple, Optional
|
||||
|
||||
from pydantic import BaseModel, SecretStr
|
||||
|
||||
from backend.data.model import NodeExecutionStats
|
||||
from backend.integrations.providers import ProviderName
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from enum import _EnumMemberT
|
||||
|
||||
import anthropic
|
||||
import ollama
|
||||
import openai
|
||||
from anthropic._types import NotGiven
|
||||
from anthropic.types import ToolParam
|
||||
from groq import Groq
|
||||
from pydantic import BaseModel, SecretStr
|
||||
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import (
|
||||
APIKeyCredentials,
|
||||
CredentialsField,
|
||||
CredentialsMetaInput,
|
||||
NodeExecutionStats,
|
||||
SchemaField,
|
||||
)
|
||||
from backend.integrations.providers import ProviderName
|
||||
from backend.util import json
|
||||
from backend.util.logging import TruncatedLogger
|
||||
from backend.util.settings import BehaveAs, Settings
|
||||
from backend.util.text import TextFormatter
|
||||
|
||||
logger = TruncatedLogger(logging.getLogger(__name__), "[LLM-Block]")
|
||||
logger = logging.getLogger(__name__)
|
||||
fmt = TextFormatter()
|
||||
|
||||
LLMProviderName = Literal[
|
||||
@@ -36,7 +41,6 @@ LLMProviderName = Literal[
|
||||
ProviderName.OLLAMA,
|
||||
ProviderName.OPENAI,
|
||||
ProviderName.OPEN_ROUTER,
|
||||
ProviderName.LLAMA_API,
|
||||
]
|
||||
AICredentials = CredentialsMetaInput[LLMProviderName, Literal["api_key"]]
|
||||
|
||||
@@ -73,10 +77,12 @@ class ModelMetadata(NamedTuple):
|
||||
|
||||
class LlmModelMeta(EnumMeta):
|
||||
@property
|
||||
def __members__(self) -> MappingProxyType:
|
||||
def __members__(
|
||||
self: type["_EnumMemberT"],
|
||||
) -> MappingProxyType[str, "_EnumMemberT"]:
|
||||
if Settings().config.behave_as == BehaveAs.LOCAL:
|
||||
members = super().__members__
|
||||
return MappingProxyType(members)
|
||||
return members
|
||||
else:
|
||||
removed_providers = ["ollama"]
|
||||
existing_members = super().__members__
|
||||
@@ -91,17 +97,14 @@ class LlmModelMeta(EnumMeta):
|
||||
class LlmModel(str, Enum, metaclass=LlmModelMeta):
|
||||
# OpenAI models
|
||||
O3_MINI = "o3-mini"
|
||||
O3 = "o3-2025-04-16"
|
||||
O1 = "o1"
|
||||
O1_PREVIEW = "o1-preview"
|
||||
O1_MINI = "o1-mini"
|
||||
GPT41 = "gpt-4.1-2025-04-14"
|
||||
GPT4O_MINI = "gpt-4o-mini"
|
||||
GPT4O = "gpt-4o"
|
||||
GPT4_TURBO = "gpt-4-turbo"
|
||||
GPT3_5_TURBO = "gpt-3.5-turbo"
|
||||
# Anthropic models
|
||||
CLAUDE_3_7_SONNET = "claude-3-7-sonnet-20250219"
|
||||
CLAUDE_3_5_SONNET = "claude-3-5-sonnet-latest"
|
||||
CLAUDE_3_5_HAIKU = "claude-3-5-haiku-latest"
|
||||
CLAUDE_3_HAIKU = "claude-3-haiku-20240307"
|
||||
@@ -122,7 +125,6 @@ class LlmModel(str, Enum, metaclass=LlmModelMeta):
|
||||
OLLAMA_DOLPHIN = "dolphin-mistral:latest"
|
||||
# OpenRouter models
|
||||
GEMINI_FLASH_1_5 = "google/gemini-flash-1.5"
|
||||
GEMINI_2_5_PRO = "google/gemini-2.5-pro-preview-03-25"
|
||||
GROK_BETA = "x-ai/grok-beta"
|
||||
MISTRAL_NEMO = "mistralai/mistral-nemo"
|
||||
COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024"
|
||||
@@ -140,13 +142,6 @@ class LlmModel(str, Enum, metaclass=LlmModelMeta):
|
||||
AMAZON_NOVA_PRO_V1 = "amazon/nova-pro-v1"
|
||||
MICROSOFT_WIZARDLM_2_8X22B = "microsoft/wizardlm-2-8x22b"
|
||||
GRYPHE_MYTHOMAX_L2_13B = "gryphe/mythomax-l2-13b"
|
||||
META_LLAMA_4_SCOUT = "meta-llama/llama-4-scout"
|
||||
META_LLAMA_4_MAVERICK = "meta-llama/llama-4-maverick"
|
||||
# Llama API models
|
||||
LLAMA_API_LLAMA_4_SCOUT = "Llama-4-Scout-17B-16E-Instruct-FP8"
|
||||
LLAMA_API_LLAMA4_MAVERICK = "Llama-4-Maverick-17B-128E-Instruct-FP8"
|
||||
LLAMA_API_LLAMA3_3_8B = "Llama-3.3-8B-Instruct"
|
||||
LLAMA_API_LLAMA3_3_70B = "Llama-3.3-70B-Instruct"
|
||||
|
||||
@property
|
||||
def metadata(self) -> ModelMetadata:
|
||||
@@ -167,14 +162,12 @@ class LlmModel(str, Enum, metaclass=LlmModelMeta):
|
||||
|
||||
MODEL_METADATA = {
|
||||
# https://platform.openai.com/docs/models
|
||||
LlmModel.O3: ModelMetadata("openai", 200000, 100000),
|
||||
LlmModel.O3_MINI: ModelMetadata("openai", 200000, 100000), # o3-mini-2025-01-31
|
||||
LlmModel.O1: ModelMetadata("openai", 200000, 100000), # o1-2024-12-17
|
||||
LlmModel.O1_PREVIEW: ModelMetadata(
|
||||
"openai", 128000, 32768
|
||||
), # o1-preview-2024-09-12
|
||||
LlmModel.O1_MINI: ModelMetadata("openai", 128000, 65536), # o1-mini-2024-09-12
|
||||
LlmModel.GPT41: ModelMetadata("openai", 1047576, 32768),
|
||||
LlmModel.GPT4O_MINI: ModelMetadata(
|
||||
"openai", 128000, 16384
|
||||
), # gpt-4o-mini-2024-07-18
|
||||
@@ -184,9 +177,6 @@ MODEL_METADATA = {
|
||||
), # gpt-4-turbo-2024-04-09
|
||||
LlmModel.GPT3_5_TURBO: ModelMetadata("openai", 16385, 4096), # gpt-3.5-turbo-0125
|
||||
# https://docs.anthropic.com/en/docs/about-claude/models
|
||||
LlmModel.CLAUDE_3_7_SONNET: ModelMetadata(
|
||||
"anthropic", 200000, 8192
|
||||
), # claude-3-7-sonnet-20250219
|
||||
LlmModel.CLAUDE_3_5_SONNET: ModelMetadata(
|
||||
"anthropic", 200000, 8192
|
||||
), # claude-3-5-sonnet-20241022
|
||||
@@ -212,7 +202,6 @@ MODEL_METADATA = {
|
||||
LlmModel.OLLAMA_DOLPHIN: ModelMetadata("ollama", 32768, None),
|
||||
# https://openrouter.ai/models
|
||||
LlmModel.GEMINI_FLASH_1_5: ModelMetadata("open_router", 1000000, 8192),
|
||||
LlmModel.GEMINI_2_5_PRO: ModelMetadata("open_router", 1050000, 8192),
|
||||
LlmModel.GROK_BETA: ModelMetadata("open_router", 131072, 131072),
|
||||
LlmModel.MISTRAL_NEMO: ModelMetadata("open_router", 128000, 4096),
|
||||
LlmModel.COHERE_COMMAND_R_08_2024: ModelMetadata("open_router", 128000, 4096),
|
||||
@@ -234,13 +223,6 @@ MODEL_METADATA = {
|
||||
LlmModel.AMAZON_NOVA_PRO_V1: ModelMetadata("open_router", 300000, 5120),
|
||||
LlmModel.MICROSOFT_WIZARDLM_2_8X22B: ModelMetadata("open_router", 65536, 4096),
|
||||
LlmModel.GRYPHE_MYTHOMAX_L2_13B: ModelMetadata("open_router", 4096, 4096),
|
||||
LlmModel.META_LLAMA_4_SCOUT: ModelMetadata("open_router", 131072, 131072),
|
||||
LlmModel.META_LLAMA_4_MAVERICK: ModelMetadata("open_router", 1048576, 1000000),
|
||||
# Llama API models
|
||||
LlmModel.LLAMA_API_LLAMA_4_SCOUT: ModelMetadata("llama_api", 128000, 4028),
|
||||
LlmModel.LLAMA_API_LLAMA4_MAVERICK: ModelMetadata("llama_api", 128000, 4028),
|
||||
LlmModel.LLAMA_API_LLAMA3_3_8B: ModelMetadata("llama_api", 128000, 4028),
|
||||
LlmModel.LLAMA_API_LLAMA3_3_70B: ModelMetadata("llama_api", 128000, 4028),
|
||||
}
|
||||
|
||||
for model in LlmModel:
|
||||
@@ -270,7 +252,7 @@ class LLMResponse(BaseModel):
|
||||
|
||||
def convert_openai_tool_fmt_to_anthropic(
|
||||
openai_tools: list[dict] | None = None,
|
||||
) -> Iterable[ToolParam] | anthropic.NotGiven:
|
||||
) -> Iterable[ToolParam] | NotGiven:
|
||||
"""
|
||||
Convert OpenAI tool format to Anthropic tool format.
|
||||
"""
|
||||
@@ -300,13 +282,6 @@ def convert_openai_tool_fmt_to_anthropic(
|
||||
return anthropic_tools
|
||||
|
||||
|
||||
def estimate_token_count(prompt_messages: list[dict]) -> int:
    char_count = sum(len(str(msg.get("content", ""))) for msg in prompt_messages)
    message_overhead = len(prompt_messages) * 4
    estimated_tokens = (char_count // 4) + message_overhead
    return int(estimated_tokens * 1.2)


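# A quick worked example of the heuristic above: two messages totalling 400
# characters of content give char_count=400, message_overhead=2*4=8,
# estimated_tokens=400//4+8=108, and a returned value of int(108*1.2)=129.
# llm_call() below uses this estimate to shrink max_tokens so the request stays
# inside the model's context window.
_example_prompt = [
    {"role": "system", "content": "x" * 120},
    {"role": "user", "content": "y" * 280},
]
assert estimate_token_count(_example_prompt) == 129

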
def llm_call(
|
||||
credentials: APIKeyCredentials,
|
||||
llm_model: LlmModel,
|
||||
@@ -315,7 +290,6 @@ def llm_call(
|
||||
max_tokens: int | None,
|
||||
tools: list[dict] | None = None,
|
||||
ollama_host: str = "localhost:11434",
|
||||
parallel_tool_calls: bool | None = None,
|
||||
) -> LLMResponse:
|
||||
"""
|
||||
Make a call to a language model.
|
||||
@@ -338,14 +312,7 @@ def llm_call(
|
||||
- completion_tokens: The number of tokens used in the completion.
|
||||
"""
|
||||
provider = llm_model.metadata.provider
|
||||
|
||||
# Calculate available tokens based on context window and input length
|
||||
estimated_input_tokens = estimate_token_count(prompt)
|
||||
context_window = llm_model.context_window
|
||||
model_max_output = llm_model.max_output_tokens or 4096
|
||||
user_max = max_tokens or model_max_output
|
||||
available_tokens = max(context_window - estimated_input_tokens, 0)
|
||||
max_tokens = max(min(available_tokens, model_max_output, user_max), 0)
|
||||
max_tokens = max_tokens or llm_model.max_output_tokens or 4096
|
||||
|
||||
if provider == "openai":
|
||||
tools_param = tools if tools else openai.NOT_GIVEN
|
||||
@@ -368,9 +335,6 @@ def llm_call(
|
||||
response_format=response_format, # type: ignore
|
||||
max_completion_tokens=max_tokens,
|
||||
tools=tools_param, # type: ignore
|
||||
parallel_tool_calls=(
|
||||
openai.NOT_GIVEN if parallel_tool_calls is None else parallel_tool_calls
|
||||
),
|
||||
)
|
||||
|
||||
if response.choices[0].message.tool_calls:
|
||||
@@ -451,7 +415,7 @@ def llm_call(
|
||||
|
||||
if not tool_calls and resp.stop_reason == "tool_use":
|
||||
logger.warning(
|
||||
f"Tool use stop reason but no tool calls found in content. {resp}"
|
||||
"Tool use stop reason but no tool calls found in content. %s", resp
|
||||
)
|
||||
|
||||
return LLMResponse(
|
||||
@@ -460,7 +424,7 @@ def llm_call(
|
||||
response=(
|
||||
resp.content[0].name
|
||||
if isinstance(resp.content[0], anthropic.types.ToolUseBlock)
|
||||
else getattr(resp.content[0], "text", "")
|
||||
else resp.content[0].text
|
||||
),
|
||||
tool_calls=tool_calls,
|
||||
prompt_tokens=resp.usage.input_tokens,
|
||||
@@ -501,7 +465,6 @@ def llm_call(
|
||||
model=llm_model.value,
|
||||
prompt=f"{sys_messages}\n\n{usr_messages}",
|
||||
stream=False,
|
||||
options={"num_ctx": max_tokens},
|
||||
)
|
||||
return LLMResponse(
|
||||
raw_response=response.get("response") or "",
|
||||
@@ -550,56 +513,6 @@ def llm_call(
|
||||
else:
|
||||
tool_calls = None
|
||||
|
||||
return LLMResponse(
|
||||
raw_response=response.choices[0].message,
|
||||
prompt=prompt,
|
||||
response=response.choices[0].message.content or "",
|
||||
tool_calls=tool_calls,
|
||||
prompt_tokens=response.usage.prompt_tokens if response.usage else 0,
|
||||
completion_tokens=response.usage.completion_tokens if response.usage else 0,
|
||||
)
|
||||
elif provider == "llama_api":
|
||||
tools_param = tools if tools else openai.NOT_GIVEN
|
||||
client = openai.OpenAI(
|
||||
base_url="https://api.llama.com/compat/v1/",
|
||||
api_key=credentials.api_key.get_secret_value(),
|
||||
)
|
||||
|
||||
response = client.chat.completions.create(
|
||||
extra_headers={
|
||||
"HTTP-Referer": "https://agpt.co",
|
||||
"X-Title": "AutoGPT",
|
||||
},
|
||||
model=llm_model.value,
|
||||
messages=prompt, # type: ignore
|
||||
max_tokens=max_tokens,
|
||||
tools=tools_param, # type: ignore
|
||||
parallel_tool_calls=(
|
||||
openai.NOT_GIVEN if parallel_tool_calls is None else parallel_tool_calls
|
||||
),
|
||||
)
|
||||
|
||||
# If there's no response, raise an error
|
||||
if not response.choices:
|
||||
if response:
|
||||
raise ValueError(f"Llama API error: {response}")
|
||||
else:
|
||||
raise ValueError("No response from Llama API.")
|
||||
|
||||
if response.choices[0].message.tool_calls:
|
||||
tool_calls = [
|
||||
ToolContentBlock(
|
||||
id=tool.id,
|
||||
type=tool.type,
|
||||
function=ToolCall(
|
||||
name=tool.function.name, arguments=tool.function.arguments
|
||||
),
|
||||
)
|
||||
for tool in response.choices[0].message.tool_calls
|
||||
]
|
||||
else:
|
||||
tool_calls = None
|
||||
|
||||
return LLMResponse(
|
||||
raw_response=response.choices[0].message,
|
||||
prompt=prompt,
|
||||
@@ -615,7 +528,7 @@ def llm_call(
|
||||
class AIBlockBase(Block, ABC):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.prompt = []
|
||||
self.prompt = ""
|
||||
|
||||
def merge_llm_stats(self, block: "AIBlockBase"):
|
||||
self.merge_stats(block.execution_stats)
|
||||
@@ -645,7 +558,7 @@ class AIStructuredResponseGeneratorBlock(AIBlockBase):
|
||||
description="The system prompt to provide additional context to the model.",
|
||||
)
|
||||
conversation_history: list[dict] = SchemaField(
|
||||
default_factory=list,
|
||||
default=[],
|
||||
description="The conversation history to provide context for the prompt.",
|
||||
)
|
||||
retry: int = SchemaField(
|
||||
@@ -655,7 +568,7 @@ class AIStructuredResponseGeneratorBlock(AIBlockBase):
|
||||
)
|
||||
prompt_values: dict[str, str] = SchemaField(
|
||||
advanced=False,
|
||||
default_factory=dict,
|
||||
default={},
|
||||
description="Values used to fill in the prompt. The values can be used in the prompt by putting them in a double curly braces, e.g. {{variable_name}}.",
|
||||
)
|
||||
max_tokens: int | None = SchemaField(
|
||||
@@ -674,7 +587,7 @@ class AIStructuredResponseGeneratorBlock(AIBlockBase):
|
||||
response: dict[str, Any] = SchemaField(
|
||||
description="The response object generated by the language model."
|
||||
)
|
||||
prompt: list = SchemaField(description="The prompt sent to the language model.")
|
||||
prompt: str = SchemaField(description="The prompt sent to the language model.")
|
||||
error: str = SchemaField(description="Error message if the API call failed.")
|
||||
|
||||
def __init__(self):
|
||||
@@ -696,7 +609,7 @@ class AIStructuredResponseGeneratorBlock(AIBlockBase):
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("response", {"key1": "key1Value", "key2": "key2Value"}),
|
||||
("prompt", list),
|
||||
("prompt", str),
|
||||
],
|
||||
test_mock={
|
||||
"llm_call": lambda *args, **kwargs: LLMResponse(
|
||||
@@ -729,7 +642,6 @@ class AIStructuredResponseGeneratorBlock(AIBlockBase):
|
||||
Test mocks work only on class functions; this wraps the llm_call function
so that it can be mocked within the block testing framework.
|
||||
"""
|
||||
self.prompt = prompt
|
||||
return llm_call(
|
||||
credentials=credentials,
|
||||
llm_model=llm_model,
|
||||
@@ -847,16 +759,6 @@ class AIStructuredResponseGeneratorBlock(AIBlockBase):
|
||||
prompt.append({"role": "user", "content": retry_prompt})
|
||||
except Exception as e:
|
||||
logger.exception(f"Error calling LLM: {e}")
|
||||
if (
|
||||
"maximum context length" in str(e).lower()
|
||||
or "token limit" in str(e).lower()
|
||||
):
|
||||
if input_data.max_tokens is None:
|
||||
input_data.max_tokens = llm_model.max_output_tokens or 4096
|
||||
input_data.max_tokens = int(input_data.max_tokens * 0.85)
|
||||
logger.debug(
|
||||
f"Reducing max_tokens to {input_data.max_tokens} for next attempt"
|
||||
)
|
||||
retry_prompt = f"Error calling LLM: {e}"
|
||||
finally:
|
||||
self.merge_stats(
|
||||
@@ -894,7 +796,7 @@ class AITextGeneratorBlock(AIBlockBase):
|
||||
)
|
||||
prompt_values: dict[str, str] = SchemaField(
|
||||
advanced=False,
|
||||
default_factory=dict,
|
||||
default={},
|
||||
description="Values used to fill in the prompt. The values can be used in the prompt by putting them in a double curly braces, e.g. {{variable_name}}.",
|
||||
)
|
||||
ollama_host: str = SchemaField(
|
||||
@@ -912,7 +814,7 @@ class AITextGeneratorBlock(AIBlockBase):
|
||||
response: str = SchemaField(
|
||||
description="The response generated by the language model."
|
||||
)
|
||||
prompt: list = SchemaField(description="The prompt sent to the language model.")
|
||||
prompt: str = SchemaField(description="The prompt sent to the language model.")
|
||||
error: str = SchemaField(description="Error message if the API call failed.")
|
||||
|
||||
def __init__(self):
|
||||
@@ -929,7 +831,7 @@ class AITextGeneratorBlock(AIBlockBase):
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("response", "Response text"),
|
||||
("prompt", list),
|
||||
("prompt", str),
|
||||
],
|
||||
test_mock={"llm_call": lambda *args, **kwargs: "Response text"},
|
||||
)
|
||||
@@ -948,10 +850,7 @@ class AITextGeneratorBlock(AIBlockBase):
|
||||
self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
|
||||
) -> BlockOutput:
|
||||
object_input_data = AIStructuredResponseGeneratorBlock.Input(
|
||||
**{
|
||||
attr: getattr(input_data, attr)
|
||||
for attr in AITextGeneratorBlock.Input.model_fields
|
||||
},
|
||||
**{attr: getattr(input_data, attr) for attr in input_data.model_fields},
|
||||
expected_format={},
|
||||
)
|
||||
yield "response", self.llm_call(object_input_data, credentials)
|
||||
@@ -1008,7 +907,7 @@ class AITextSummarizerBlock(AIBlockBase):
|
||||
|
||||
class Output(BlockSchema):
|
||||
summary: str = SchemaField(description="The final summary of the text.")
|
||||
prompt: list = SchemaField(description="The prompt sent to the language model.")
|
||||
prompt: str = SchemaField(description="The prompt sent to the language model.")
|
||||
error: str = SchemaField(description="Error message if the API call failed.")
|
||||
|
||||
def __init__(self):
|
||||
@@ -1025,7 +924,7 @@ class AITextSummarizerBlock(AIBlockBase):
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("summary", "Final summary of a long text"),
|
||||
("prompt", list),
|
||||
("prompt", str),
|
||||
],
|
||||
test_mock={
|
||||
"llm_call": lambda input_data, credentials: (
|
||||
@@ -1134,14 +1033,8 @@ class AITextSummarizerBlock(AIBlockBase):
|
||||
|
||||
class AIConversationBlock(AIBlockBase):
|
||||
class Input(BlockSchema):
|
||||
prompt: str = SchemaField(
|
||||
description="The prompt to send to the language model.",
|
||||
placeholder="Enter your prompt here...",
|
||||
default="",
|
||||
advanced=False,
|
||||
)
|
||||
messages: List[Any] = SchemaField(
|
||||
description="List of messages in the conversation.",
|
||||
description="List of messages in the conversation.", min_length=1
|
||||
)
|
||||
model: LlmModel = SchemaField(
|
||||
title="LLM Model",
|
||||
@@ -1164,7 +1057,7 @@ class AIConversationBlock(AIBlockBase):
|
||||
response: str = SchemaField(
|
||||
description="The model's response to the conversation."
|
||||
)
|
||||
prompt: list = SchemaField(description="The prompt sent to the language model.")
|
||||
prompt: str = SchemaField(description="The prompt sent to the language model.")
|
||||
error: str = SchemaField(description="Error message if the API call failed.")
|
||||
|
||||
def __init__(self):
|
||||
@@ -1193,7 +1086,7 @@ class AIConversationBlock(AIBlockBase):
|
||||
"response",
|
||||
"The 2020 World Series was played at Globe Life Field in Arlington, Texas.",
|
||||
),
|
||||
("prompt", list),
|
||||
("prompt", str),
|
||||
],
|
||||
test_mock={
|
||||
"llm_call": lambda *args, **kwargs: "The 2020 World Series was played at Globe Life Field in Arlington, Texas."
|
||||
@@ -1215,7 +1108,7 @@ class AIConversationBlock(AIBlockBase):
|
||||
) -> BlockOutput:
|
||||
response = self.llm_call(
|
||||
AIStructuredResponseGeneratorBlock.Input(
|
||||
prompt=input_data.prompt,
|
||||
prompt="",
|
||||
credentials=input_data.credentials,
|
||||
model=input_data.model,
|
||||
conversation_history=input_data.messages,
|
||||
@@ -1273,7 +1166,7 @@ class AIListGeneratorBlock(AIBlockBase):
|
||||
list_item: str = SchemaField(
|
||||
description="Each individual item in the list.",
|
||||
)
|
||||
prompt: list = SchemaField(description="The prompt sent to the language model.")
|
||||
prompt: str = SchemaField(description="The prompt sent to the language model.")
|
||||
error: str = SchemaField(
|
||||
description="Error message if the list generation failed."
|
||||
)
|
||||
@@ -1305,7 +1198,7 @@ class AIListGeneratorBlock(AIBlockBase):
|
||||
"generated_list",
|
||||
["Zylora Prime", "Kharon-9", "Vortexia", "Oceara", "Draknos"],
|
||||
),
|
||||
("prompt", list),
|
||||
("prompt", str),
|
||||
("list_item", "Zylora Prime"),
|
||||
("list_item", "Kharon-9"),
|
||||
("list_item", "Vortexia"),
|
||||
|
||||
@@ -8,13 +8,13 @@ from moviepy.video.io.VideoFileClip import VideoFileClip
|
||||
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
from backend.util.file import MediaFileType, get_exec_file_path, store_media_file
|
||||
from backend.util.file import MediaFile, get_exec_file_path, store_media_file
|
||||
|
||||
|
||||
class MediaDurationBlock(Block):
|
||||
|
||||
class Input(BlockSchema):
|
||||
media_in: MediaFileType = SchemaField(
|
||||
media_in: MediaFile = SchemaField(
|
||||
description="Media input (URL, data URI, or local path)."
|
||||
)
|
||||
is_video: bool = SchemaField(
|
||||
@@ -69,7 +69,7 @@ class LoopVideoBlock(Block):
|
||||
"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
video_in: MediaFileType = SchemaField(
|
||||
video_in: MediaFile = SchemaField(
|
||||
description="The input video (can be a URL, data URI, or local path)."
|
||||
)
|
||||
# Provide EITHER a `duration` or `n_loops` or both. We'll demonstrate `duration`.
|
||||
@@ -137,7 +137,7 @@ class LoopVideoBlock(Block):
|
||||
assert isinstance(looped_clip, VideoFileClip)
|
||||
|
||||
# 4) Save the looped output
|
||||
output_filename = MediaFileType(
|
||||
output_filename = MediaFile(
|
||||
f"{node_exec_id}_looped_{os.path.basename(local_video_path)}"
|
||||
)
|
||||
output_abspath = get_exec_file_path(graph_exec_id, output_filename)
|
||||
@@ -162,10 +162,10 @@ class AddAudioToVideoBlock(Block):
|
||||
"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
video_in: MediaFileType = SchemaField(
|
||||
video_in: MediaFile = SchemaField(
|
||||
description="Video input (URL, data URI, or local path)."
|
||||
)
|
||||
audio_in: MediaFileType = SchemaField(
|
||||
audio_in: MediaFile = SchemaField(
|
||||
description="Audio input (URL, data URI, or local path)."
|
||||
)
|
||||
volume: float = SchemaField(
|
||||
@@ -178,7 +178,7 @@ class AddAudioToVideoBlock(Block):
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
video_out: MediaFileType = SchemaField(
|
||||
video_out: MediaFile = SchemaField(
|
||||
description="Final video (with attached audio), as a path or data URI."
|
||||
)
|
||||
error: str = SchemaField(
|
||||
@@ -229,7 +229,7 @@ class AddAudioToVideoBlock(Block):
|
||||
final_clip = video_clip.with_audio(audio_clip)
|
||||
|
||||
# 4) Write to output file
|
||||
output_filename = MediaFileType(
|
||||
output_filename = MediaFile(
|
||||
f"{node_exec_id}_audio_attached_{os.path.basename(local_video_path)}"
|
||||
)
|
||||
output_abspath = os.path.join(abs_temp_dir, output_filename)
|
||||
|
||||
@@ -65,7 +65,7 @@ class AddMemoryBlock(Block, Mem0Base):
|
||||
default=Content(discriminator="content", content="I'm a vegetarian"),
|
||||
)
|
||||
metadata: dict[str, Any] = SchemaField(
|
||||
description="Optional metadata for the memory", default_factory=dict
|
||||
description="Optional metadata for the memory", default={}
|
||||
)
|
||||
|
||||
limit_memory_to_run: bool = SchemaField(
|
||||
@@ -173,7 +173,7 @@ class SearchMemoryBlock(Block, Mem0Base):
|
||||
)
|
||||
categories_filter: list[str] = SchemaField(
|
||||
description="Categories to filter by",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
advanced=True,
|
||||
)
|
||||
limit_memory_to_run: bool = SchemaField(
|
||||
|
||||
@@ -6,14 +6,13 @@ from backend.blocks.nvidia._auth import (
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
from backend.util.request import requests
|
||||
from backend.util.type import MediaFileType
|
||||
|
||||
|
||||
class NvidiaDeepfakeDetectBlock(Block):
|
||||
class Input(BlockSchema):
|
||||
credentials: NvidiaCredentialsInput = NvidiaCredentialsField()
|
||||
image_base64: MediaFileType = SchemaField(
|
||||
description="Image to analyze for deepfakes",
|
||||
image_base64: str = SchemaField(
|
||||
description="Image to analyze for deepfakes", image_upload=True
|
||||
)
|
||||
return_image: bool = SchemaField(
|
||||
description="Whether to return the processed image with markings",
|
||||
@@ -23,12 +22,16 @@ class NvidiaDeepfakeDetectBlock(Block):
|
||||
class Output(BlockSchema):
|
||||
status: str = SchemaField(
|
||||
description="Detection status (SUCCESS, ERROR, CONTENT_FILTERED)",
|
||||
default="",
|
||||
)
|
||||
image: MediaFileType = SchemaField(
|
||||
image: str = SchemaField(
|
||||
description="Processed image with detection markings (if return_image=True)",
|
||||
default="",
|
||||
image_output=True,
|
||||
)
|
||||
is_deepfake: float = SchemaField(
|
||||
description="Probability that the image is a deepfake (0-1)",
|
||||
default=0.0,
|
||||
)
|
||||
|
||||
def __init__(self):
|
||||
|
||||
@@ -177,8 +177,7 @@ class PineconeInsertBlock(Block):
|
||||
description="Namespace to use in Pinecone", default=""
|
||||
)
|
||||
metadata: dict = SchemaField(
|
||||
description="Additional metadata to store with each vector",
|
||||
default_factory=dict,
|
||||
description="Additional metadata to store with each vector", default={}
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
|
||||
@@ -2,8 +2,8 @@ import os
from enum import Enum
from typing import Literal

import replicate
from pydantic import SecretStr
from replicate.client import Client as ReplicateClient
from replicate.helpers import FileOutput

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
@@ -198,7 +198,7 @@ class ReplicateFluxAdvancedModelBlock(Block):
safety_tolerance,
):
# Initialize Replicate client with the API key
client = ReplicateClient(api_token=api_key.get_secret_value())
client = replicate.Client(api_token=api_key.get_secret_value())

# Run the model with additional parameters
output: FileOutput | list[FileOutput] = client.run( # type: ignore This is because they changed the return type, and didn't update the type hint! It should be overloaded depending on the value of `use_file_output` to `FileOutput | list[FileOutput]` but it's `Any | Iterator[Any]`
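
Aside, not part of the diff: the trailing comment above notes that `replicate`'s published type hints lag its runtime behaviour, so `client.run` may hand back a single `FileOutput` or a list of them. A hedged normalisation sketch, assuming only the `FileOutput` import shown in this hunk:

from replicate.helpers import FileOutput

def as_file_outputs(output: object) -> list[FileOutput]:
    # Collapse the two possible shapes into one list; fail loudly on anything else.
    if isinstance(output, FileOutput):
        return [output]
    if isinstance(output, list) and all(isinstance(item, FileOutput) for item in output):
        return output
    raise TypeError(f"Unexpected replicate output: {type(output).__name__}")
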
@@ -12,7 +12,7 @@ from backend.data.model import (
|
||||
SchemaField,
|
||||
)
|
||||
from backend.integrations.providers import ProviderName
|
||||
from backend.util.file import MediaFileType, store_media_file
|
||||
from backend.util.file import MediaFile, store_media_file
|
||||
from backend.util.request import Requests
|
||||
|
||||
|
||||
@@ -57,7 +57,7 @@ class ScreenshotWebPageBlock(Block):
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
image: MediaFileType = SchemaField(description="The screenshot image data")
|
||||
image: MediaFile = SchemaField(description="The screenshot image data")
|
||||
error: str = SchemaField(description="Error message if the screenshot failed")
|
||||
|
||||
def __init__(self):
|
||||
@@ -142,9 +142,7 @@ class ScreenshotWebPageBlock(Block):
|
||||
return {
|
||||
"image": store_media_file(
|
||||
graph_exec_id=graph_exec_id,
|
||||
file=MediaFileType(
|
||||
f"data:image/{format.value};base64,{b64encode(response.content).decode('utf-8')}"
|
||||
),
|
||||
file=f"data:image/{format.value};base64,{b64encode(response.content).decode('utf-8')}",
|
||||
return_content=True,
|
||||
)
|
||||
}
|
||||
|
||||
@@ -8,7 +8,6 @@ from backend.data.block import (
|
||||
BlockWebhookConfig,
|
||||
)
|
||||
from backend.data.model import SchemaField
|
||||
from backend.integrations.providers import ProviderName
|
||||
from backend.util import settings
|
||||
from backend.util.settings import AppEnvironment, BehaveAs
|
||||
|
||||
@@ -26,7 +25,7 @@ class Slant3DTriggerBase:
|
||||
class Input(BlockSchema):
|
||||
credentials: Slant3DCredentialsInput = Slant3DCredentialsField()
|
||||
# Webhook URL is handled by the webhook system
|
||||
payload: dict = SchemaField(hidden=True, default_factory=dict)
|
||||
payload: dict = SchemaField(hidden=True, default={})
|
||||
|
||||
class Output(BlockSchema):
|
||||
payload: dict = SchemaField(
|
||||
@@ -83,7 +82,7 @@ class Slant3DOrderWebhookBlock(Slant3DTriggerBase, Block):
|
||||
input_schema=self.Input,
|
||||
output_schema=self.Output,
|
||||
webhook_config=BlockWebhookConfig(
|
||||
provider=ProviderName.SLANT3D,
|
||||
provider="slant3d",
|
||||
webhook_type="orders", # Only one type for now
|
||||
resource_format="", # No resource format needed
|
||||
event_filter_input="events",
|
||||
|
||||
@@ -14,6 +14,7 @@ from backend.data.block import (
|
||||
BlockOutput,
|
||||
BlockSchema,
|
||||
BlockType,
|
||||
get_block,
|
||||
)
|
||||
from backend.data.model import SchemaField
|
||||
from backend.util import json
|
||||
@@ -26,10 +27,10 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
@thread_cached
|
||||
def get_database_manager_client():
|
||||
from backend.executor import DatabaseManagerClient
|
||||
from backend.executor import DatabaseManager
|
||||
from backend.util.service import get_service_client
|
||||
|
||||
return get_service_client(DatabaseManagerClient)
|
||||
return get_service_client(DatabaseManager)
|
||||
|
||||
|
||||
def _get_tool_requests(entry: dict[str, Any]) -> list[str]:
|
||||
@@ -154,7 +155,7 @@ class SmartDecisionMakerBlock(Block):
|
||||
description="The system prompt to provide additional context to the model.",
|
||||
)
|
||||
conversation_history: list[dict] = SchemaField(
|
||||
default_factory=list,
|
||||
default=[],
|
||||
description="The conversation history to provide context for the prompt.",
|
||||
)
|
||||
last_tool_output: Any = SchemaField(
|
||||
@@ -168,7 +169,7 @@ class SmartDecisionMakerBlock(Block):
|
||||
)
|
||||
prompt_values: dict[str, str] = SchemaField(
|
||||
advanced=False,
|
||||
default_factory=dict,
|
||||
default={},
|
||||
description="Values used to fill in the prompt. The values can be used in the prompt by putting them in a double curly braces, e.g. {{variable_name}}.",
|
||||
)
|
||||
max_tokens: int | None = SchemaField(
|
||||
@@ -246,10 +247,6 @@ class SmartDecisionMakerBlock(Block):
test_credentials=llm.TEST_CREDENTIALS,
)

@staticmethod
def cleanup(s: str):
return re.sub(r"[^a-zA-Z0-9_-]", "_", s).lower()
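
Aside, not part of the diff: `cleanup()` above lowercases a name and replaces every character outside `[a-zA-Z0-9_-]` with an underscore, which is what keeps block and link names usable as LLM tool-function names elsewhere in this file. Illustrative example:

import re

def cleanup(s: str) -> str:
    return re.sub(r"[^a-zA-Z0-9_-]", "_", s).lower()

assert cleanup("Get Weather (v2)") == "get_weather__v2_"
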
@staticmethod
|
||||
def _create_block_function_signature(
|
||||
sink_node: "Node", links: list["Link"]
|
||||
@@ -267,10 +264,12 @@ class SmartDecisionMakerBlock(Block):
|
||||
Raises:
|
||||
ValueError: If the block specified by sink_node.block_id is not found.
|
||||
"""
|
||||
block = sink_node.block
|
||||
block = get_block(sink_node.block_id)
|
||||
if not block:
|
||||
raise ValueError(f"Block not found: {sink_node.block_id}")
|
||||
|
||||
tool_function: dict[str, Any] = {
|
||||
"name": SmartDecisionMakerBlock.cleanup(block.name),
|
||||
"name": re.sub(r"[^a-zA-Z0-9_-]", "_", block.name).lower(),
|
||||
"description": block.description,
|
||||
}
|
||||
|
||||
@@ -285,7 +284,7 @@ class SmartDecisionMakerBlock(Block):
|
||||
and sink_block_input_schema.model_fields[link.sink_name].description
|
||||
else f"The {link.sink_name} of the tool"
|
||||
)
|
||||
properties[SmartDecisionMakerBlock.cleanup(link.sink_name)] = {
|
||||
properties[link.sink_name.lower()] = {
|
||||
"type": "string",
|
||||
"description": description,
|
||||
}
|
||||
@@ -330,7 +329,7 @@ class SmartDecisionMakerBlock(Block):
|
||||
)
|
||||
|
||||
tool_function: dict[str, Any] = {
|
||||
"name": SmartDecisionMakerBlock.cleanup(sink_graph_meta.name),
|
||||
"name": re.sub(r"[^a-zA-Z0-9_-]", "_", sink_graph_meta.name).lower(),
|
||||
"description": sink_graph_meta.description,
|
||||
}
|
||||
|
||||
@@ -345,7 +344,7 @@ class SmartDecisionMakerBlock(Block):
|
||||
in sink_block_input_schema["properties"][link.sink_name]
|
||||
else f"The {link.sink_name} of the tool"
|
||||
)
|
||||
properties[SmartDecisionMakerBlock.cleanup(link.sink_name)] = {
|
||||
properties[link.sink_name.lower()] = {
|
||||
"type": "string",
|
||||
"description": description,
|
||||
}
|
||||
@@ -495,7 +494,6 @@ class SmartDecisionMakerBlock(Block):
|
||||
max_tokens=input_data.max_tokens,
|
||||
tools=tool_functions,
|
||||
ollama_host=input_data.ollama_host,
|
||||
parallel_tool_calls=False,
|
||||
)
|
||||
|
||||
if not response.tool_calls:
|
||||
@@ -507,7 +505,7 @@ class SmartDecisionMakerBlock(Block):
|
||||
tool_args = json.loads(tool_call.function.arguments)
|
||||
|
||||
for arg_name, arg_value in tool_args.items():
|
||||
yield f"tools_^_{tool_name}_~_{arg_name}", arg_value
|
||||
yield f"tools_^_{tool_name}_{arg_name}".lower(), arg_value
|
||||
|
||||
response.prompt.append(response.raw_response)
|
||||
yield "conversations", response.prompt
|
||||
|
||||
@@ -112,7 +112,7 @@ class AddLeadToCampaignBlock(Block):
|
||||
lead_list: list[LeadInput] = SchemaField(
|
||||
description="An array of JSON objects, each representing a lead's details. Can hold max 100 leads.",
|
||||
max_length=100,
|
||||
default_factory=list,
|
||||
default=[],
|
||||
advanced=False,
|
||||
)
|
||||
settings: LeadUploadSettings = SchemaField(
|
||||
@@ -248,7 +248,7 @@ class SaveCampaignSequencesBlock(Block):
|
||||
)
|
||||
sequences: list[Sequence] = SchemaField(
|
||||
description="The sequences to save",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
advanced=False,
|
||||
)
|
||||
credentials: SmartLeadCredentialsInput = SchemaField(
|
||||
|
||||
@@ -39,7 +39,7 @@ class LeadCustomFields(BaseModel):
|
||||
fields: dict[str, str] = SchemaField(
|
||||
description="Custom fields for a lead (max 20 fields)",
|
||||
max_length=20,
|
||||
default_factory=dict,
|
||||
default={},
|
||||
)
|
||||
|
||||
|
||||
@@ -85,7 +85,7 @@ class AddLeadsRequest(BaseModel):
|
||||
lead_list: list[LeadInput] = SchemaField(
|
||||
description="List of leads to add to the campaign",
|
||||
max_length=100,
|
||||
default_factory=list,
|
||||
default=[],
|
||||
)
|
||||
settings: LeadUploadSettings
|
||||
campaign_id: int
|
||||
|
||||
@@ -7,7 +7,6 @@ from typing_extensions import Optional
|
||||
from backend.blocks.todoist._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TODOIST_OAUTH_IS_CONFIGURED,
|
||||
TodoistCredentials,
|
||||
TodoistCredentialsField,
|
||||
TodoistCredentialsInput,
|
||||
@@ -62,7 +61,6 @@ class TodoistCreateCommentBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistCreateCommentBlock.Input,
|
||||
output_schema=TodoistCreateCommentBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"content": "Test comment",
|
||||
@@ -166,7 +164,6 @@ class TodoistGetCommentsBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistGetCommentsBlock.Input,
|
||||
output_schema=TodoistGetCommentsBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"id_type": {"discriminator": "task", "task_id": "2995104339"},
|
||||
@@ -271,7 +268,6 @@ class TodoistGetCommentBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistGetCommentBlock.Input,
|
||||
output_schema=TodoistGetCommentBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"comment_id": "2992679862",
|
||||
@@ -350,7 +346,6 @@ class TodoistUpdateCommentBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistUpdateCommentBlock.Input,
|
||||
output_schema=TodoistUpdateCommentBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"comment_id": "2992679862",
|
||||
@@ -409,7 +404,6 @@ class TodoistDeleteCommentBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistDeleteCommentBlock.Input,
|
||||
output_schema=TodoistDeleteCommentBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"comment_id": "2992679862",
|
||||
|
||||
@@ -4,7 +4,6 @@ from typing_extensions import Optional
|
||||
from backend.blocks.todoist._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TODOIST_OAUTH_IS_CONFIGURED,
|
||||
TodoistCredentials,
|
||||
TodoistCredentialsField,
|
||||
TodoistCredentialsInput,
|
||||
@@ -43,7 +42,6 @@ class TodoistCreateLabelBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistCreateLabelBlock.Input,
|
||||
output_schema=TodoistCreateLabelBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"name": "Test Label",
|
||||
@@ -132,7 +130,6 @@ class TodoistListLabelsBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistListLabelsBlock.Input,
|
||||
output_schema=TodoistListLabelsBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={"credentials": TEST_CREDENTIALS_INPUT},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
@@ -214,7 +211,6 @@ class TodoistGetLabelBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistGetLabelBlock.Input,
|
||||
output_schema=TodoistGetLabelBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"label_id": "2156154810",
|
||||
@@ -297,7 +293,6 @@ class TodoistUpdateLabelBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistUpdateLabelBlock.Input,
|
||||
output_schema=TodoistUpdateLabelBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"label_id": "2156154810",
|
||||
@@ -369,7 +364,6 @@ class TodoistDeleteLabelBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistDeleteLabelBlock.Input,
|
||||
output_schema=TodoistDeleteLabelBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"label_id": "2156154810",
|
||||
@@ -421,7 +415,6 @@ class TodoistGetSharedLabelsBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistGetSharedLabelsBlock.Input,
|
||||
output_schema=TodoistGetSharedLabelsBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={"credentials": TEST_CREDENTIALS_INPUT},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[("labels", ["Label1", "Label2", "Label3"])],
|
||||
@@ -478,7 +471,6 @@ class TodoistRenameSharedLabelsBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistRenameSharedLabelsBlock.Input,
|
||||
output_schema=TodoistRenameSharedLabelsBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"name": "OldLabel",
|
||||
@@ -534,7 +526,6 @@ class TodoistRemoveSharedLabelsBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistRemoveSharedLabelsBlock.Input,
|
||||
output_schema=TodoistRemoveSharedLabelsBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={"credentials": TEST_CREDENTIALS_INPUT, "name": "LabelToRemove"},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[("success", True)],
|
||||
|
||||
@@ -4,7 +4,6 @@ from typing_extensions import Optional
|
||||
from backend.blocks.todoist._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TODOIST_OAUTH_IS_CONFIGURED,
|
||||
TodoistCredentials,
|
||||
TodoistCredentialsField,
|
||||
TodoistCredentialsInput,
|
||||
@@ -36,7 +35,6 @@ class TodoistListProjectsBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistListProjectsBlock.Input,
|
||||
output_schema=TodoistListProjectsBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
},
|
||||
@@ -152,7 +150,6 @@ class TodoistCreateProjectBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistCreateProjectBlock.Input,
|
||||
output_schema=TodoistCreateProjectBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={"credentials": TEST_CREDENTIALS_INPUT, "name": "Test Project"},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[("success", True)],
|
||||
@@ -233,7 +230,6 @@ class TodoistGetProjectBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistGetProjectBlock.Input,
|
||||
output_schema=TodoistGetProjectBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"project_id": "2203306141",
|
||||
@@ -336,7 +332,6 @@ class TodoistUpdateProjectBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistUpdateProjectBlock.Input,
|
||||
output_schema=TodoistUpdateProjectBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"project_id": "2203306141",
|
||||
@@ -418,7 +413,6 @@ class TodoistDeleteProjectBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistDeleteProjectBlock.Input,
|
||||
output_schema=TodoistDeleteProjectBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"project_id": "2203306141",
|
||||
@@ -487,7 +481,6 @@ class TodoistListCollaboratorsBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistListCollaboratorsBlock.Input,
|
||||
output_schema=TodoistListCollaboratorsBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"project_id": "2203306141",
|
||||
|
||||
@@ -4,7 +4,6 @@ from typing_extensions import Optional
|
||||
from backend.blocks.todoist._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TODOIST_OAUTH_IS_CONFIGURED,
|
||||
TodoistCredentials,
|
||||
TodoistCredentialsField,
|
||||
TodoistCredentialsInput,
|
||||
@@ -37,7 +36,6 @@ class TodoistListSectionsBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistListSectionsBlock.Input,
|
||||
output_schema=TodoistListSectionsBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"project_id": "2203306141",
|
||||
@@ -209,7 +207,6 @@ class TodoistGetSectionBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistGetSectionBlock.Input,
|
||||
output_schema=TodoistGetSectionBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={"credentials": TEST_CREDENTIALS_INPUT, "section_id": "7025"},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
@@ -278,7 +275,6 @@ class TodoistDeleteSectionBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistDeleteSectionBlock.Input,
|
||||
output_schema=TodoistDeleteSectionBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={"credentials": TEST_CREDENTIALS_INPUT, "section_id": "7025"},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[("success", True)],
|
||||
|
||||
@@ -7,7 +7,6 @@ from typing_extensions import Optional
|
||||
from backend.blocks.todoist._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TODOIST_OAUTH_IS_CONFIGURED,
|
||||
TodoistCredentials,
|
||||
TodoistCredentialsField,
|
||||
TodoistCredentialsInput,
|
||||
@@ -87,7 +86,6 @@ class TodoistCreateTaskBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistCreateTaskBlock.Input,
|
||||
output_schema=TodoistCreateTaskBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"content": "Buy groceries",
|
||||
@@ -219,7 +217,6 @@ class TodoistGetTasksBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistGetTasksBlock.Input,
|
||||
output_schema=TodoistGetTasksBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"project_id": "2203306141",
|
||||
@@ -312,7 +309,6 @@ class TodoistGetTaskBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistGetTaskBlock.Input,
|
||||
output_schema=TodoistGetTaskBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={"credentials": TEST_CREDENTIALS_INPUT, "task_id": "2995104339"},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
@@ -432,7 +428,6 @@ class TodoistUpdateTaskBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistUpdateTaskBlock.Input,
|
||||
output_schema=TodoistUpdateTaskBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"task_id": "2995104339",
|
||||
@@ -472,24 +467,32 @@ class TodoistUpdateTaskBlock(Block):
|
||||
)
|
||||
|
||||
task_updates = {}
|
||||
update_fields = {
|
||||
"content": input_data.content,
|
||||
"description": input_data.description,
|
||||
"project_id": input_data.project_id,
|
||||
"section_id": input_data.section_id,
|
||||
"parent_id": input_data.parent_id,
|
||||
"order": input_data.order,
|
||||
"labels": input_data.labels,
|
||||
"priority": input_data.priority,
|
||||
"due_date": due_date,
|
||||
"deadline_date": deadline_date,
|
||||
"assignee_id": input_data.assignee_id,
|
||||
"duration": input_data.duration,
|
||||
"duration_unit": input_data.duration_unit,
|
||||
}
|
||||
|
||||
# Filter out None values
|
||||
task_updates = {k: v for k, v in update_fields.items() if v is not None}
|
||||
if input_data.content is not None:
|
||||
task_updates["content"] = input_data.content
|
||||
if input_data.description is not None:
|
||||
task_updates["description"] = input_data.description
|
||||
if input_data.project_id is not None:
|
||||
task_updates["project_id"] = input_data.project_id
|
||||
if input_data.section_id is not None:
|
||||
task_updates["section_id"] = input_data.section_id
|
||||
if input_data.parent_id is not None:
|
||||
task_updates["parent_id"] = input_data.parent_id
|
||||
if input_data.order is not None:
|
||||
task_updates["order"] = input_data.order
|
||||
if input_data.labels is not None:
|
||||
task_updates["labels"] = input_data.labels
|
||||
if input_data.priority is not None:
|
||||
task_updates["priority"] = input_data.priority
|
||||
if due_date is not None:
|
||||
task_updates["due_date"] = due_date
|
||||
if deadline_date is not None:
|
||||
task_updates["deadline_date"] = deadline_date
|
||||
if input_data.assignee_id is not None:
|
||||
task_updates["assignee_id"] = input_data.assignee_id
|
||||
if input_data.duration is not None:
|
||||
task_updates["duration"] = input_data.duration
|
||||
if input_data.duration_unit is not None:
|
||||
task_updates["duration_unit"] = input_data.duration_unit
|
||||
|
||||
self.update_task(
|
||||
credentials,
|
||||
@@ -523,7 +526,6 @@ class TodoistCloseTaskBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistCloseTaskBlock.Input,
|
||||
output_schema=TodoistCloseTaskBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={"credentials": TEST_CREDENTIALS_INPUT, "task_id": "2995104339"},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[("success", True)],
|
||||
@@ -574,7 +576,6 @@ class TodoistReopenTaskBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistReopenTaskBlock.Input,
|
||||
output_schema=TodoistReopenTaskBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={"credentials": TEST_CREDENTIALS_INPUT, "task_id": "2995104339"},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
@@ -627,7 +628,6 @@ class TodoistDeleteTaskBlock(Block):
|
||||
categories={BlockCategory.PRODUCTIVITY},
|
||||
input_schema=TodoistDeleteTaskBlock.Input,
|
||||
output_schema=TodoistDeleteTaskBlock.Output,
|
||||
disabled=not TODOIST_OAUTH_IS_CONFIGURED,
|
||||
test_input={"credentials": TEST_CREDENTIALS_INPUT, "task_id": "2995104339"},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Todo : Add new Type support, and disable block if it's Oauth is not configured
|
||||
# Todo : Add new Type support
|
||||
|
||||
# from typing import cast
|
||||
# import tweepy
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Todo : Add new Type support, and disable block if it's Oauth is not configured
|
||||
# Todo : Add new Type support
|
||||
|
||||
# from typing import cast
|
||||
|
||||
@@ -156,7 +156,7 @@
|
||||
# participant_ids: list[str] = SchemaField(
|
||||
# description="Array of User IDs to create conversation with (max 50)",
|
||||
# placeholder="Enter participant user IDs",
|
||||
# default_factory=list,
|
||||
# default=[],
|
||||
# advanced=False
|
||||
# )
|
||||
|
||||
|
||||
@@ -4,7 +4,6 @@ import tweepy
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TWITTER_OAUTH_IS_CONFIGURED,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
@@ -45,7 +44,6 @@ class TwitterUnfollowListBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterUnfollowListBlock.Input,
|
||||
output_schema=TwitterUnfollowListBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={"list_id": "123456789", "credentials": TEST_CREDENTIALS_INPUT},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
@@ -108,7 +106,6 @@ class TwitterFollowListBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterFollowListBlock.Input,
|
||||
output_schema=TwitterFollowListBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={"list_id": "123456789", "credentials": TEST_CREDENTIALS_INPUT},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
|
||||
@@ -6,7 +6,6 @@ from tweepy.client import Response
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TWITTER_OAUTH_IS_CONFIGURED,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
@@ -40,6 +39,7 @@ class TwitterGetListBlock(Block):
|
||||
list_id: str = SchemaField(
|
||||
description="The ID of the List to lookup",
|
||||
placeholder="Enter list ID",
|
||||
required=True,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
@@ -64,7 +64,6 @@ class TwitterGetListBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetListBlock.Input,
|
||||
output_schema=TwitterGetListBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"list_id": "84839422",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
@@ -185,6 +184,7 @@ class TwitterGetOwnedListsBlock(Block):
|
||||
user_id: str = SchemaField(
|
||||
description="The user ID whose owned Lists to retrieve",
|
||||
placeholder="Enter user ID",
|
||||
required=True,
|
||||
)
|
||||
|
||||
max_results: int | None = SchemaField(
|
||||
@@ -222,7 +222,6 @@ class TwitterGetOwnedListsBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetOwnedListsBlock.Input,
|
||||
output_schema=TwitterGetOwnedListsBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"user_id": "2244994945",
|
||||
"max_results": 10,
|
||||
|
||||
@@ -6,7 +6,6 @@ from tweepy.client import Response
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TWITTER_OAUTH_IS_CONFIGURED,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
@@ -46,11 +45,13 @@ class TwitterRemoveListMemberBlock(Block):
|
||||
list_id: str = SchemaField(
|
||||
description="The ID of the List to remove the member from",
|
||||
placeholder="Enter list ID",
|
||||
required=True,
|
||||
)
|
||||
|
||||
user_id: str = SchemaField(
|
||||
description="The ID of the user to remove from the List",
|
||||
placeholder="Enter user ID to remove",
|
||||
required=True,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
@@ -66,7 +67,6 @@ class TwitterRemoveListMemberBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterRemoveListMemberBlock.Input,
|
||||
output_schema=TwitterRemoveListMemberBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"list_id": "123456789",
|
||||
"user_id": "987654321",
|
||||
@@ -120,11 +120,13 @@ class TwitterAddListMemberBlock(Block):
|
||||
list_id: str = SchemaField(
|
||||
description="The ID of the List to add the member to",
|
||||
placeholder="Enter list ID",
|
||||
required=True,
|
||||
)
|
||||
|
||||
user_id: str = SchemaField(
|
||||
description="The ID of the user to add to the List",
|
||||
placeholder="Enter user ID to add",
|
||||
required=True,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
@@ -140,7 +142,6 @@ class TwitterAddListMemberBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterAddListMemberBlock.Input,
|
||||
output_schema=TwitterAddListMemberBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"list_id": "123456789",
|
||||
"user_id": "987654321",
|
||||
@@ -194,6 +195,7 @@ class TwitterGetListMembersBlock(Block):
|
||||
list_id: str = SchemaField(
|
||||
description="The ID of the List to get members from",
|
||||
placeholder="Enter list ID",
|
||||
required=True,
|
||||
)
|
||||
|
||||
max_results: int | None = SchemaField(
|
||||
@@ -232,7 +234,6 @@ class TwitterGetListMembersBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetListMembersBlock.Input,
|
||||
output_schema=TwitterGetListMembersBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"list_id": "123456789",
|
||||
"max_results": 2,
|
||||
@@ -375,6 +376,7 @@ class TwitterGetListMembershipsBlock(Block):
|
||||
user_id: str = SchemaField(
|
||||
description="The ID of the user whose List memberships to retrieve",
|
||||
placeholder="Enter user ID",
|
||||
required=True,
|
||||
)
|
||||
|
||||
max_results: int | None = SchemaField(
|
||||
@@ -409,7 +411,6 @@ class TwitterGetListMembershipsBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetListMembershipsBlock.Input,
|
||||
output_schema=TwitterGetListMembershipsBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"user_id": "123456789",
|
||||
"max_results": 1,
|
||||
|
||||
@@ -6,7 +6,6 @@ from tweepy.client import Response
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TWITTER_OAUTH_IS_CONFIGURED,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
@@ -43,6 +42,7 @@ class TwitterGetListTweetsBlock(Block):
|
||||
list_id: str = SchemaField(
|
||||
description="The ID of the List whose Tweets you would like to retrieve",
|
||||
placeholder="Enter list ID",
|
||||
required=True,
|
||||
)
|
||||
|
||||
max_results: int | None = SchemaField(
|
||||
@@ -82,7 +82,6 @@ class TwitterGetListTweetsBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetListTweetsBlock.Input,
|
||||
output_schema=TwitterGetListTweetsBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"list_id": "84839422",
|
||||
"max_results": 1,
|
||||
|
||||
@@ -6,7 +6,6 @@ from tweepy.client import Response
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TWITTER_OAUTH_IS_CONFIGURED,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
@@ -29,6 +28,7 @@ class TwitterDeleteListBlock(Block):
|
||||
list_id: str = SchemaField(
|
||||
description="The ID of the List to be deleted",
|
||||
placeholder="Enter list ID",
|
||||
required=True,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
@@ -42,7 +42,6 @@ class TwitterDeleteListBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterDeleteListBlock.Input,
|
||||
output_schema=TwitterDeleteListBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={"list_id": "1234567890", "credentials": TEST_CREDENTIALS_INPUT},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[("success", True)],
|
||||
@@ -120,7 +119,6 @@ class TwitterUpdateListBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterUpdateListBlock.Input,
|
||||
output_schema=TwitterUpdateListBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"list_id": "1234567890",
|
||||
"name": "Updated List Name",
|
||||
@@ -217,7 +215,6 @@ class TwitterCreateListBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterCreateListBlock.Input,
|
||||
output_schema=TwitterCreateListBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"name": "New List Name",
|
||||
"description": "New List Description",
|
||||
|
||||
@@ -6,7 +6,6 @@ from tweepy.client import Response
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TWITTER_OAUTH_IS_CONFIGURED,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
@@ -40,6 +39,7 @@ class TwitterUnpinListBlock(Block):
|
||||
list_id: str = SchemaField(
|
||||
description="The ID of the List to unpin",
|
||||
placeholder="Enter list ID",
|
||||
required=True,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
@@ -53,7 +53,6 @@ class TwitterUnpinListBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterUnpinListBlock.Input,
|
||||
output_schema=TwitterUnpinListBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={"list_id": "123456789", "credentials": TEST_CREDENTIALS_INPUT},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[("success", True)],
|
||||
@@ -104,6 +103,7 @@ class TwitterPinListBlock(Block):
|
||||
list_id: str = SchemaField(
|
||||
description="The ID of the List to pin",
|
||||
placeholder="Enter list ID",
|
||||
required=True,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
@@ -117,7 +117,6 @@ class TwitterPinListBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterPinListBlock.Input,
|
||||
output_schema=TwitterPinListBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={"list_id": "123456789", "credentials": TEST_CREDENTIALS_INPUT},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[("success", True)],
|
||||
@@ -187,7 +186,6 @@ class TwitterGetPinnedListsBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetPinnedListsBlock.Input,
|
||||
output_schema=TwitterGetPinnedListsBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"expansions": None,
|
||||
"list_fields": None,
|
||||
|
||||
@@ -6,7 +6,6 @@ from tweepy.client import Response
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TWITTER_OAUTH_IS_CONFIGURED,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
@@ -79,7 +78,6 @@ class TwitterSearchSpacesBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterSearchSpacesBlock.Input,
|
||||
output_schema=TwitterSearchSpacesBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"query": "tech",
|
||||
"max_results": 1,
|
||||
|
||||
@@ -7,7 +7,6 @@ from tweepy.client import Response
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TWITTER_OAUTH_IS_CONFIGURED,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
@@ -45,7 +44,7 @@ class SpaceList(BaseModel):
|
||||
space_ids: list[str] = SchemaField(
|
||||
description="List of Space IDs to lookup (up to 100)",
|
||||
placeholder="Enter Space IDs",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
advanced=False,
|
||||
)
|
||||
|
||||
@@ -55,7 +54,7 @@ class UserList(BaseModel):
|
||||
user_ids: list[str] = SchemaField(
|
||||
description="List of user IDs to lookup their Spaces (up to 100)",
|
||||
placeholder="Enter user IDs",
|
||||
default_factory=list,
|
||||
default=[],
|
||||
advanced=False,
|
||||
)
|
||||
|
||||
@@ -95,7 +94,6 @@ class TwitterGetSpacesBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetSpacesBlock.Input,
|
||||
output_schema=TwitterGetSpacesBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"identifier": {
|
||||
"discriminator": "space_list",
|
||||
@@ -229,6 +227,7 @@ class TwitterGetSpaceByIdBlock(Block):
|
||||
space_id: str = SchemaField(
|
||||
description="Space ID to lookup",
|
||||
placeholder="Enter Space ID",
|
||||
required=True,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
@@ -251,7 +250,6 @@ class TwitterGetSpaceByIdBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetSpaceByIdBlock.Input,
|
||||
output_schema=TwitterGetSpaceByIdBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"space_id": "1DXxyRYNejbKM",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
@@ -391,6 +389,7 @@ class TwitterGetSpaceBuyersBlock(Block):
|
||||
space_id: str = SchemaField(
|
||||
description="Space ID to lookup buyers for",
|
||||
placeholder="Enter Space ID",
|
||||
required=True,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
@@ -412,7 +411,6 @@ class TwitterGetSpaceBuyersBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetSpaceBuyersBlock.Input,
|
||||
output_schema=TwitterGetSpaceBuyersBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"space_id": "1DXxyRYNejbKM",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
@@ -519,6 +517,7 @@ class TwitterGetSpaceTweetsBlock(Block):
|
||||
space_id: str = SchemaField(
|
||||
description="Space ID to lookup tweets for",
|
||||
placeholder="Enter Space ID",
|
||||
required=True,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
@@ -541,7 +540,6 @@ class TwitterGetSpaceTweetsBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetSpaceTweetsBlock.Input,
|
||||
output_schema=TwitterGetSpaceTweetsBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"space_id": "1DXxyRYNejbKM",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
|
||||
@@ -6,7 +6,6 @@ from tweepy.client import Response
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TWITTER_OAUTH_IS_CONFIGURED,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
@@ -56,7 +55,6 @@ class TwitterBookmarkTweetBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterBookmarkTweetBlock.Input,
|
||||
output_schema=TwitterBookmarkTweetBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"tweet_id": "1234567890",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
@@ -149,7 +147,6 @@ class TwitterGetBookmarkedTweetsBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetBookmarkedTweetsBlock.Input,
|
||||
output_schema=TwitterGetBookmarkedTweetsBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"max_results": 2,
|
||||
"pagination_token": None,
|
||||
@@ -333,7 +330,6 @@ class TwitterRemoveBookmarkTweetBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterRemoveBookmarkTweetBlock.Input,
|
||||
output_schema=TwitterRemoveBookmarkTweetBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"tweet_id": "1234567890",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
|
||||
@@ -3,7 +3,6 @@ import tweepy
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TWITTER_OAUTH_IS_CONFIGURED,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
@@ -39,7 +38,6 @@ class TwitterHideReplyBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterHideReplyBlock.Input,
|
||||
output_schema=TwitterHideReplyBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"tweet_id": "1234567890",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
@@ -111,7 +109,6 @@ class TwitterUnhideReplyBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterUnhideReplyBlock.Input,
|
||||
output_schema=TwitterUnhideReplyBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"tweet_id": "1234567890",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
|
||||
@@ -6,7 +6,6 @@ from tweepy.client import Response
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TWITTER_OAUTH_IS_CONFIGURED,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
@@ -61,7 +60,6 @@ class TwitterLikeTweetBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterLikeTweetBlock.Input,
|
||||
output_schema=TwitterLikeTweetBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"tweet_id": "1234567890",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
@@ -161,7 +159,6 @@ class TwitterGetLikingUsersBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetLikingUsersBlock.Input,
|
||||
output_schema=TwitterGetLikingUsersBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"tweet_id": "1234567890",
|
||||
"max_results": 1,
|
||||
@@ -340,7 +337,6 @@ class TwitterGetLikedTweetsBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetLikedTweetsBlock.Input,
|
||||
output_schema=TwitterGetLikedTweetsBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"user_id": "1234567890",
|
||||
"max_results": 2,
|
||||
@@ -535,7 +531,6 @@ class TwitterUnlikeTweetBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterUnlikeTweetBlock.Input,
|
||||
output_schema=TwitterUnlikeTweetBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"tweet_id": "1234567890",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
|
||||
@@ -8,7 +8,6 @@ from tweepy.client import Response
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TWITTER_OAUTH_IS_CONFIGURED,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
@@ -132,7 +131,6 @@ class TwitterPostTweetBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterPostTweetBlock.Input,
|
||||
output_schema=TwitterPostTweetBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"tweet_text": "This is a test tweet.",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
@@ -265,7 +263,6 @@ class TwitterDeleteTweetBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterDeleteTweetBlock.Input,
|
||||
output_schema=TwitterDeleteTweetBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"tweet_id": "1234567890",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
@@ -360,7 +357,6 @@ class TwitterSearchRecentTweetsBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterSearchRecentTweetsBlock.Input,
|
||||
output_schema=TwitterSearchRecentTweetsBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"query": "from:twitterapi #twitterapi",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
|
||||
@@ -6,7 +6,6 @@ from tweepy.client import Response
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TWITTER_OAUTH_IS_CONFIGURED,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
@@ -87,7 +86,6 @@ class TwitterGetQuoteTweetsBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetQuoteTweetsBlock.Input,
|
||||
output_schema=TwitterGetQuoteTweetsBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"tweet_id": "1234567890",
|
||||
"max_results": 2,
|
||||
|
||||
@@ -6,7 +6,6 @@ from tweepy.client import Response
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TWITTER_OAUTH_IS_CONFIGURED,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
@@ -53,7 +52,6 @@ class TwitterRetweetBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterRetweetBlock.Input,
|
||||
output_schema=TwitterRetweetBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"tweet_id": "1234567890",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
@@ -130,7 +128,6 @@ class TwitterRemoveRetweetBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterRemoveRetweetBlock.Input,
|
||||
output_schema=TwitterRemoveRetweetBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"tweet_id": "1234567890",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
@@ -234,7 +231,6 @@ class TwitterGetRetweetersBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetRetweetersBlock.Input,
|
||||
output_schema=TwitterGetRetweetersBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"tweet_id": "1234567890",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
|
||||
@@ -7,7 +7,6 @@ from tweepy.client import Response
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TWITTER_OAUTH_IS_CONFIGURED,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
@@ -92,7 +91,6 @@ class TwitterGetUserMentionsBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetUserMentionsBlock.Input,
|
||||
output_schema=TwitterGetUserMentionsBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"user_id": "12345",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
@@ -334,7 +332,6 @@ class TwitterGetHomeTimelineBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetHomeTimelineBlock.Input,
|
||||
output_schema=TwitterGetHomeTimelineBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"max_results": 2,
|
||||
@@ -571,7 +568,6 @@ class TwitterGetUserTweetsBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetUserTweetsBlock.Input,
|
||||
output_schema=TwitterGetUserTweetsBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"user_id": "12345",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
|
||||
@@ -6,7 +6,6 @@ from tweepy.client import Response
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TWITTER_OAUTH_IS_CONFIGURED,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
@@ -68,7 +67,6 @@ class TwitterGetTweetBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetTweetBlock.Input,
|
||||
output_schema=TwitterGetTweetBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"tweet_id": "1460323737035677698",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
@@ -231,7 +229,6 @@ class TwitterGetTweetsBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetTweetsBlock.Input,
|
||||
output_schema=TwitterGetTweetsBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"tweet_ids": ["1460323737035677698"],
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
|
||||
@@ -6,7 +6,6 @@ from tweepy.client import Response
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TWITTER_OAUTH_IS_CONFIGURED,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
@@ -65,7 +64,6 @@ class TwitterGetBlockedUsersBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetBlockedUsersBlock.Input,
|
||||
output_schema=TwitterGetBlockedUsersBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"max_results": 10,
|
||||
"pagination_token": "",
|
||||
|
||||
@@ -6,7 +6,6 @@ from tweepy.client import Response
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TWITTER_OAUTH_IS_CONFIGURED,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
@@ -56,7 +55,6 @@ class TwitterUnfollowUserBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterUnfollowUserBlock.Input,
|
||||
output_schema=TwitterUnfollowUserBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"target_user_id": "12345",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
@@ -128,7 +126,6 @@ class TwitterFollowUserBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterFollowUserBlock.Input,
|
||||
output_schema=TwitterFollowUserBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"target_user_id": "12345",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
@@ -216,7 +213,6 @@ class TwitterGetFollowersBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetFollowersBlock.Input,
|
||||
output_schema=TwitterGetFollowersBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"target_user_id": "12345",
|
||||
"max_results": 1,
|
||||
@@ -390,7 +386,6 @@ class TwitterGetFollowingBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetFollowingBlock.Input,
|
||||
output_schema=TwitterGetFollowingBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"target_user_id": "12345",
|
||||
"max_results": 1,
|
||||
|
||||
@@ -6,7 +6,6 @@ from tweepy.client import Response
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TWITTER_OAUTH_IS_CONFIGURED,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
@@ -56,7 +55,6 @@ class TwitterUnmuteUserBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterUnmuteUserBlock.Input,
|
||||
output_schema=TwitterUnmuteUserBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
|
||||
test_input={
|
||||
"target_user_id": "12345",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
@@ -141,7 +139,6 @@ class TwitterGetMutedUsersBlock(Block):
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetMutedUsersBlock.Input,
|
||||
output_schema=TwitterGetMutedUsersBlock.Output,
|
||||
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
test_input={
"max_results": 2,
"pagination_token": "",
@@ -292,7 +289,6 @@ class TwitterMuteUserBlock(Block):
categories={BlockCategory.SOCIAL},
input_schema=TwitterMuteUserBlock.Input,
output_schema=TwitterMuteUserBlock.Output,
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
test_input={
"target_user_id": "12345",
"credentials": TEST_CREDENTIALS_INPUT,

@@ -7,7 +7,6 @@ from tweepy.client import Response
from backend.blocks.twitter._auth import (
TEST_CREDENTIALS,
TEST_CREDENTIALS_INPUT,
TWITTER_OAUTH_IS_CONFIGURED,
TwitterCredentials,
TwitterCredentialsField,
TwitterCredentialsInput,
@@ -76,7 +75,6 @@ class TwitterGetUserBlock(Block):
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetUserBlock.Input,
output_schema=TwitterGetUserBlock.Output,
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
test_input={
"identifier": {"discriminator": "username", "username": "twitter"},
"credentials": TEST_CREDENTIALS_INPUT,
@@ -202,7 +200,7 @@ class UserIdList(BaseModel):
user_ids: list[str] = SchemaField(
description="List of user IDs to lookup (max 100)",
placeholder="Enter user IDs",
default_factory=list,
default=[],
advanced=False,
)

@@ -212,7 +210,7 @@ class UsernameList(BaseModel):
usernames: list[str] = SchemaField(
description="List of Twitter usernames/handles to lookup (max 100)",
placeholder="Enter usernames",
default_factory=list,
default=[],
advanced=False,
)

@@ -253,7 +251,6 @@ class TwitterGetUsersBlock(Block):
categories={BlockCategory.SOCIAL},
input_schema=TwitterGetUsersBlock.Input,
output_schema=TwitterGetUsersBlock.Output,
disabled=not TWITTER_OAUTH_IS_CONFIGURED,
test_input={
"identifier": {
"discriminator": "username_list",

@@ -8,6 +8,7 @@ import pathlib
import click
import psutil

from backend import app
from backend.util.process import AppProcess


@@ -41,13 +42,8 @@ def write_pid(pid: int):

class MainApp(AppProcess):
def run(self):
from backend import app

app.main(silent=True)

def cleanup(self):
pass


@click.group()
def main():
@@ -224,8 +220,9 @@ def event():

@test.command()
@click.argument("server_address")
@click.argument("graph_exec_id")
def websocket(server_address: str, graph_exec_id: str):
@click.argument("graph_id")
@click.argument("graph_version")
def websocket(server_address: str, graph_id: str, graph_version: int):
"""
Tests the websocket connection.
"""
@@ -233,20 +230,16 @@ def websocket(server_address: str, graph_exec_id: str):

import websockets.asyncio.client

from backend.server.ws_api import (
WSMessage,
WSMethod,
WSSubscribeGraphExecutionRequest,
)
from backend.server.ws_api import ExecutionSubscription, Methods, WsMessage

async def send_message(server_address: str):
uri = f"ws://{server_address}"
async with websockets.asyncio.client.connect(uri) as websocket:
try:
msg = WSMessage(
method=WSMethod.SUBSCRIBE_GRAPH_EXEC,
data=WSSubscribeGraphExecutionRequest(
graph_exec_id=graph_exec_id,
msg = WsMessage(
method=Methods.SUBSCRIBE,
data=ExecutionSubscription(
graph_id=graph_id, graph_version=graph_version
).model_dump(),
).model_dump_json()
await websocket.send(msg)

@@ -12,12 +12,12 @@ async def log_raw_analytics(
data_index: str,
):
details = await prisma.models.AnalyticsDetails.prisma().create(
data=prisma.types.AnalyticsDetailsCreateInput(
userId=user_id,
type=type,
data=prisma.Json(data),
dataIndex=data_index,
)
data={
"userId": user_id,
"type": type,
"data": prisma.Json(data),
"dataIndex": data_index,
}
)
return details

@@ -32,12 +32,12 @@ async def log_raw_metric(
raise ValueError("metric_value must be non-negative")

result = await prisma.models.AnalyticsMetrics.prisma().create(
data=prisma.types.AnalyticsMetricsCreateInput(
value=metric_value,
analyticMetric=metric_name,
userId=user_id,
dataString=data_string,
)
data={
"value": metric_value,
"analyticMetric": metric_name,
"userId": user_id,
"dataString": data_string,
},
)

return result

@@ -17,18 +17,15 @@ from typing import (
import jsonref
import jsonschema
from prisma.models import AgentBlock
from prisma.types import AgentBlockCreateInput
from pydantic import BaseModel

from backend.data.model import NodeExecutionStats
from backend.integrations.providers import ProviderName
from backend.util import json
from backend.util.settings import Config

from .model import (
ContributorDetails,
Credentials,
CredentialsFieldInfo,
CredentialsMetaInput,
is_credentials_field_name,
)
@@ -53,7 +50,6 @@ class BlockType(Enum):
WEBHOOK_MANUAL = "Webhook (manual)"
AGENT = "Agent"
AI = "AI"
AYRSHARE = "Ayrshare"


class BlockCategory(Enum):
@@ -77,7 +73,6 @@ class BlockCategory(Enum):
PRODUCTIVITY = "Block that helps with productivity"
ISSUE_TRACKING = "Block that helps with issue tracking"
MULTIMEDIA = "Block that interacts with multimedia content"
MARKETING = "Block that helps with marketing"

def dict(self) -> dict[str, str]:
return {"category": self.name, "description": self.value}
@@ -124,26 +119,21 @@ class BlockSchema(BaseModel):
def get_mismatch_error(cls, data: BlockInput) -> str | None:
return cls.validate_data(data)

@classmethod
def get_field_schema(cls, field_name: str) -> dict[str, Any]:
model_schema = cls.jsonschema().get("properties", {})
if not model_schema:
raise ValueError(f"Invalid model schema {cls}")

property_schema = model_schema.get(field_name)
if not property_schema:
raise ValueError(f"Invalid property name {field_name}")

return property_schema

@classmethod
def validate_field(cls, field_name: str, data: BlockInput) -> str | None:
"""
Validate the data against a specific property (one of the input/output name).
Returns the validation error message if the data does not match the schema.
"""
model_schema = cls.jsonschema().get("properties", {})
if not model_schema:
return f"Invalid model schema {cls}"

property_schema = model_schema.get(field_name)
if not property_schema:
return f"Invalid property name {field_name}"

try:
property_schema = cls.get_field_schema(field_name)
jsonschema.validate(json.to_dict(data), property_schema)
return None
except jsonschema.ValidationError as e:
@@ -206,15 +196,6 @@ class BlockSchema(BaseModel):
)
}

@classmethod
def get_credentials_fields_info(cls) -> dict[str, CredentialsFieldInfo]:
return {
field_name: CredentialsFieldInfo.model_validate(
cls.get_field_schema(field_name), by_alias=True
)
for field_name in cls.get_credentials_fields().keys()
}

@classmethod
def get_input_defaults(cls, data: BlockInput) -> BlockInput:
return data # Return as is, by default.
@@ -244,7 +225,7 @@ class BlockManualWebhookConfig(BaseModel):
the user has to manually set up the webhook at the provider.
"""

provider: ProviderName
provider: str
"""The service provider that the webhook connects to"""

webhook_type: str
@@ -480,9 +461,9 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):


def get_blocks() -> dict[str, Type[Block]]:
from backend.blocks import load_all_blocks
from backend.blocks import AVAILABLE_BLOCKS # noqa: E402

return load_all_blocks()
return AVAILABLE_BLOCKS


async def initialize_blocks() -> None:
@@ -493,12 +474,12 @@ async def initialize_blocks() -> None:
)
if not existing_block:
await AgentBlock.prisma().create(
data=AgentBlockCreateInput(
id=block.id,
name=block.name,
inputSchema=json.dumps(block.input_schema.jsonschema()),
outputSchema=json.dumps(block.output_schema.jsonschema()),
)
data={
"id": block.id,
"name": block.name,
"inputSchema": json.dumps(block.input_schema.jsonschema()),
"outputSchema": json.dumps(block.output_schema.jsonschema()),
}
)
continue

@@ -521,7 +502,6 @@ async def initialize_blocks() -> None:
)


# Note on the return type annotation: https://github.com/microsoft/pyright/issues/10281
def get_block(block_id: str) -> Block[BlockSchema, BlockSchema] | None:
def get_block(block_id: str) -> Block | None:
cls = get_blocks().get(block_id)
return cls() if cls else None

@@ -2,6 +2,11 @@ from typing import Type

from backend.blocks.ai_music_generator import AIMusicGeneratorBlock
from backend.blocks.ai_shortform_video_block import AIShortformVideoCreatorBlock
from backend.blocks.airtable.airtable import (
AirtableFieldsBlock,
AirtableRecordsBlock,
AirtableTablesBlock,
)
from backend.blocks.ideogram import IdeogramModelBlock
from backend.blocks.jina.embeddings import JinaEmbeddingBlock
from backend.blocks.jina.search import ExtractWebsiteContentBlock, SearchTheWebBlock
@@ -21,12 +26,12 @@ from backend.blocks.text_to_speech_block import UnrealTextToSpeechBlock
from backend.data.block import Block
from backend.data.cost import BlockCost, BlockCostType
from backend.integrations.credentials_store import (
airtable_credentials,
anthropic_credentials,
did_credentials,
groq_credentials,
ideogram_credentials,
jina_credentials,
llama_api_credentials,
open_router_credentials,
openai_credentials,
replicate_credentials,
@@ -37,17 +42,14 @@ from backend.integrations.credentials_store import (
# =============== Configure the cost for each LLM Model call =============== #

MODEL_COST: dict[LlmModel, int] = {
LlmModel.O3: 7,
LlmModel.O3_MINI: 2, # $1.10 / $4.40
LlmModel.O1: 16, # $15 / $60
LlmModel.O1_PREVIEW: 16,
LlmModel.O1_MINI: 4,
LlmModel.GPT41: 2,
LlmModel.GPT4O_MINI: 1,
LlmModel.GPT4O: 3,
LlmModel.GPT4_TURBO: 10,
LlmModel.GPT3_5_TURBO: 1,
LlmModel.CLAUDE_3_7_SONNET: 5,
LlmModel.CLAUDE_3_5_SONNET: 4,
LlmModel.CLAUDE_3_5_HAIKU: 1, # $0.80 / $4.00
LlmModel.CLAUDE_3_HAIKU: 1,
@@ -64,7 +66,6 @@ MODEL_COST: dict[LlmModel, int] = {
LlmModel.DEEPSEEK_LLAMA_70B: 1, # ? / ?
LlmModel.OLLAMA_DOLPHIN: 1,
LlmModel.GEMINI_FLASH_1_5: 1,
LlmModel.GEMINI_2_5_PRO: 4,
LlmModel.GROK_BETA: 5,
LlmModel.MISTRAL_NEMO: 1,
LlmModel.COHERE_COMMAND_R_08_2024: 1,
@@ -80,12 +81,6 @@ MODEL_COST: dict[LlmModel, int] = {
LlmModel.AMAZON_NOVA_PRO_V1: 1,
LlmModel.MICROSOFT_WIZARDLM_2_8X22B: 1,
LlmModel.GRYPHE_MYTHOMAX_L2_13B: 1,
LlmModel.META_LLAMA_4_SCOUT: 1,
LlmModel.META_LLAMA_4_MAVERICK: 1,
LlmModel.LLAMA_API_LLAMA_4_SCOUT: 1,
LlmModel.LLAMA_API_LLAMA4_MAVERICK: 1,
LlmModel.LLAMA_API_LLAMA3_3_8B: 1,
LlmModel.LLAMA_API_LLAMA3_3_70B: 1,
}

for model in LlmModel:
@@ -158,23 +153,6 @@ LLM_COST = (
for model, cost in MODEL_COST.items()
if MODEL_METADATA[model].provider == "open_router"
]
# Llama API Models
+ [
BlockCost(
cost_type=BlockCostType.RUN,
cost_filter={
"model": model,
"credentials": {
"id": llama_api_credentials.id,
"provider": llama_api_credentials.provider,
"type": llama_api_credentials.type,
},
},
cost_amount=cost,
)
for model, cost in MODEL_COST.items()
if MODEL_METADATA[model].provider == "llama_api"
]
)

# =============== This is the exhaustive list of cost for each Block =============== #
@@ -294,5 +272,41 @@ BLOCK_COSTS: dict[Type[Block], list[BlockCost]] = {
},
)
],
AirtableTablesBlock: [
BlockCost(
cost_amount=1,
cost_filter={
"credentials": {
"id": airtable_credentials.id,
"provider": airtable_credentials.provider,
"type": airtable_credentials.type,
}
},
)
],
AirtableFieldsBlock: [
BlockCost(
cost_amount=1,
cost_filter={
"credentials": {
"id": airtable_credentials.id,
"provider": airtable_credentials.provider,
"type": airtable_credentials.type,
}
},
)
],
AirtableRecordsBlock: [
BlockCost(
cost_amount=1,
cost_filter={
"credentials": {
"id": airtable_credentials.id,
"provider": airtable_credentials.provider,
"type": airtable_credentials.type,
}
},
)
],
SmartDecisionMakerBlock: LLM_COST,
}

@@ -1,43 +1,39 @@
import asyncio
import logging
from abc import ABC, abstractmethod
from collections import defaultdict
from datetime import datetime, timezone
from typing import Any, cast

import stripe
from autogpt_libs.utils.cache import thread_cached
from prisma import Json
from prisma.enums import (
CreditRefundRequestStatus,
CreditTransactionType,
NotificationType,
OnboardingStep,
)
from prisma.errors import UniqueViolationError
from prisma.models import CreditRefundRequest, CreditTransaction, User
from prisma.types import (
CreditRefundRequestCreateInput,
CreditTransactionCreateInput,
CreditTransactionWhereInput,
)
from prisma.types import CreditTransactionCreateInput, CreditTransactionWhereInput
from pydantic import BaseModel
from tenacity import retry, stop_after_attempt, wait_exponential

from backend.data import db
from backend.data.block import Block, BlockInput, get_block
from backend.data.block_cost_config import BLOCK_COSTS
from backend.data.cost import BlockCost
from backend.data.cost import BlockCost, BlockCostType
from backend.data.execution import NodeExecutionEntry
from backend.data.model import (
AutoTopUpConfig,
RefundRequest,
TopUpType,
TransactionHistory,
UserTransaction,
)
from backend.data.notifications import NotificationEventModel, RefundRequestData
from backend.data.user import get_user_by_id, get_user_email_by_id
from backend.notifications.notifications import queue_notification_async
from backend.server.model import Pagination
from backend.server.v2.admin.model import UserHistoryResponse
from backend.data.notifications import NotificationEventDTO, RefundRequestData
from backend.data.user import get_user_by_id
from backend.notifications import NotificationManager
from backend.util.exceptions import InsufficientBalanceError
from backend.util.retry import func_retry
from backend.util.service import get_service_client
from backend.util.settings import Settings

settings = Settings()
@@ -46,17 +42,6 @@ logger = logging.getLogger(__name__)
base_url = settings.config.frontend_base_url or settings.config.platform_base_url


class UsageTransactionMetadata(BaseModel):
graph_exec_id: str | None = None
graph_id: str | None = None
node_id: str | None = None
node_exec_id: str | None = None
block_id: str | None = None
block: str | None = None
input: dict[str, Any] | None = None
reason: str | None = None


class UserCreditBase(ABC):
@abstractmethod
async def get_credits(self, user_id: str) -> int:
@@ -106,20 +91,20 @@ class UserCreditBase(ABC):
@abstractmethod
async def spend_credits(
self,
user_id: str,
cost: int,
metadata: UsageTransactionMetadata,
entry: NodeExecutionEntry,
data_size: float,
run_time: float,
) -> int:
"""
Spend the credits for the user based on the cost.
Spend the credits for the user based on the block usage.

Args:
user_id (str): The user ID.
cost (int): The cost to spend.
metadata (UsageTransactionMetadata): The metadata of the transaction.
entry (NodeExecutionEntry): The node execution identifiers & data.
data_size (float): The size of the data being processed.
run_time (float): The time taken to run the block.

Returns:
int: The remaining balance.
int: amount of credit spent
"""
pass

@@ -134,18 +119,6 @@ class UserCreditBase(ABC):
"""
pass

@abstractmethod
async def onboarding_reward(self, user_id: str, credits: int, step: OnboardingStep):
"""
Reward the user with credits for completing an onboarding step.
Won't reward if the user has already received credits for the step.

Args:
user_id (str): The user ID.
step (OnboardingStep): The onboarding step.
"""
pass

@abstractmethod
async def top_up_intent(self, user_id: str, amount: int) -> str:
"""
@@ -238,7 +211,7 @@ class UserCreditBase(ABC):
"userId": user_id,
"createdAt": {"lte": top_time},
"isActive": True,
"NOT": [{"runningBalance": None}],
"runningBalance": {"not": None}, # type: ignore
},
order={"createdAt": "desc"},
)
@@ -274,7 +247,11 @@ class UserCreditBase(ABC):
)
return transaction_balance, transaction_time

@func_retry
@retry(
stop=stop_after_attempt(5),
wait=wait_exponential(multiplier=1, min=1, max=10),
reraise=True,
)
async def _enable_transaction(
self,
transaction_key: str,
@@ -356,51 +333,134 @@ class UserCreditBase(ABC):
amount = min(-user_balance, 0)

# Create the transaction
transaction_data = CreditTransactionCreateInput(
userId=user_id,
amount=amount,
runningBalance=user_balance + amount,
type=transaction_type,
metadata=metadata,
isActive=is_active,
createdAt=self.time_now(),
)
transaction_data: CreditTransactionCreateInput = {
"userId": user_id,
"amount": amount,
"runningBalance": user_balance + amount,
"type": transaction_type,
"metadata": metadata,
"isActive": is_active,
"createdAt": self.time_now(),
}
if transaction_key:
transaction_data["transactionKey"] = transaction_key
tx = await CreditTransaction.prisma().create(data=transaction_data)
return user_balance + amount, tx.transactionKey


class UsageTransactionMetadata(BaseModel):
graph_exec_id: str | None = None
graph_id: str | None = None
node_id: str | None = None
node_exec_id: str | None = None
block_id: str | None = None
block: str | None = None
input: BlockInput | None = None


class UserCredit(UserCreditBase):
@thread_cached
def notification_client(self) -> NotificationManager:
return get_service_client(NotificationManager)

async def _send_refund_notification(
self,
notification_request: RefundRequestData,
notification_type: NotificationType,
):
await queue_notification_async(
NotificationEventModel(
user_id=notification_request.user_id,
type=notification_type,
data=notification_request,
await asyncio.to_thread(
lambda: self.notification_client().queue_notification(
NotificationEventDTO(
user_id=notification_request.user_id,
type=notification_type,
data=notification_request.model_dump(),
)
)
)

def _block_usage_cost(
self,
block: Block,
input_data: BlockInput,
data_size: float,
run_time: float,
) -> tuple[int, BlockInput]:
block_costs = BLOCK_COSTS.get(type(block))
if not block_costs:
return 0, {}

for block_cost in block_costs:
if not self._is_cost_filter_match(block_cost.cost_filter, input_data):
continue

if block_cost.cost_type == BlockCostType.RUN:
return block_cost.cost_amount, block_cost.cost_filter

if block_cost.cost_type == BlockCostType.SECOND:
return (
int(run_time * block_cost.cost_amount),
block_cost.cost_filter,
)

if block_cost.cost_type == BlockCostType.BYTE:
return (
int(data_size * block_cost.cost_amount),
block_cost.cost_filter,
)

return 0, {}

def _is_cost_filter_match(
self, cost_filter: BlockInput, input_data: BlockInput
) -> bool:
"""
Filter rules:
- If cost_filter is an object, then check if cost_filter is the subset of input_data
- Otherwise, check if cost_filter is equal to input_data.
- Undefined, null, and empty string are considered as equal.
"""
if not isinstance(cost_filter, dict) or not isinstance(input_data, dict):
return cost_filter == input_data

return all(
(not input_data.get(k) and not v)
or (input_data.get(k) and self._is_cost_filter_match(v, input_data[k]))
for k, v in cost_filter.items()
)

async def spend_credits(
self,
user_id: str,
cost: int,
metadata: UsageTransactionMetadata,
entry: NodeExecutionEntry,
data_size: float,
run_time: float,
) -> int:
block = get_block(entry.block_id)
if not block:
raise ValueError(f"Block not found: {entry.block_id}")

cost, matching_filter = self._block_usage_cost(
block=block, input_data=entry.data, data_size=data_size, run_time=run_time
)
if cost == 0:
return 0

balance, _ = await self._add_transaction(
user_id=user_id,
user_id=entry.user_id,
amount=-cost,
transaction_type=CreditTransactionType.USAGE,
metadata=Json(metadata.model_dump()),
metadata=Json(
UsageTransactionMetadata(
graph_exec_id=entry.graph_exec_id,
graph_id=entry.graph_id,
node_id=entry.node_id,
node_exec_id=entry.node_exec_id,
block_id=entry.block_id,
block=block.name,
input=matching_filter,
).model_dump()
),
)
user_id = entry.user_id

# Auto top-up if balance is below threshold.
auto_top_up = await get_auto_top_up(user_id)
@@ -410,9 +470,8 @@ class UserCredit(UserCreditBase):
user_id=user_id,
amount=auto_top_up.amount,
# Avoid multiple auto top-ups within the same graph execution.
key=f"AUTO-TOP-UP-{user_id}-{metadata.graph_exec_id}",
key=f"AUTO-TOP-UP-{user_id}-{entry.graph_exec_id}",
ceiling_balance=auto_top_up.threshold,
top_up_type=TopUpType.AUTO,
)
except Exception as e:
# Failed top-up is not critical, we can move on.
@@ -420,32 +479,10 @@ class UserCredit(UserCreditBase):
f"Auto top-up failed for user {user_id}, balance: {balance}, amount: {auto_top_up.amount}, error: {e}"
)

return balance
return cost

async def top_up_credits(
self,
user_id: str,
amount: int,
top_up_type: TopUpType = TopUpType.UNCATEGORIZED,
):
await self._top_up_credits(
user_id=user_id, amount=amount, top_up_type=top_up_type
)

async def onboarding_reward(self, user_id: str, credits: int, step: OnboardingStep):
try:
await self._add_transaction(
user_id=user_id,
amount=credits,
transaction_type=CreditTransactionType.GRANT,
transaction_key=f"REWARD-{user_id}-{step.value}",
metadata=Json(
{"reason": f"Reward for completing {step.value} onboarding step."}
),
)
except UniqueViolationError:
# Already rewarded for this step
pass
async def top_up_credits(self, user_id: str, amount: int):
await self._top_up_credits(user_id, amount)

async def top_up_refund(
self, user_id: str, transaction_key: str, metadata: dict[str, str]
@@ -465,15 +502,15 @@ class UserCredit(UserCreditBase):

try:
refund_request = await CreditRefundRequest.prisma().create(
data=CreditRefundRequestCreateInput(
id=refund_key,
transactionKey=transaction_key,
userId=user_id,
amount=amount,
reason=metadata.get("reason", ""),
status=CreditRefundRequestStatus.PENDING,
result="The refund request is under review.",
)
data={
"id": refund_key,
"transactionKey": transaction_key,
"userId": user_id,
"amount": amount,
"reason": metadata.get("reason", ""),
"status": CreditRefundRequestStatus.PENDING,
"result": "The refund request is under review.",
}
)
except UniqueViolationError:
raise ValueError(
@@ -610,7 +647,7 @@ class UserCredit(UserCreditBase):

evidence_text += (
f"- {tx.description}: Amount ${tx.amount / 100:.2f} on {tx.transaction_time.isoformat()}, "
f"resulting balance ${tx.running_balance / 100:.2f} {additional_comment}\n"
f"resulting balance ${tx.balance / 100:.2f} {additional_comment}\n"
)
evidence_text += (
"\nThis evidence demonstrates that the transaction was authorized and that the charged amount was used to render the service as agreed."
@@ -629,24 +666,7 @@ class UserCredit(UserCreditBase):
amount: int,
key: str | None = None,
ceiling_balance: int | None = None,
top_up_type: TopUpType = TopUpType.UNCATEGORIZED,
metadata: dict | None = None,
):
# init metadata, without sharing it with the world
metadata = metadata or {}
if not metadata["reason"]:
match top_up_type:
case TopUpType.MANUAL:
metadata["reason"] = {"reason": f"Top up credits for {user_id}"}
case TopUpType.AUTO:
metadata["reason"] = {
"reason": f"Auto top up credits for {user_id}"
}
case _:
metadata["reason"] = {
"reason": f"Top up reason unknown for {user_id}"
}

if amount < 0:
raise ValueError(f"Top up amount must not be negative: {amount}")

@@ -669,7 +689,6 @@ class UserCredit(UserCreditBase):
is_active=False,
transaction_key=key,
ceiling_balance=ceiling_balance,
metadata=(Json(metadata)),
)

customer_id = await get_stripe_customer_id(user_id)
@@ -812,15 +831,10 @@ class UserCredit(UserCreditBase):
# Check the Checkout Session's payment_status property
# to determine if fulfillment should be performed
if checkout_session.payment_status in ["paid", "no_payment_required"]:
if payment_intent := checkout_session.payment_intent:
assert isinstance(payment_intent, stripe.PaymentIntent)
new_transaction_key = payment_intent.id
else:
new_transaction_key = None

assert isinstance(checkout_session.payment_intent, stripe.PaymentIntent)
await self._enable_transaction(
transaction_key=credit_transaction.transactionKey,
new_transaction_key=new_transaction_key,
new_transaction_key=checkout_session.payment_intent.id,
user_id=credit_transaction.userId,
metadata=Json(checkout_session),
)
@@ -853,9 +867,8 @@ class UserCredit(UserCreditBase):
take=transaction_count_limit,
)

# doesn't fill current_balance, reason, user_email, admin_email, or extra_data
grouped_transactions: dict[str, UserTransaction] = defaultdict(
lambda: UserTransaction(user_id=user_id)
lambda: UserTransaction()
)
tx_time = None
for t in transactions:
@@ -885,7 +898,7 @@ class UserCredit(UserCreditBase):

if tx_time > gt.transaction_time:
gt.transaction_time = tx_time
gt.running_balance = t.runningBalance or 0
gt.balance = t.runningBalance or 0

return TransactionHistory(
transactions=list(grouped_transactions.values()),
@@ -935,7 +948,6 @@ class BetaUserCredit(UserCredit):
amount=max(self.num_user_credits_refill - balance, 0),
transaction_type=CreditTransactionType.GRANT,
transaction_key=f"MONTHLY-CREDIT-TOP-UP-{cur_time}",
metadata=Json({"reason": "Monthly credit refill"}),
)
return balance
except UniqueViolationError:
@@ -945,7 +957,7 @@ class BetaUserCredit(UserCredit):

class DisabledUserCredit(UserCreditBase):
async def get_credits(self, *args, **kwargs) -> int:
return 100
return 0

async def get_transaction_history(self, *args, **kwargs) -> TransactionHistory:
return TransactionHistory(transactions=[], next_transaction_time=None)
@@ -959,9 +971,6 @@ class DisabledUserCredit(UserCreditBase):
async def top_up_credits(self, *args, **kwargs):
pass

async def onboarding_reward(self, *args, **kwargs):
pass

async def top_up_intent(self, *args, **kwargs) -> str:
return ""

@@ -1023,81 +1032,3 @@ async def get_auto_top_up(user_id: str) -> AutoTopUpConfig:
return AutoTopUpConfig(threshold=0, amount=0)

return AutoTopUpConfig.model_validate(user.topUpConfig)


async def admin_get_user_history(
page: int = 1,
page_size: int = 20,
search: str | None = None,
transaction_filter: CreditTransactionType | None = None,
) -> UserHistoryResponse:

if page < 1 or page_size < 1:
raise ValueError("Invalid pagination input")

where_clause: CreditTransactionWhereInput = {}
if transaction_filter:
where_clause["type"] = transaction_filter
if search:
where_clause["OR"] = [
{"userId": {"contains": search, "mode": "insensitive"}},
{"User": {"is": {"email": {"contains": search, "mode": "insensitive"}}}},
{"User": {"is": {"name": {"contains": search, "mode": "insensitive"}}}},
]
transactions = await CreditTransaction.prisma().find_many(
where=where_clause,
skip=(page - 1) * page_size,
take=page_size,
include={"User": True},
order={"createdAt": "desc"},
)
total = await CreditTransaction.prisma().count(where=where_clause)
total_pages = (total + page_size - 1) // page_size

history = []
for tx in transactions:
admin_id = ""
admin_email = ""
reason = ""

metadata: dict = cast(dict, tx.metadata) or {}

if metadata:
admin_id = metadata.get("admin_id")
admin_email = (
(await get_user_email_by_id(admin_id) or f"Unknown Admin: {admin_id}")
if admin_id
else ""
)
reason = metadata.get("reason", "No reason provided")

balance, last_update = await get_user_credit_model()._get_credits(tx.userId)

history.append(
UserTransaction(
transaction_key=tx.transactionKey,
transaction_time=tx.createdAt,
transaction_type=tx.type,
amount=tx.amount,
current_balance=balance,
running_balance=tx.runningBalance or 0,
user_id=tx.userId,
user_email=(
tx.User.email
if tx.User
else (await get_user_by_id(tx.userId)).email
),
reason=reason,
admin_email=admin_email,
extra_data=str(metadata),
)
)
return UserHistoryResponse(
history=history,
pagination=Pagination(
total_items=total,
total_pages=total_pages,
current_page=page,
page_size=page_size,
),
)

Some files were not shown because too many files have changed in this diff.