Mirror of https://github.com/Significant-Gravitas/AutoGPT.git (synced 2026-01-12 00:28:31 -05:00)

Compare commits: detached ... feature/tw
125 commits
Commit SHA1s:
6c94676e76, 694b9df9da, 3c264f162c, ce591cefda, 75ba8c2e83, ebd69374a6, ea13d970ab, 3cc0be9234, 36af4a53f8, 431a87ab6d,
d6297c43fd, 023c5b9a17, 3344b7f51e, 159af2b89a, 67b17475b1, 2968453f18, db57cecb1f, 1678a1c69c, 49a24f73b7, 794dd4f59c,
3fb3099814, 01185c55a1, e9e7f28cad, ca26b298b6, 10865cd736, 1663d4273b, 658493559d, 6025506cae, 54f8d3b4dd, a8339d0748,
4cc8616c02, 44722c4b39, e33864f5ed, d3e1319eb3, ddac69e0f1, 1fb9c8c37f, 71310a1b49, 8e634d7bc3, d028f5bd39, ca91754bc6,
8ca80e05a9, 4f15da99f9, 54dddbf488, 356aee1b72, ed7c9378eb, aaf4ee524d, 234e4a35c4, 4646de463a, b1d869aad2, bb8a37911c,
746f3d4e41, 89a9354acb, aa883d8465, e8dd0a297e, 9d93704264, 6ec2bacb72, 95bd268de8, 0e10e62bfa, 4d19bcdc5e, e27d7a2efb,
a386b3ac90, 41be88f0bf, 53eda98737, abd245cb2b, 9e0c296aef, 59f52fb656, 9f9097c62f, cd339b0ffc, 569222e9cd, 2fe6eb1df1,
f588b69484, be6d8cbd18, 2de5e3dd83, 94a312a279, de3c096e23, f090f4ca4a, 29c771ba1b, 582e12c766, e3cf605e9b, abf73e8d66,
b16bf42fa3, 33b9eef376, b8a3ffc04a, 3fd2b7ce4a, 6490b4e188, 7a9115db18, 6307ca1841, d827d4f9e4, 984d42234c, 79c0c314e2,
e6d728b081, a7a526e820, df431d71ff, 281cd2910b, 6997e2a170, 1a85eb1dcf, b62f411518, eb79c04855, d7c9742d7e, ea6c9a1152,
dcfad263cb, 9ad9dd9fe1, e2904136bd, 6dba31e021, ffc3eff7e2, 73eafa37c6, c621226554, 227806aef9, 0272d87af3, 64f5e60d12,
6b742d1a8c, d4edb9371d, 89011aabe0, 43bd5c89d7, 0a604a5746, 5ccfb8e4c6, 96bba3c1bd, de1cd6c295, 3bca279b35, 7c2e371f23,
dce9bdd488, 30bb9a3d72, 758edaec9e, be7f9123bb, 5c49fc87fd
24  .github/dependabot.yml  vendored
@@ -7,6 +7,9 @@ updates:
      interval: "weekly"
    open-pull-requests-limit: 10
    target-branch: "dev"
    commit-message:
      prefix: "chore(libs/deps)"
      prefix-development: "chore(libs/deps-dev)"
    groups:
      production-dependencies:
        dependency-type: "production"
@@ -26,6 +29,9 @@ updates:
      interval: "weekly"
    open-pull-requests-limit: 10
    target-branch: "dev"
    commit-message:
      prefix: "chore(backend/deps)"
      prefix-development: "chore(backend/deps-dev)"
    groups:
      production-dependencies:
        dependency-type: "production"
@@ -38,7 +44,6 @@ updates:
          - "minor"
          - "patch"

  # frontend (Next.js project)
  - package-ecosystem: "npm"
    directory: "autogpt_platform/frontend"
@@ -46,6 +51,9 @@ updates:
      interval: "weekly"
    open-pull-requests-limit: 10
    target-branch: "dev"
    commit-message:
      prefix: "chore(frontend/deps)"
      prefix-development: "chore(frontend/deps-dev)"
    groups:
      production-dependencies:
        dependency-type: "production"
@@ -58,7 +66,6 @@ updates:
          - "minor"
          - "patch"

  # infra (Terraform)
  - package-ecosystem: "terraform"
    directory: "autogpt_platform/infra"
@@ -66,6 +73,10 @@ updates:
      interval: "weekly"
    open-pull-requests-limit: 5
    target-branch: "dev"
    commit-message:
      prefix: "chore(infra/deps)"
      prefix-development: "chore(infra/deps-dev)"

    groups:
      production-dependencies:
        dependency-type: "production"
@@ -78,7 +89,6 @@ updates:
          - "minor"
          - "patch"

  # market (Poetry project)
  - package-ecosystem: "pip"
    directory: "autogpt_platform/market"
@@ -86,6 +96,9 @@ updates:
      interval: "weekly"
    open-pull-requests-limit: 10
    target-branch: "dev"
    commit-message:
      prefix: "chore(market/deps)"
      prefix-development: "chore(market/deps-dev)"
    groups:
      production-dependencies:
        dependency-type: "production"
@@ -146,6 +159,9 @@ updates:
      interval: "weekly"
    open-pull-requests-limit: 1
    target-branch: "dev"
    commit-message:
      prefix: "chore(platform/deps)"
      prefix-development: "chore(platform/deps-dev)"
    groups:
      production-dependencies:
        dependency-type: "production"
@@ -166,6 +182,8 @@ updates:
      interval: "weekly"
    open-pull-requests-limit: 1
    target-branch: "dev"
    commit-message:
      prefix: "chore(docs/deps)"
    groups:
      production-dependencies:
        dependency-type: "production"
13  .github/workflows/platform-backend-ci.yml  vendored
@@ -6,11 +6,13 @@ on:
    paths:
      - ".github/workflows/platform-backend-ci.yml"
      - "autogpt_platform/backend/**"
      - "autogpt_platform/autogpt_libs/**"
  pull_request:
    branches: [master, dev, release-*]
    paths:
      - ".github/workflows/platform-backend-ci.yml"
      - "autogpt_platform/backend/**"
      - "autogpt_platform/autogpt_libs/**"
  merge_group:

concurrency:
@@ -77,6 +79,17 @@ jobs:
            echo "$HOME/.local/bin" >> $GITHUB_PATH
          fi

      - name: Check poetry.lock
        run: |
          poetry lock --no-update

          if ! git diff --quiet poetry.lock; then
            echo "Error: poetry.lock not up to date."
            echo
            git diff poetry.lock
            exit 1
          fi

      - name: Install Python dependencies
        run: poetry install
35  .github/workflows/platform-frontend-ci.yml  vendored
@@ -23,6 +23,7 @@ jobs:

    steps:
      - uses: actions/checkout@v4

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
@@ -38,24 +39,12 @@ jobs:

  test:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        browser: [chromium, webkit]

    steps:
      - name: Free Disk Space (Ubuntu)
        uses: jlumbroso/free-disk-space@main
        with:
          # this might remove tools that are actually needed,
          # if set to "true" but frees about 6 GB
          tool-cache: false

          # all of these default to true, but feel free to set to
          # "false" if necessary for your workflow
          android: false
          dotnet: false
          haskell: false
          large-packages: true
          docker-images: true
          swap-storage: true

      - name: Checkout repository
        uses: actions/checkout@v4
        with:
@@ -66,6 +55,12 @@ jobs:
        with:
          node-version: "21"

      - name: Free Disk Space (Ubuntu)
        uses: jlumbroso/free-disk-space@main
        with:
          large-packages: false # slow
          docker-images: false # limited benefit

      - name: Copy default supabase .env
        run: |
          cp ../supabase/docker/.env.example ../.env
@@ -86,16 +81,16 @@ jobs:
        run: |
          cp .env.example .env

      - name: Install Playwright Browsers
        run: yarn playwright install --with-deps
      - name: Install Browser '${{ matrix.browser }}'
        run: yarn playwright install --with-deps ${{ matrix.browser }}

      - name: Run tests
        run: |
          yarn test
          yarn test --project=${{ matrix.browser }}

      - uses: actions/upload-artifact@v4
        if: ${{ !cancelled() }}
        with:
          name: playwright-report
          name: playwright-report-${{ matrix.browser }}
          path: playwright-report/
          retention-days: 30
3  .gitignore  vendored
@@ -173,3 +173,6 @@ LICENSE.rtf
autogpt_platform/backend/settings.py
/.auth
/autogpt_platform/frontend/.auth

*.ign.*
.test-contents
@@ -98,6 +98,11 @@ repos:
        files: ^autogpt_platform/autogpt_libs/
        args: [--fix]

      - id: ruff-format
        name: Format (Ruff) - AutoGPT Platform - Libs
        alias: ruff-lint-platform-libs
        files: ^autogpt_platform/autogpt_libs/

  - repo: local
    # isort needs the context of which packages are installed to function, so we
    # can't use a vendored isort pre-commit hook (which runs in its own isolated venv).
@@ -140,7 +145,7 @@ repos:
    # everything in .gitignore, so it works fine without any config or arguments.
    hooks:
      - id: black
        name: Lint (Black)
        name: Format (Black)

  - repo: https://github.com/PyCQA/flake8
    rev: 7.0.0
@@ -8,7 +8,7 @@ We take the security of our project seriously. If you believe you have found a s

Instead, please report them via:
- [GitHub Security Advisory](https://github.com/Significant-Gravitas/AutoGPT/security/advisories/new)
- [Huntr.dev](https://huntr.com/repos/significant-gravitas/autogpt) - where you may be eligible for a bounty
<!--- [Huntr.dev](https://huntr.com/repos/significant-gravitas/autogpt) - where you may be eligible for a bounty-->

### Reporting Process
1. **Submit Report**: Use one of the above channels to submit your report
@@ -35,3 +35,12 @@ def verify_user(payload: dict | None, admin_only: bool) -> User:
        raise fastapi.HTTPException(status_code=403, detail="Admin access required")

    return User.from_payload(payload)


def get_user_id(payload: dict = fastapi.Depends(auth_middleware)) -> str:
    user_id = payload.get("sub")
    if not user_id:
        raise fastapi.HTTPException(
            status_code=401, detail="User ID not found in token"
        )
    return user_id
@@ -72,7 +72,7 @@ def feature_flag(
    """

    def decorator(
        func: Callable[P, Union[T, Awaitable[T]]]
        func: Callable[P, Union[T, Awaitable[T]]],
    ) -> Callable[P, Union[T, Awaitable[T]]]:
        @wraps(func)
        async def async_wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
@@ -1,7 +1,8 @@
import pytest
from autogpt_libs.feature_flag.client import feature_flag, mock_flag_variation
from ldclient import LDClient

from autogpt_libs.feature_flag.client import feature_flag, mock_flag_variation


@pytest.fixture
def ld_client(mocker):
@@ -6,7 +6,7 @@ class Settings(BaseSettings):
    launch_darkly_sdk_key: str = Field(
        default="",
        description="The Launch Darkly SDK key",
        validation_alias="LAUNCH_DARKLY_SDK_KEY"
        validation_alias="LAUNCH_DARKLY_SDK_KEY",
    )

    model_config = SettingsConfigDict(case_sensitive=True, extra="ignore")
@@ -23,7 +23,6 @@ DEBUG_LOG_FORMAT = (


class LoggingConfig(BaseSettings):

    level: str = Field(
        default="INFO",
        description="Logging level",
@@ -24,10 +24,10 @@ from .utils import remove_color_codes
        ),
        ("", ""),
        ("hello", "hello"),
        ("hello\x1B[31m world", "hello world"),
        ("\x1B[36mHello,\x1B[32m World!", "Hello, World!"),
        ("hello\x1b[31m world", "hello world"),
        ("\x1b[36mHello,\x1b[32m World!", "Hello, World!"),
        (
            "\x1B[1m\x1B[31mError:\x1B[0m\x1B[31m file not found",
            "\x1b[1m\x1b[31mError:\x1b[0m\x1b[31m file not found",
            "Error: file not found",
        ),
    ],
@@ -0,0 +1,31 @@
from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict


class RateLimitSettings(BaseSettings):
    redis_host: str = Field(
        default="redis://localhost:6379",
        description="Redis host",
        validation_alias="REDIS_HOST",
    )

    redis_port: str = Field(
        default="6379", description="Redis port", validation_alias="REDIS_PORT"
    )

    redis_password: str = Field(
        default="password",
        description="Redis password",
        validation_alias="REDIS_PASSWORD",
    )

    requests_per_minute: int = Field(
        default=60,
        description="Maximum number of requests allowed per minute per API key",
        validation_alias="RATE_LIMIT_REQUESTS_PER_MINUTE",
    )

    model_config = SettingsConfigDict(case_sensitive=True, extra="ignore")


RATE_LIMIT_SETTINGS = RateLimitSettings()
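Note: since these settings subclass pydantic-settings' BaseSettings with explicit validation aliases, each field can be overridden from the environment. A minimal sketch of that behaviour (not part of the diff; the import path is assumed, as the new file's path is not shown in this view):

```python
import os

# Assumed module path for the RateLimitSettings class added above.
from autogpt_libs.rate_limit.config import RateLimitSettings

# The validation_alias values above double as environment variable names.
os.environ["RATE_LIMIT_REQUESTS_PER_MINUTE"] = "120"
settings = RateLimitSettings()
print(settings.requests_per_minute)  # -> 120
```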
@@ -0,0 +1,51 @@
import time
from typing import Tuple

from redis import Redis

from .config import RATE_LIMIT_SETTINGS


class RateLimiter:
    def __init__(
        self,
        redis_host: str = RATE_LIMIT_SETTINGS.redis_host,
        redis_port: str = RATE_LIMIT_SETTINGS.redis_port,
        redis_password: str = RATE_LIMIT_SETTINGS.redis_password,
        requests_per_minute: int = RATE_LIMIT_SETTINGS.requests_per_minute,
    ):
        self.redis = Redis(
            host=redis_host,
            port=int(redis_port),
            password=redis_password,
            decode_responses=True,
        )
        self.window = 60
        self.max_requests = requests_per_minute

    async def check_rate_limit(self, api_key_id: str) -> Tuple[bool, int, int]:
        """
        Check if request is within rate limits.

        Args:
            api_key_id: The API key identifier to check

        Returns:
            Tuple of (is_allowed, remaining_requests, reset_time)
        """
        now = time.time()
        window_start = now - self.window
        key = f"ratelimit:{api_key_id}:1min"

        pipe = self.redis.pipeline()
        pipe.zremrangebyscore(key, 0, window_start)
        pipe.zadd(key, {str(now): now})
        pipe.zcount(key, window_start, now)
        pipe.expire(key, self.window)

        _, _, request_count, _ = pipe.execute()

        remaining = max(0, self.max_requests - request_count)
        reset_time = int(now + self.window)

        return request_count <= self.max_requests, remaining, reset_time
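The limiter keeps a sliding one-minute window per API key in a Redis sorted set: expired entries are trimmed, the current request is recorded, and the remaining budget is derived from the window count. A rough usage sketch (not part of the diff; assumes a reachable Redis with the configured credentials and an assumed module path):

```python
import asyncio

# Assumed import path; the diff does not show where the new limiter module lives.
from autogpt_libs.rate_limit.limiter import RateLimiter


async def main() -> None:
    limiter = RateLimiter(requests_per_minute=60)
    # Records this request and reports whether it still fits in the 60-second window.
    is_allowed, remaining, reset_time = await limiter.check_rate_limit("example-api-key-id")
    print(is_allowed, remaining, reset_time)


asyncio.run(main())
```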
@@ -0,0 +1,32 @@
from fastapi import HTTPException, Request
from starlette.middleware.base import RequestResponseEndpoint

from .limiter import RateLimiter


async def rate_limit_middleware(request: Request, call_next: RequestResponseEndpoint):
    """FastAPI middleware for rate limiting API requests."""
    limiter = RateLimiter()

    if not request.url.path.startswith("/api"):
        return await call_next(request)

    api_key = request.headers.get("Authorization")
    if not api_key:
        return await call_next(request)

    api_key = api_key.replace("Bearer ", "")

    is_allowed, remaining, reset_time = await limiter.check_rate_limit(api_key)

    if not is_allowed:
        raise HTTPException(
            status_code=429, detail="Rate limit exceeded. Please try again later."
        )

    response = await call_next(request)
    response.headers["X-RateLimit-Limit"] = str(limiter.max_requests)
    response.headers["X-RateLimit-Remaining"] = str(remaining)
    response.headers["X-RateLimit-Reset"] = str(reset_time)

    return response
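Because rate_limit_middleware is a plain `(request, call_next)` coroutine, it can be attached to a FastAPI app through Starlette's BaseHTTPMiddleware. How the platform actually wires it up is not shown in this diff, so the snippet below is only an illustrative sketch:

```python
from fastapi import FastAPI
from starlette.middleware.base import BaseHTTPMiddleware

# Assumed import path for the middleware defined above.
from autogpt_libs.rate_limit.middleware import rate_limit_middleware

app = FastAPI()
# Every request passes through the limiter; non-/api paths and requests
# without an Authorization header are passed straight through.
app.add_middleware(BaseHTTPMiddleware, dispatch=rate_limit_middleware)
```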
@@ -0,0 +1,76 @@
from typing import Annotated, Any, Literal, Optional, TypedDict
from uuid import uuid4

from pydantic import BaseModel, Field, SecretStr, field_serializer


class _BaseCredentials(BaseModel):
    id: str = Field(default_factory=lambda: str(uuid4()))
    provider: str
    title: Optional[str]

    @field_serializer("*")
    def dump_secret_strings(value: Any, _info):
        if isinstance(value, SecretStr):
            return value.get_secret_value()
        return value


class OAuth2Credentials(_BaseCredentials):
    type: Literal["oauth2"] = "oauth2"
    username: Optional[str]
    """Username of the third-party service user that these credentials belong to"""
    access_token: SecretStr
    access_token_expires_at: Optional[int]
    """Unix timestamp (seconds) indicating when the access token expires (if at all)"""
    refresh_token: Optional[SecretStr]
    refresh_token_expires_at: Optional[int]
    """Unix timestamp (seconds) indicating when the refresh token expires (if at all)"""
    scopes: list[str]
    metadata: dict[str, Any] = Field(default_factory=dict)

    def bearer(self) -> str:
        return f"Bearer {self.access_token.get_secret_value()}"


class APIKeyCredentials(_BaseCredentials):
    type: Literal["api_key"] = "api_key"
    api_key: SecretStr
    expires_at: Optional[int]
    """Unix timestamp (seconds) indicating when the API key expires (if at all)"""

    def bearer(self) -> str:
        return f"Bearer {self.api_key.get_secret_value()}"


Credentials = Annotated[
    OAuth2Credentials | APIKeyCredentials,
    Field(discriminator="type"),
]


CredentialsType = Literal["api_key", "oauth2"]


class OAuthState(BaseModel):
    token: str
    provider: str
    expires_at: int
    code_verifier: Optional[str] = None
    scopes: list[str]
    """Unix timestamp (seconds) indicating when this OAuth state expires"""


class UserMetadata(BaseModel):
    integration_credentials: list[Credentials] = Field(default_factory=list)
    integration_oauth_states: list[OAuthState] = Field(default_factory=list)


class UserMetadataRaw(TypedDict, total=False):
    integration_credentials: list[dict]
    integration_oauth_states: list[dict]


class UserIntegrations(BaseModel):
    credentials: list[Credentials] = Field(default_factory=list)
    oauth_states: list[OAuthState] = Field(default_factory=list)
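Credentials is a discriminated union keyed on the `type` field, so raw dicts can be validated straight into the right model. A small sketch of that (the provider name and key value are made up; import the models from wherever this new module lives, which the diff does not show):

```python
from pydantic import TypeAdapter

# `Credentials` and `APIKeyCredentials` are the models defined above.
raw = {
    "provider": "example",   # hypothetical provider
    "title": "Example key",
    "type": "api_key",       # discriminator -> APIKeyCredentials
    "api_key": "sk-test",
    "expires_at": None,
}

creds = TypeAdapter(Credentials).validate_python(raw)
assert isinstance(creds, APIKeyCredentials)
print(creds.bearer())        # -> "Bearer sk-test"
```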
76  autogpt_platform/autogpt_libs/poetry.lock  generated
@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand.

[[package]]
name = "aiohappyeyeballs"
@@ -1091,13 +1091,13 @@ pyasn1 = ">=0.4.6,<0.7.0"

[[package]]
name = "pydantic"
version = "2.10.2"
version = "2.10.3"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.8"
files = [
    {file = "pydantic-2.10.2-py3-none-any.whl", hash = "sha256:cfb96e45951117c3024e6b67b25cdc33a3cb7b2fa62e239f7af1378358a1d99e"},
    {file = "pydantic-2.10.2.tar.gz", hash = "sha256:2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa"},
    {file = "pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d"},
    {file = "pydantic-2.10.3.tar.gz", hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9"},
]

[package.dependencies]
@@ -1223,13 +1223,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"

[[package]]
name = "pydantic-settings"
version = "2.6.1"
version = "2.7.0"
description = "Settings management using Pydantic"
optional = false
python-versions = ">=3.8"
files = [
    {file = "pydantic_settings-2.6.1-py3-none-any.whl", hash = "sha256:7fb0637c786a558d3103436278a7c4f1cfd29ba8973238a50c5bb9a55387da87"},
    {file = "pydantic_settings-2.6.1.tar.gz", hash = "sha256:e0f92546d8a9923cb8941689abf85d6601a8c19a23e97a34b2964a2e3f813ca0"},
    {file = "pydantic_settings-2.7.0-py3-none-any.whl", hash = "sha256:e00c05d5fa6cbbb227c84bd7487c5c1065084119b750df7c8c1a554aed236eb5"},
    {file = "pydantic_settings-2.7.0.tar.gz", hash = "sha256:ac4bfd4a36831a48dbf8b2d9325425b549a0a6f18cea118436d728eb4f1c4d66"},
]

[package.dependencies]
@@ -1243,13 +1243,13 @@ yaml = ["pyyaml (>=6.0.1)"]

[[package]]
name = "pyjwt"
version = "2.10.0"
version = "2.10.1"
description = "JSON Web Token implementation in Python"
optional = false
python-versions = ">=3.9"
files = [
    {file = "PyJWT-2.10.0-py3-none-any.whl", hash = "sha256:543b77207db656de204372350926bed5a86201c4cbff159f623f79c7bb487a15"},
    {file = "pyjwt-2.10.0.tar.gz", hash = "sha256:7628a7eb7938959ac1b26e819a1df0fd3259505627b575e4bad6d08f76db695c"},
    {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"},
    {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"},
]

[package.extras]
@@ -1282,20 +1282,20 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments

[[package]]
name = "pytest-asyncio"
version = "0.24.0"
version = "0.25.0"
description = "Pytest support for asyncio"
optional = false
python-versions = ">=3.8"
python-versions = ">=3.9"
files = [
    {file = "pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b"},
    {file = "pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276"},
    {file = "pytest_asyncio-0.25.0-py3-none-any.whl", hash = "sha256:db5432d18eac6b7e28b46dcd9b69921b55c3b1086e85febfe04e70b18d9e81b3"},
    {file = "pytest_asyncio-0.25.0.tar.gz", hash = "sha256:8c0610303c9e0442a5db8604505fc0f545456ba1528824842b37b4a626cbf609"},
]

[package.dependencies]
pytest = ">=8.2,<9"

[package.extras]
docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1)"]
testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"]

[[package]]
@@ -1362,13 +1362,13 @@ websockets = ">=11,<13"

[[package]]
name = "redis"
version = "5.2.0"
version = "5.2.1"
description = "Python client for Redis database and key-value store"
optional = false
python-versions = ">=3.8"
files = [
    {file = "redis-5.2.0-py3-none-any.whl", hash = "sha256:ae174f2bb3b1bf2b09d54bf3e51fbc1469cf6c10aa03e21141f51969801a7897"},
    {file = "redis-5.2.0.tar.gz", hash = "sha256:0b1087665a771b1ff2e003aa5bdd354f15a70c9e25d5a7dbf9c722c16528a7b0"},
    {file = "redis-5.2.1-py3-none-any.whl", hash = "sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4"},
    {file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"},
]

[package.dependencies]
@@ -1415,29 +1415,29 @@ pyasn1 = ">=0.1.3"

[[package]]
name = "ruff"
version = "0.8.0"
version = "0.8.3"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
    {file = "ruff-0.8.0-py3-none-linux_armv6l.whl", hash = "sha256:fcb1bf2cc6706adae9d79c8d86478677e3bbd4ced796ccad106fd4776d395fea"},
    {file = "ruff-0.8.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:295bb4c02d58ff2ef4378a1870c20af30723013f441c9d1637a008baaf928c8b"},
    {file = "ruff-0.8.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7b1f1c76b47c18fa92ee78b60d2d20d7e866c55ee603e7d19c1e991fad933a9a"},
    {file = "ruff-0.8.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb0d4f250a7711b67ad513fde67e8870109e5ce590a801c3722580fe98c33a99"},
    {file = "ruff-0.8.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e55cce9aa93c5d0d4e3937e47b169035c7e91c8655b0974e61bb79cf398d49c"},
    {file = "ruff-0.8.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f4cd64916d8e732ce6b87f3f5296a8942d285bbbc161acee7fe561134af64f9"},
    {file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c5c1466be2a2ebdf7c5450dd5d980cc87c8ba6976fb82582fea18823da6fa362"},
    {file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2dabfd05b96b7b8f2da00d53c514eea842bff83e41e1cceb08ae1966254a51df"},
    {file = "ruff-0.8.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:facebdfe5a5af6b1588a1d26d170635ead6892d0e314477e80256ef4a8470cf3"},
    {file = "ruff-0.8.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87a8e86bae0dbd749c815211ca11e3a7bd559b9710746c559ed63106d382bd9c"},
    {file = "ruff-0.8.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:85e654f0ded7befe2d61eeaf3d3b1e4ef3894469cd664ffa85006c7720f1e4a2"},
    {file = "ruff-0.8.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:83a55679c4cb449fa527b8497cadf54f076603cc36779b2170b24f704171ce70"},
    {file = "ruff-0.8.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:812e2052121634cf13cd6fddf0c1871d0ead1aad40a1a258753c04c18bb71bbd"},
    {file = "ruff-0.8.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:780d5d8523c04202184405e60c98d7595bdb498c3c6abba3b6d4cdf2ca2af426"},
    {file = "ruff-0.8.0-py3-none-win32.whl", hash = "sha256:5fdb6efecc3eb60bba5819679466471fd7d13c53487df7248d6e27146e985468"},
    {file = "ruff-0.8.0-py3-none-win_amd64.whl", hash = "sha256:582891c57b96228d146725975fbb942e1f30a0c4ba19722e692ca3eb25cc9b4f"},
    {file = "ruff-0.8.0-py3-none-win_arm64.whl", hash = "sha256:ba93e6294e9a737cd726b74b09a6972e36bb511f9a102f1d9a7e1ce94dd206a6"},
    {file = "ruff-0.8.0.tar.gz", hash = "sha256:a7ccfe6331bf8c8dad715753e157457faf7351c2b69f62f32c165c2dbcbacd44"},
    {file = "ruff-0.8.3-py3-none-linux_armv6l.whl", hash = "sha256:8d5d273ffffff0acd3db5bf626d4b131aa5a5ada1276126231c4174543ce20d6"},
    {file = "ruff-0.8.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e4d66a21de39f15c9757d00c50c8cdd20ac84f55684ca56def7891a025d7e939"},
    {file = "ruff-0.8.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c356e770811858bd20832af696ff6c7e884701115094f427b64b25093d6d932d"},
    {file = "ruff-0.8.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c0a60a825e3e177116c84009d5ebaa90cf40dfab56e1358d1df4e29a9a14b13"},
    {file = "ruff-0.8.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:75fb782f4db39501210ac093c79c3de581d306624575eddd7e4e13747e61ba18"},
    {file = "ruff-0.8.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f26bc76a133ecb09a38b7868737eded6941b70a6d34ef53a4027e83913b6502"},
    {file = "ruff-0.8.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:01b14b2f72a37390c1b13477c1c02d53184f728be2f3ffc3ace5b44e9e87b90d"},
    {file = "ruff-0.8.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53babd6e63e31f4e96ec95ea0d962298f9f0d9cc5990a1bbb023a6baf2503a82"},
    {file = "ruff-0.8.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ae441ce4cf925b7f363d33cd6570c51435972d697e3e58928973994e56e1452"},
    {file = "ruff-0.8.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7c65bc0cadce32255e93c57d57ecc2cca23149edd52714c0c5d6fa11ec328cd"},
    {file = "ruff-0.8.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5be450bb18f23f0edc5a4e5585c17a56ba88920d598f04a06bd9fd76d324cb20"},
    {file = "ruff-0.8.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8faeae3827eaa77f5721f09b9472a18c749139c891dbc17f45e72d8f2ca1f8fc"},
    {file = "ruff-0.8.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:db503486e1cf074b9808403991663e4277f5c664d3fe237ee0d994d1305bb060"},
    {file = "ruff-0.8.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6567be9fb62fbd7a099209257fef4ad2c3153b60579818b31a23c886ed4147ea"},
    {file = "ruff-0.8.3-py3-none-win32.whl", hash = "sha256:19048f2f878f3ee4583fc6cb23fb636e48c2635e30fb2022b3a1cd293402f964"},
    {file = "ruff-0.8.3-py3-none-win_amd64.whl", hash = "sha256:f7df94f57d7418fa7c3ffb650757e0c2b96cf2501a0b192c18e4fb5571dfada9"},
    {file = "ruff-0.8.3-py3-none-win_arm64.whl", hash = "sha256:fe2756edf68ea79707c8d68b78ca9a58ed9af22e430430491ee03e718b5e4936"},
    {file = "ruff-0.8.3.tar.gz", hash = "sha256:5e7558304353b84279042fc584a4f4cb8a07ae79b2bf3da1a7551d960b5626d3"},
]

[[package]]
@@ -1852,4 +1852,4 @@ type = ["pytest-mypy"]

[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<4.0"
content-hash = "54bf6e076ec4d09be2307f07240018459dd6594efdc55a2dc2dc1d673184587e"
content-hash = "13a36d3be675cab4a3eb2e6a62a1b08df779bded4c7b9164d8be300dc08748d0"
@@ -10,18 +10,18 @@ packages = [{ include = "autogpt_libs" }]
colorama = "^0.4.6"
expiringdict = "^1.2.2"
google-cloud-logging = "^3.11.3"
pydantic = "^2.10.2"
pydantic-settings = "^2.6.1"
pyjwt = "^2.10.0"
pytest-asyncio = "^0.24.0"
pydantic = "^2.10.3"
pydantic-settings = "^2.7.0"
pyjwt = "^2.10.1"
pytest-asyncio = "^0.25.0"
pytest-mock = "^3.14.0"
python = ">=3.10,<4.0"
python-dotenv = "^1.0.1"
supabase = "^2.10.0"

[tool.poetry.group.dev.dependencies]
redis = "^5.2.0"
ruff = "^0.8.0"
redis = "^5.2.1"
ruff = "^0.8.3"

[build-system]
requires = ["poetry-core"]
@@ -58,6 +58,11 @@ GITHUB_CLIENT_SECRET=
GOOGLE_CLIENT_ID=
GOOGLE_CLIENT_SECRET=

# Twitter/X OAuth App server credentials - https://developer.x.com/en/products/x-api
TWITTER_CLIENT_ID=
TWITTER_CLIENT_SECRET=


## ===== OPTIONAL API KEYS ===== ##

# LLM
@@ -6,18 +6,23 @@ ENV PYTHONUNBUFFERED 1

WORKDIR /app

RUN echo 'Acquire::http::Pipeline-Depth 0;\nAcquire::http::No-Cache true;\nAcquire::BrokenProxy true;\n' > /etc/apt/apt.conf.d/99fixbadproxy

RUN apt-get update --allow-releaseinfo-change --fix-missing

# Install build dependencies
RUN apt-get update \
    && apt-get install -y build-essential curl ffmpeg wget libcurl4-gnutls-dev libexpat1-dev libpq5 gettext libz-dev libssl-dev postgresql-client git \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

ENV POETRY_VERSION=1.8.3 \
    POETRY_HOME="/opt/poetry" \
    POETRY_NO_INTERACTION=1 \
    POETRY_VIRTUALENVS_CREATE=false \
    PATH="$POETRY_HOME/bin:$PATH"
RUN apt-get install -y build-essential
RUN apt-get install -y libpq5
RUN apt-get install -y libz-dev
RUN apt-get install -y libssl-dev
RUN apt-get install -y postgresql-client

ENV POETRY_VERSION=1.8.3
ENV POETRY_HOME=/opt/poetry
ENV POETRY_NO_INTERACTION=1
ENV POETRY_VIRTUALENVS_CREATE=false
ENV PATH=/opt/poetry/bin:$PATH

# Upgrade pip and setuptools to fix security vulnerabilities
RUN pip3 install --upgrade pip setuptools

@@ -39,11 +44,11 @@ FROM python:3.11.10-slim-bookworm AS server_dependencies

WORKDIR /app

ENV POETRY_VERSION=1.8.3 \
    POETRY_HOME="/opt/poetry" \
    POETRY_NO_INTERACTION=1 \
    POETRY_VIRTUALENVS_CREATE=false \
    PATH="$POETRY_HOME/bin:$PATH"
ENV POETRY_VERSION=1.8.3
ENV POETRY_HOME=/opt/poetry
ENV POETRY_NO_INTERACTION=1
ENV POETRY_VIRTUALENVS_CREATE=false
ENV PATH=/opt/poetry/bin:$PATH


# Upgrade pip and setuptools to fix security vulnerabilities
@@ -200,4 +200,4 @@ To add a new agent block, you need to create a new class that inherits from `Blo
* `run` method: the main logic of the block.
* `test_input` & `test_output`: the sample input and output data for the block, which will be used to auto-test the block.
  * You can mock the functions declared in the block using the `test_mock` field for your unit tests.
* Once you finish creating the block, you can test it by running `pytest -s test/block/test_block.py`.
* Once you finish creating the block, you can test it by running `poetry run pytest -s test/block/test_block.py`.
@@ -15,10 +15,10 @@ modules = [
    if f.is_file() and f.name != "__init__.py"
]
for module in modules:
    if not re.match("^[a-z_.]+$", module):
    if not re.match("^[a-z0-9_.]+$", module):
        raise ValueError(
            f"Block module {module} error: module name must be lowercase, "
            "separated by underscores, and contain only alphabet characters"
            "and contain only alphanumeric characters and underscores."
        )

    importlib.import_module(f".{module}", package=__name__)
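The widened pattern matters for block modules whose names contain digits: they previously failed this check. A quick illustration (the module name `block_v2` is a made-up example):

```python
import re

old_pattern, new_pattern = r"^[a-z_.]+$", r"^[a-z0-9_.]+$"

print(bool(re.match(old_pattern, "code_extraction_block")))  # True
print(bool(re.match(old_pattern, "block_v2")))               # False: digits were rejected
print(bool(re.match(new_pattern, "block_v2")))               # True
```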
@@ -12,6 +12,7 @@ from backend.data.model import (
    CredentialsMetaInput,
    SchemaField,
)
from backend.integrations.providers import ProviderName


class ImageSize(str, Enum):
@@ -101,12 +102,10 @@ class ImageGenModel(str, Enum):

class AIImageGeneratorBlock(Block):
    class Input(BlockSchema):
        credentials: CredentialsMetaInput[Literal["replicate"], Literal["api_key"]] = (
            CredentialsField(
                provider="replicate",
                supported_credential_types={"api_key"},
                description="Enter your Replicate API key to access the image generation API. You can obtain an API key from https://replicate.com/account/api-tokens.",
            )
        credentials: CredentialsMetaInput[
            Literal[ProviderName.REPLICATE], Literal["api_key"]
        ] = CredentialsField(
            description="Enter your Replicate API key to access the image generation API. You can obtain an API key from https://replicate.com/account/api-tokens.",
        )
        prompt: str = SchemaField(
            description="Text prompt for image generation",
@@ -13,6 +13,7 @@ from backend.data.model import (
    CredentialsMetaInput,
    SchemaField,
)
from backend.integrations.providers import ProviderName

logger = logging.getLogger(__name__)

@@ -54,13 +55,11 @@ class NormalizationStrategy(str, Enum):

class AIMusicGeneratorBlock(Block):
    class Input(BlockSchema):
        credentials: CredentialsMetaInput[Literal["replicate"], Literal["api_key"]] = (
            CredentialsField(
                provider="replicate",
                supported_credential_types={"api_key"},
                description="The Replicate integration can be used with "
                "any API key with sufficient permissions for the blocks it is used on.",
            )
        credentials: CredentialsMetaInput[
            Literal[ProviderName.REPLICATE], Literal["api_key"]
        ] = CredentialsField(
            description="The Replicate integration can be used with "
            "any API key with sufficient permissions for the blocks it is used on.",
        )
        prompt: str = SchemaField(
            description="A description of the music you want to generate",
@@ -12,6 +12,7 @@ from backend.data.model import (
    CredentialsMetaInput,
    SchemaField,
)
from backend.integrations.providers import ProviderName
from backend.util.request import requests

TEST_CREDENTIALS = APIKeyCredentials(
@@ -140,13 +141,11 @@ logger = logging.getLogger(__name__)

class AIShortformVideoCreatorBlock(Block):
    class Input(BlockSchema):
        credentials: CredentialsMetaInput[Literal["revid"], Literal["api_key"]] = (
            CredentialsField(
                provider="revid",
                supported_credential_types={"api_key"},
                description="The revid.ai integration can be used with "
                "any API key with sufficient permissions for the blocks it is used on.",
            )
        credentials: CredentialsMetaInput[
            Literal[ProviderName.REVID], Literal["api_key"]
        ] = CredentialsField(
            description="The revid.ai integration can be used with "
            "any API key with sufficient permissions for the blocks it is used on.",
        )
        script: str = SchemaField(
            description="""1. Use short and punctuated sentences\n\n2. Use linebreaks to create a new clip\n\n3. Text outside of brackets is spoken by the AI, and [text between brackets] will be used to guide the visual generation. For example, [close-up of a cat] will show a close-up of a cat.""",
@@ -1,13 +1,11 @@
import re
from typing import Any, List

from jinja2 import BaseLoader, Environment

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema, BlockType
from backend.data.model import SchemaField
from backend.util.mock import MockObject
from backend.util.text import TextFormatter

jinja = Environment(loader=BaseLoader())
formatter = TextFormatter()


class StoreValueBlock(Block):
@@ -304,9 +302,9 @@ class AgentOutputBlock(Block):
        """
        if input_data.format:
            try:
                fmt = re.sub(r"(?<!{){[ a-zA-Z0-9_]+}", r"{\g<0>}", input_data.format)
                template = jinja.from_string(fmt)
                yield "output", template.render({input_data.name: input_data.value})
                yield "output", formatter.format_string(
                    input_data.format, {input_data.name: input_data.value}
                )
            except Exception as e:
                yield "output", f"Error: {e}, {input_data.value}"
        else:
@@ -494,3 +492,101 @@ class NoteBlock(Block):

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        yield "output", input_data.text


class CreateDictionaryBlock(Block):
    class Input(BlockSchema):
        values: dict[str, Any] = SchemaField(
            description="Key-value pairs to create the dictionary with",
            placeholder="e.g., {'name': 'Alice', 'age': 25}",
        )

    class Output(BlockSchema):
        dictionary: dict[str, Any] = SchemaField(
            description="The created dictionary containing the specified key-value pairs"
        )
        error: str = SchemaField(
            description="Error message if dictionary creation failed"
        )

    def __init__(self):
        super().__init__(
            id="b924ddf4-de4f-4b56-9a85-358930dcbc91",
            description="Creates a dictionary with the specified key-value pairs. Use this when you know all the values you want to add upfront.",
            categories={BlockCategory.DATA},
            input_schema=CreateDictionaryBlock.Input,
            output_schema=CreateDictionaryBlock.Output,
            test_input=[
                {
                    "values": {"name": "Alice", "age": 25, "city": "New York"},
                },
                {
                    "values": {"numbers": [1, 2, 3], "active": True, "score": 95.5},
                },
            ],
            test_output=[
                (
                    "dictionary",
                    {"name": "Alice", "age": 25, "city": "New York"},
                ),
                (
                    "dictionary",
                    {"numbers": [1, 2, 3], "active": True, "score": 95.5},
                ),
            ],
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        try:
            # The values are already validated by Pydantic schema
            yield "dictionary", input_data.values
        except Exception as e:
            yield "error", f"Failed to create dictionary: {str(e)}"


class CreateListBlock(Block):
    class Input(BlockSchema):
        values: List[Any] = SchemaField(
            description="A list of values to be combined into a new list.",
            placeholder="e.g., ['Alice', 25, True]",
        )

    class Output(BlockSchema):
        list: List[Any] = SchemaField(
            description="The created list containing the specified values."
        )
        error: str = SchemaField(description="Error message if list creation failed.")

    def __init__(self):
        super().__init__(
            id="a912d5c7-6e00-4542-b2a9-8034136930e4",
            description="Creates a list with the specified values. Use this when you know all the values you want to add upfront.",
            categories={BlockCategory.DATA},
            input_schema=CreateListBlock.Input,
            output_schema=CreateListBlock.Output,
            test_input=[
                {
                    "values": ["Alice", 25, True],
                },
                {
                    "values": [1, 2, 3, "four", {"key": "value"}],
                },
            ],
            test_output=[
                (
                    "list",
                    ["Alice", 25, True],
                ),
                (
                    "list",
                    [1, 2, 3, "four", {"key": "value"}],
                ),
            ],
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        try:
            # The values are already validated by Pydantic schema
            yield "list", input_data.values
        except Exception as e:
            yield "error", f"Failed to create list: {str(e)}"
190  autogpt_platform/backend/backend/blocks/code_executor.py  Normal file
@@ -0,0 +1,190 @@
from enum import Enum
from typing import Literal

from e2b_code_interpreter import Sandbox
from pydantic import SecretStr

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import (
    APIKeyCredentials,
    CredentialsField,
    CredentialsMetaInput,
    SchemaField,
)
from backend.integrations.providers import ProviderName

TEST_CREDENTIALS = APIKeyCredentials(
    id="01234567-89ab-cdef-0123-456789abcdef",
    provider="e2b",
    api_key=SecretStr("mock-e2b-api-key"),
    title="Mock E2B API key",
    expires_at=None,
)
TEST_CREDENTIALS_INPUT = {
    "provider": TEST_CREDENTIALS.provider,
    "id": TEST_CREDENTIALS.id,
    "type": TEST_CREDENTIALS.type,
    "title": TEST_CREDENTIALS.type,
}


class ProgrammingLanguage(Enum):
    PYTHON = "python"
    JAVASCRIPT = "js"
    BASH = "bash"
    R = "r"
    JAVA = "java"


class CodeExecutionBlock(Block):
    # TODO : Add support to upload and download files
    # Currently, You can customized the CPU and Memory, only by creating a pre customized sandbox template
    class Input(BlockSchema):
        credentials: CredentialsMetaInput[
            Literal[ProviderName.E2B], Literal["api_key"]
        ] = CredentialsField(
            description="Enter your api key for the E2B Sandbox. You can get it in here - https://e2b.dev/docs",
        )

        # Todo : Option to run commond in background
        setup_commands: list[str] = SchemaField(
            description=(
                "Shell commands to set up the sandbox before running the code. "
                "You can use `curl` or `git` to install your desired Debian based "
                "package manager. `pip` and `npm` are pre-installed.\n\n"
                "These commands are executed with `sh`, in the foreground."
            ),
            placeholder="pip install cowsay",
            default=[],
            advanced=False,
        )

        code: str = SchemaField(
            description="Code to execute in the sandbox",
            placeholder="print('Hello, World!')",
            default="",
            advanced=False,
        )

        language: ProgrammingLanguage = SchemaField(
            description="Programming language to execute",
            default=ProgrammingLanguage.PYTHON,
            advanced=False,
        )

        timeout: int = SchemaField(
            description="Execution timeout in seconds", default=300
        )

        template_id: str = SchemaField(
            description=(
                "You can use an E2B sandbox template by entering its ID here. "
                "Check out the E2B docs for more details: "
                "[E2B - Sandbox template](https://e2b.dev/docs/sandbox-template)"
            ),
            default="",
            advanced=True,
        )

    class Output(BlockSchema):
        response: str = SchemaField(description="Response from code execution")
        stdout_logs: str = SchemaField(
            description="Standard output logs from execution"
        )
        stderr_logs: str = SchemaField(description="Standard error logs from execution")
        error: str = SchemaField(description="Error message if execution failed")

    def __init__(self):
        super().__init__(
            id="0b02b072-abe7-11ef-8372-fb5d162dd712",
            description="Executes code in an isolated sandbox environment with internet access.",
            categories={BlockCategory.DEVELOPER_TOOLS},
            input_schema=CodeExecutionBlock.Input,
            output_schema=CodeExecutionBlock.Output,
            test_credentials=TEST_CREDENTIALS,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "code": "print('Hello World')",
                "language": ProgrammingLanguage.PYTHON.value,
                "setup_commands": [],
                "timeout": 300,
                "template_id": "",
            },
            test_output=[
                ("response", "Hello World"),
                ("stdout_logs", "Hello World\n"),
            ],
            test_mock={
                "execute_code": lambda code, language, setup_commands, timeout, api_key, template_id: (
                    "Hello World",
                    "Hello World\n",
                    "",
                ),
            },
        )

    def execute_code(
        self,
        code: str,
        language: ProgrammingLanguage,
        setup_commands: list[str],
        timeout: int,
        api_key: str,
        template_id: str,
    ):
        try:
            sandbox = None
            if template_id:
                sandbox = Sandbox(
                    template=template_id, api_key=api_key, timeout=timeout
                )
            else:
                sandbox = Sandbox(api_key=api_key, timeout=timeout)

            if not sandbox:
                raise Exception("Sandbox not created")

            # Running setup commands
            for cmd in setup_commands:
                sandbox.commands.run(cmd)

            # Executing the code
            execution = sandbox.run_code(
                code,
                language=language.value,
                on_error=lambda e: sandbox.kill(),  # Kill the sandbox if there is an error
            )

            if execution.error:
                raise Exception(execution.error)

            response = execution.text
            stdout_logs = "".join(execution.logs.stdout)
            stderr_logs = "".join(execution.logs.stderr)

            return response, stdout_logs, stderr_logs

        except Exception as e:
            raise e

    def run(
        self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
    ) -> BlockOutput:
        try:
            response, stdout_logs, stderr_logs = self.execute_code(
                input_data.code,
                input_data.language,
                input_data.setup_commands,
                input_data.timeout,
                credentials.api_key.get_secret_value(),
                input_data.template_id,
            )

            if response:
                yield "response", response
            if stdout_logs:
                yield "stdout_logs", stdout_logs
            if stderr_logs:
                yield "stderr_logs", stderr_logs
        except Exception as e:
            yield "error", str(e)
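Stripped of the Block plumbing, execute_code boils down to a handful of e2b_code_interpreter calls. A standalone sketch using only the calls that appear above (the API key is a placeholder):

```python
from e2b_code_interpreter import Sandbox  # same client the block imports

# Placeholder key; in the block this comes from the credentials input.
sandbox = Sandbox(api_key="YOUR_E2B_API_KEY", timeout=300)

# Setup commands run first, in the foreground, via `sh`.
sandbox.commands.run("pip install cowsay")

# Then the user code runs in the requested language.
execution = sandbox.run_code("print('Hello, World!')", language="python")

print(execution.text)                  # the block's "response" output
print("".join(execution.logs.stdout))  # the block's "stdout_logs" output
```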
110  autogpt_platform/backend/backend/blocks/code_extraction_block.py  Normal file
@@ -0,0 +1,110 @@
import re

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField


class CodeExtractionBlock(Block):
    class Input(BlockSchema):
        text: str = SchemaField(
            description="Text containing code blocks to extract (e.g., AI response)",
            placeholder="Enter text containing code blocks",
        )

    class Output(BlockSchema):
        html: str = SchemaField(description="Extracted HTML code")
        css: str = SchemaField(description="Extracted CSS code")
        javascript: str = SchemaField(description="Extracted JavaScript code")
        python: str = SchemaField(description="Extracted Python code")
        sql: str = SchemaField(description="Extracted SQL code")
        java: str = SchemaField(description="Extracted Java code")
        cpp: str = SchemaField(description="Extracted C++ code")
        csharp: str = SchemaField(description="Extracted C# code")
        json_code: str = SchemaField(description="Extracted JSON code")
        bash: str = SchemaField(description="Extracted Bash code")
        php: str = SchemaField(description="Extracted PHP code")
        ruby: str = SchemaField(description="Extracted Ruby code")
        yaml: str = SchemaField(description="Extracted YAML code")
        markdown: str = SchemaField(description="Extracted Markdown code")
        typescript: str = SchemaField(description="Extracted TypeScript code")
        xml: str = SchemaField(description="Extracted XML code")
        remaining_text: str = SchemaField(
            description="Remaining text after code extraction"
        )

    def __init__(self):
        super().__init__(
            id="d3a7d896-3b78-4f44-8b4b-48fbf4f0bcd8",
            description="Extracts code blocks from text and identifies their programming languages",
            categories={BlockCategory.TEXT},
            input_schema=CodeExtractionBlock.Input,
            output_schema=CodeExtractionBlock.Output,
            test_input={
                "text": "Here's a Python example:\n```python\nprint('Hello World')\n```\nAnd some HTML:\n```html\n<h1>Title</h1>\n```"
            },
            test_output=[
                ("html", "<h1>Title</h1>"),
                ("python", "print('Hello World')"),
                ("remaining_text", "Here's a Python example:\nAnd some HTML:"),
            ],
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        # List of supported programming languages with mapped aliases
        language_aliases = {
            "html": ["html", "htm"],
            "css": ["css"],
            "javascript": ["javascript", "js"],
            "python": ["python", "py"],
            "sql": ["sql"],
            "java": ["java"],
            "cpp": ["cpp", "c++"],
            "csharp": ["csharp", "c#", "cs"],
            "json_code": ["json"],
            "bash": ["bash", "shell", "sh"],
            "php": ["php"],
            "ruby": ["ruby", "rb"],
            "yaml": ["yaml", "yml"],
            "markdown": ["markdown", "md"],
            "typescript": ["typescript", "ts"],
            "xml": ["xml"],
        }

        # Extract code for each language
        for canonical_name, aliases in language_aliases.items():
            code = ""
            # Try each alias for the language
            for alias in aliases:
                code_for_alias = self.extract_code(input_data.text, alias)
                if code_for_alias:
                    code = code + "\n\n" + code_for_alias if code else code_for_alias

            if code:  # Only yield if there's actual code content
                yield canonical_name, code

        # Remove all code blocks from the text to get remaining text
        pattern = (
            r"```(?:"
            + "|".join(
                re.escape(alias)
                for aliases in language_aliases.values()
                for alias in aliases
            )
            + r")\s+[\s\S]*?```"
        )

        remaining_text = re.sub(pattern, "", input_data.text).strip()
        remaining_text = re.sub(r"\n\s*\n", "\n", remaining_text)

        if remaining_text:  # Only yield if there's remaining text
            yield "remaining_text", remaining_text

    def extract_code(self, text: str, language: str) -> str:
        # Escape special regex characters in the language string
        language = re.escape(language)
        # Extract all code blocks enclosed in ```language``` blocks
        pattern = re.compile(rf"```{language}\s+(.*?)```", re.DOTALL | re.IGNORECASE)
        matches = pattern.finditer(text)
        # Combine all code blocks for this language with newlines between them
        code_blocks = [match.group(1).strip() for match in matches]
        return "\n\n".join(code_blocks) if code_blocks else ""
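The extraction itself is a single regex over fenced blocks. A self-contained sketch of the same pattern outside the Block class:

```python
import re


def extract_code(text: str, language: str) -> str:
    # Same pattern CodeExtractionBlock.extract_code uses for one language.
    pattern = re.compile(rf"```{re.escape(language)}\s+(.*?)```", re.DOTALL | re.IGNORECASE)
    blocks = [m.group(1).strip() for m in pattern.finditer(text)]
    return "\n\n".join(blocks)


fence = "`" * 3  # build the fence so this example doesn't nest literal fences
sample = f"Intro\n{fence}python\nprint('hi')\n{fence}\nOutro"
print(extract_code(sample, "python"))  # -> print('hi')
```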
59  autogpt_platform/backend/backend/blocks/compass/triggers.py  Normal file
@@ -0,0 +1,59 @@
from pydantic import BaseModel

from backend.data.block import (
    Block,
    BlockCategory,
    BlockManualWebhookConfig,
    BlockOutput,
    BlockSchema,
)
from backend.data.model import SchemaField
from backend.integrations.webhooks.compass import CompassWebhookType


class Transcription(BaseModel):
    text: str
    speaker: str
    end: float
    start: float
    duration: float


class TranscriptionDataModel(BaseModel):
    date: str
    transcription: str
    transcriptions: list[Transcription]


class CompassAITriggerBlock(Block):
    class Input(BlockSchema):
        payload: TranscriptionDataModel = SchemaField(hidden=True)

    class Output(BlockSchema):
        transcription: str = SchemaField(
            description="The contents of the compass transcription."
        )

    def __init__(self):
        super().__init__(
            id="9464a020-ed1d-49e1-990f-7f2ac924a2b7",
            description="This block will output the contents of the compass transcription.",
            categories={BlockCategory.HARDWARE},
            input_schema=CompassAITriggerBlock.Input,
            output_schema=CompassAITriggerBlock.Output,
            webhook_config=BlockManualWebhookConfig(
                provider="compass",
                webhook_type=CompassWebhookType.TRANSCRIPTION,
            ),
            test_input=[
                {"input": "Hello, World!"},
                {"input": "Hello, World!", "data": "Existing Data"},
            ],
            # test_output=[
            #     ("output", "Hello, World!"),  # No data provided, so trigger is returned
            #     ("output", "Existing Data"),  # Data is provided, so data is returned.
            # ],
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        yield "transcription", input_data.payload.transcription
@@ -12,16 +12,15 @@ from backend.data.model import (
    CredentialsMetaInput,
    SchemaField,
)
from backend.integrations.providers import ProviderName

DiscordCredentials = CredentialsMetaInput[Literal["discord"], Literal["api_key"]]
DiscordCredentials = CredentialsMetaInput[
    Literal[ProviderName.DISCORD], Literal["api_key"]
]


def DiscordCredentialsField() -> DiscordCredentials:
    return CredentialsField(
        description="Discord bot token",
        provider="discord",
        supported_credential_types={"api_key"},
    )
    return CredentialsField(description="Discord bot token")


TEST_CREDENTIALS = APIKeyCredentials(
32  autogpt_platform/backend/backend/blocks/exa/_auth.py  Normal file
@@ -0,0 +1,32 @@
from typing import Literal

from pydantic import SecretStr

from backend.data.model import APIKeyCredentials, CredentialsField, CredentialsMetaInput
from backend.integrations.providers import ProviderName

ExaCredentials = APIKeyCredentials
ExaCredentialsInput = CredentialsMetaInput[
    Literal[ProviderName.EXA],
    Literal["api_key"],
]

TEST_CREDENTIALS = APIKeyCredentials(
    id="01234567-89ab-cdef-0123-456789abcdef",
    provider="exa",
    api_key=SecretStr("mock-exa-api-key"),
    title="Mock Exa API key",
    expires_at=None,
)

TEST_CREDENTIALS_INPUT = {
    "provider": TEST_CREDENTIALS.provider,
    "id": TEST_CREDENTIALS.id,
    "type": TEST_CREDENTIALS.type,
    "title": TEST_CREDENTIALS.title,
}


def ExaCredentialsField() -> ExaCredentialsInput:
    """Creates an Exa credentials input on a block."""
    return CredentialsField(description="The Exa integration requires an API Key.")
87
autogpt_platform/backend/backend/blocks/exa/contents.py
Normal file
@@ -0,0 +1,87 @@
from typing import List, Optional

from pydantic import BaseModel

from backend.blocks.exa._auth import (
    ExaCredentials,
    ExaCredentialsField,
    ExaCredentialsInput,
)
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
from backend.util.request import requests


class ContentRetrievalSettings(BaseModel):
    text: Optional[dict] = SchemaField(
        description="Text content settings",
        default={"maxCharacters": 1000, "includeHtmlTags": False},
        advanced=True,
    )
    highlights: Optional[dict] = SchemaField(
        description="Highlight settings",
        default={
            "numSentences": 3,
            "highlightsPerUrl": 3,
            "query": "",
        },
        advanced=True,
    )
    summary: Optional[dict] = SchemaField(
        description="Summary settings",
        default={"query": ""},
        advanced=True,
    )


class ExaContentsBlock(Block):
    class Input(BlockSchema):
        credentials: ExaCredentialsInput = ExaCredentialsField()
        ids: List[str] = SchemaField(
            description="Array of document IDs obtained from searches",
        )
        contents: ContentRetrievalSettings = SchemaField(
            description="Content retrieval settings",
            default=ContentRetrievalSettings(),
            advanced=True,
        )

    class Output(BlockSchema):
        results: list = SchemaField(
            description="List of document contents",
            default=[],
        )

    def __init__(self):
        super().__init__(
            id="c52be83f-f8cd-4180-b243-af35f986b461",
            description="Retrieves document contents using Exa's contents API",
            categories={BlockCategory.SEARCH},
            input_schema=ExaContentsBlock.Input,
            output_schema=ExaContentsBlock.Output,
        )

    def run(
        self, input_data: Input, *, credentials: ExaCredentials, **kwargs
    ) -> BlockOutput:
        url = "https://api.exa.ai/contents"
        headers = {
            "Content-Type": "application/json",
            "x-api-key": credentials.api_key.get_secret_value(),
        }

        payload = {
            "ids": input_data.ids,
            "text": input_data.contents.text,
            "highlights": input_data.contents.highlights,
            "summary": input_data.contents.summary,
        }

        try:
            response = requests.post(url, headers=headers, json=payload)
            response.raise_for_status()
            data = response.json()
            yield "results", data.get("results", [])
        except Exception as e:
            yield "error", str(e)
            yield "results", []
54
autogpt_platform/backend/backend/blocks/exa/helpers.py
Normal file
@@ -0,0 +1,54 @@
from typing import Optional

from pydantic import BaseModel

from backend.data.model import SchemaField


class TextSettings(BaseModel):
    max_characters: int = SchemaField(
        default=1000,
        description="Maximum number of characters to return",
        placeholder="1000",
    )
    include_html_tags: bool = SchemaField(
        default=False,
        description="Whether to include HTML tags in the text",
        placeholder="False",
    )


class HighlightSettings(BaseModel):
    num_sentences: int = SchemaField(
        default=3,
        description="Number of sentences per highlight",
        placeholder="3",
    )
    highlights_per_url: int = SchemaField(
        default=3,
        description="Number of highlights per URL",
        placeholder="3",
    )


class SummarySettings(BaseModel):
    query: Optional[str] = SchemaField(
        default="",
        description="Query string for summarization",
        placeholder="Enter query",
    )


class ContentSettings(BaseModel):
    text: TextSettings = SchemaField(
        default=TextSettings(),
        description="Text content settings",
    )
    highlights: HighlightSettings = SchemaField(
        default=HighlightSettings(),
        description="Highlight settings",
    )
    summary: SummarySettings = SchemaField(
        default=SummarySettings(),
        description="Summary settings",
    )
143
autogpt_platform/backend/backend/blocks/exa/search.py
Normal file
@@ -0,0 +1,143 @@
from datetime import datetime
from typing import List

from backend.blocks.exa._auth import (
    ExaCredentials,
    ExaCredentialsField,
    ExaCredentialsInput,
)
from backend.blocks.exa.helpers import ContentSettings
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
from backend.util.request import requests


class ExaSearchBlock(Block):
    class Input(BlockSchema):
        credentials: ExaCredentialsInput = ExaCredentialsField()
        query: str = SchemaField(description="The search query")
        use_auto_prompt: bool = SchemaField(
            description="Whether to use autoprompt",
            default=True,
            advanced=True,
        )
        type: str = SchemaField(
            description="Type of search",
            default="",
            advanced=True,
        )
        category: str = SchemaField(
            description="Category to search within",
            default="",
            advanced=True,
        )
        number_of_results: int = SchemaField(
            description="Number of results to return",
            default=10,
            advanced=True,
        )
        include_domains: List[str] = SchemaField(
            description="Domains to include in search",
            default=[],
        )
        exclude_domains: List[str] = SchemaField(
            description="Domains to exclude from search",
            default=[],
            advanced=True,
        )
        start_crawl_date: datetime = SchemaField(
            description="Start date for crawled content",
        )
        end_crawl_date: datetime = SchemaField(
            description="End date for crawled content",
        )
        start_published_date: datetime = SchemaField(
            description="Start date for published content",
        )
        end_published_date: datetime = SchemaField(
            description="End date for published content",
        )
        include_text: List[str] = SchemaField(
            description="Text patterns to include",
            default=[],
            advanced=True,
        )
        exclude_text: List[str] = SchemaField(
            description="Text patterns to exclude",
            default=[],
            advanced=True,
        )
        contents: ContentSettings = SchemaField(
            description="Content retrieval settings",
            default=ContentSettings(),
            advanced=True,
        )

    class Output(BlockSchema):
        results: list = SchemaField(
            description="List of search results",
            default=[],
        )

    def __init__(self):
        super().__init__(
            id="996cec64-ac40-4dde-982f-b0dc60a5824d",
            description="Searches the web using Exa's advanced search API",
            categories={BlockCategory.SEARCH},
            input_schema=ExaSearchBlock.Input,
            output_schema=ExaSearchBlock.Output,
        )

    def run(
        self, input_data: Input, *, credentials: ExaCredentials, **kwargs
    ) -> BlockOutput:
        url = "https://api.exa.ai/search"
        headers = {
            "Content-Type": "application/json",
            "x-api-key": credentials.api_key.get_secret_value(),
        }

        payload = {
            "query": input_data.query,
            "useAutoprompt": input_data.use_auto_prompt,
            "numResults": input_data.number_of_results,
            "contents": input_data.contents.dict(),
        }

        date_field_mapping = {
            "start_crawl_date": "startCrawlDate",
            "end_crawl_date": "endCrawlDate",
            "start_published_date": "startPublishedDate",
            "end_published_date": "endPublishedDate",
        }

        # Add dates if they exist
        for input_field, api_field in date_field_mapping.items():
            value = getattr(input_data, input_field, None)
            if value:
                payload[api_field] = value.strftime("%Y-%m-%dT%H:%M:%S.000Z")

        optional_field_mapping = {
            "type": "type",
            "category": "category",
            "include_domains": "includeDomains",
            "exclude_domains": "excludeDomains",
            "include_text": "includeText",
            "exclude_text": "excludeText",
        }

        # Add other fields
        for input_field, api_field in optional_field_mapping.items():
            value = getattr(input_data, input_field)
            if value:  # Only add non-empty values
                payload[api_field] = value

        try:
            response = requests.post(url, headers=headers, json=payload)
            response.raise_for_status()
            data = response.json()
            # Extract just the results array from the response
            yield "results", data.get("results", [])
        except Exception as e:
            yield "error", str(e)
            yield "results", []
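A small standalone sketch (not part of the diff) of the date formatting the search payload above relies on; the chosen datetime is an arbitrary example.

# Illustrative only: how the block serializes datetime inputs for the Exa API.
from datetime import datetime

value = datetime(2024, 1, 15, 8, 30)
print(value.strftime("%Y-%m-%dT%H:%M:%S.000Z"))  # -> 2024-01-15T08:30:00.000Z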
128
autogpt_platform/backend/backend/blocks/exa/similar.py
Normal file
@@ -0,0 +1,128 @@
from datetime import datetime
from typing import Any, List

from backend.blocks.exa._auth import (
    ExaCredentials,
    ExaCredentialsField,
    ExaCredentialsInput,
)
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
from backend.util.request import requests

from .helpers import ContentSettings


class ExaFindSimilarBlock(Block):
    class Input(BlockSchema):
        credentials: ExaCredentialsInput = ExaCredentialsField()
        url: str = SchemaField(
            description="The url for which you would like to find similar links"
        )
        number_of_results: int = SchemaField(
            description="Number of results to return",
            default=10,
            advanced=True,
        )
        include_domains: List[str] = SchemaField(
            description="Domains to include in search",
            default=[],
            advanced=True,
        )
        exclude_domains: List[str] = SchemaField(
            description="Domains to exclude from search",
            default=[],
            advanced=True,
        )
        start_crawl_date: datetime = SchemaField(
            description="Start date for crawled content",
        )
        end_crawl_date: datetime = SchemaField(
            description="End date for crawled content",
        )
        start_published_date: datetime = SchemaField(
            description="Start date for published content",
        )
        end_published_date: datetime = SchemaField(
            description="End date for published content",
        )
        include_text: List[str] = SchemaField(
            description="Text patterns to include (max 1 string, up to 5 words)",
            default=[],
            advanced=True,
        )
        exclude_text: List[str] = SchemaField(
            description="Text patterns to exclude (max 1 string, up to 5 words)",
            default=[],
            advanced=True,
        )
        contents: ContentSettings = SchemaField(
            description="Content retrieval settings",
            default=ContentSettings(),
            advanced=True,
        )

    class Output(BlockSchema):
        results: List[Any] = SchemaField(
            description="List of similar documents with title, URL, published date, author, and score",
            default=[],
        )

    def __init__(self):
        super().__init__(
            id="5e7315d1-af61-4a0c-9350-7c868fa7438a",
            description="Finds similar links using Exa's findSimilar API",
            categories={BlockCategory.SEARCH},
            input_schema=ExaFindSimilarBlock.Input,
            output_schema=ExaFindSimilarBlock.Output,
        )

    def run(
        self, input_data: Input, *, credentials: ExaCredentials, **kwargs
    ) -> BlockOutput:
        url = "https://api.exa.ai/findSimilar"
        headers = {
            "Content-Type": "application/json",
            "x-api-key": credentials.api_key.get_secret_value(),
        }

        payload = {
            "url": input_data.url,
            "numResults": input_data.number_of_results,
            "contents": input_data.contents.dict(),
        }

        optional_field_mapping = {
            "include_domains": "includeDomains",
            "exclude_domains": "excludeDomains",
            "include_text": "includeText",
            "exclude_text": "excludeText",
        }

        # Add optional fields if they have values
        for input_field, api_field in optional_field_mapping.items():
            value = getattr(input_data, input_field)
            if value:  # Only add non-empty values
                payload[api_field] = value

        date_field_mapping = {
            "start_crawl_date": "startCrawlDate",
            "end_crawl_date": "endCrawlDate",
            "start_published_date": "startPublishedDate",
            "end_published_date": "endPublishedDate",
        }

        # Add dates if they exist
        for input_field, api_field in date_field_mapping.items():
            value = getattr(input_data, input_field, None)
            if value:
                payload[api_field] = value.strftime("%Y-%m-%dT%H:%M:%S.000Z")

        try:
            response = requests.post(url, headers=headers, json=payload)
            response.raise_for_status()
            data = response.json()
            yield "results", data.get("results", [])
        except Exception as e:
            yield "error", str(e)
            yield "results", []
@@ -3,10 +3,11 @@ from typing import Literal
from pydantic import SecretStr

from backend.data.model import APIKeyCredentials, CredentialsField, CredentialsMetaInput
from backend.integrations.providers import ProviderName

FalCredentials = APIKeyCredentials
FalCredentialsInput = CredentialsMetaInput[
    Literal["fal"],
    Literal[ProviderName.FAL],
    Literal["api_key"],
]

@@ -30,7 +31,5 @@ def FalCredentialsField() -> FalCredentialsInput:
    Creates a FAL credentials input on a block.
    """
    return CredentialsField(
        provider="fal",
        supported_credential_types={"api_key"},
        description="The FAL integration can be used with an API Key.",
    )

@@ -1,7 +1,7 @@
import logging
import time
from enum import Enum
from typing import Any, Dict
from typing import Any

import httpx

@@ -64,7 +64,7 @@ class AIVideoGeneratorBlock(Block):
            },
        )

    def _get_headers(self, api_key: str) -> Dict[str, str]:
    def _get_headers(self, api_key: str) -> dict[str, str]:
        """Get headers for FAL API requests."""
        return {
            "Authorization": f"Key {api_key}",
@@ -72,8 +72,8 @@ class AIVideoGeneratorBlock(Block):
        }

    def _submit_request(
        self, url: str, headers: Dict[str, str], data: Dict[str, Any]
    ) -> Dict[str, Any]:
        self, url: str, headers: dict[str, str], data: dict[str, Any]
    ) -> dict[str, Any]:
        """Submit a request to the FAL API."""
        try:
            response = httpx.post(url, headers=headers, json=data)
@@ -83,7 +83,7 @@ class AIVideoGeneratorBlock(Block):
            logger.error(f"FAL API request failed: {str(e)}")
            raise RuntimeError(f"Failed to submit request: {str(e)}")

    def _poll_status(self, status_url: str, headers: Dict[str, str]) -> Dict[str, Any]:
    def _poll_status(self, status_url: str, headers: dict[str, str]) -> dict[str, Any]:
        """Poll the status endpoint until completion or failure."""
        try:
            response = httpx.get(status_url, headers=headers)

@@ -8,6 +8,7 @@ from backend.data.model import (
    CredentialsMetaInput,
    OAuth2Credentials,
)
from backend.integrations.providers import ProviderName
from backend.util.settings import Secrets

secrets = Secrets()
@@ -17,7 +18,7 @@ GITHUB_OAUTH_IS_CONFIGURED = bool(

GithubCredentials = APIKeyCredentials | OAuth2Credentials
GithubCredentialsInput = CredentialsMetaInput[
    Literal["github"],
    Literal[ProviderName.GITHUB],
    Literal["api_key", "oauth2"] if GITHUB_OAUTH_IS_CONFIGURED else Literal["api_key"],
]

@@ -30,10 +31,6 @@ def GithubCredentialsField(scope: str) -> GithubCredentialsInput:
        scope: The authorization scope needed for the block to work. ([list of available scopes](https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/scopes-for-oauth-apps#available-scopes))
    """  # noqa
    return CredentialsField(
        provider="github",
        supported_credential_types=(
            {"api_key", "oauth2"} if GITHUB_OAUTH_IS_CONFIGURED else {"api_key"}
        ),
        required_scopes={scope},
        description="The GitHub integration can be used with OAuth, "
        "or any API key with sufficient permissions for the blocks it is used on.",

@@ -1,3 +1,5 @@
import re

from typing_extensions import TypedDict

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
@@ -253,7 +255,7 @@ class GithubReadPullRequestBlock(Block):
    @staticmethod
    def read_pr_changes(credentials: GithubCredentials, pr_url: str) -> str:
        api = get_api(credentials)
        files_url = pr_url + "/files"
        files_url = prepare_pr_api_url(pr_url=pr_url, path="files")
        response = api.get(files_url)
        files = response.json()
        changes = []
@@ -331,7 +333,7 @@ class GithubAssignPRReviewerBlock(Block):
        credentials: GithubCredentials, pr_url: str, reviewer: str
    ) -> str:
        api = get_api(credentials)
        reviewers_url = pr_url + "/requested_reviewers"
        reviewers_url = prepare_pr_api_url(pr_url=pr_url, path="requested_reviewers")
        data = {"reviewers": [reviewer]}
        api.post(reviewers_url, json=data)
        return "Reviewer assigned successfully"
@@ -398,7 +400,7 @@ class GithubUnassignPRReviewerBlock(Block):
        credentials: GithubCredentials, pr_url: str, reviewer: str
    ) -> str:
        api = get_api(credentials)
        reviewers_url = pr_url + "/requested_reviewers"
        reviewers_url = prepare_pr_api_url(pr_url=pr_url, path="requested_reviewers")
        data = {"reviewers": [reviewer]}
        api.delete(reviewers_url, json=data)
        return "Reviewer unassigned successfully"
@@ -478,7 +480,7 @@ class GithubListPRReviewersBlock(Block):
        credentials: GithubCredentials, pr_url: str
    ) -> list[Output.ReviewerItem]:
        api = get_api(credentials)
        reviewers_url = pr_url + "/requested_reviewers"
        reviewers_url = prepare_pr_api_url(pr_url=pr_url, path="requested_reviewers")
        response = api.get(reviewers_url)
        data = response.json()
        reviewers: list[GithubListPRReviewersBlock.Output.ReviewerItem] = [
@@ -499,3 +501,14 @@ class GithubListPRReviewersBlock(Block):
            input_data.pr_url,
        )
        yield from (("reviewer", reviewer) for reviewer in reviewers)


def prepare_pr_api_url(pr_url: str, path: str) -> str:
    # Pattern to capture the base repository URL and the pull request number
    pattern = r"^(?:https?://)?([^/]+/[^/]+/[^/]+)/pull/(\d+)"
    match = re.match(pattern, pr_url)
    if not match:
        return pr_url

    base_url, pr_number = match.groups()
    return f"{base_url}/pulls/{pr_number}/{path}"

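A quick standalone sketch (not part of the diff) of what the prepare_pr_api_url helper above produces for a typical PR page URL; the example URL is invented and the scheme is intentionally dropped by the pattern.

# Illustrative only: the URL rewrite performed by prepare_pr_api_url.
import re

def prepare_pr_api_url(pr_url: str, path: str) -> str:
    pattern = r"^(?:https?://)?([^/]+/[^/]+/[^/]+)/pull/(\d+)"
    match = re.match(pattern, pr_url)
    if not match:
        return pr_url
    base_url, pr_number = match.groups()
    return f"{base_url}/pulls/{pr_number}/{path}"

print(prepare_pr_api_url("https://github.com/octocat/hello-world/pull/42", "files"))
# -> github.com/octocat/hello-world/pulls/42/files  (note: /pull/ becomes /pulls/)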
@@ -111,7 +111,9 @@ class GithubPullRequestTriggerBlock(GitHubTriggerBase, Block):
    def __init__(self):
        from backend.integrations.webhooks.github import GithubWebhookType

        example_payload = json.loads(self.EXAMPLE_PAYLOAD_FILE.read_text())
        example_payload = json.loads(
            self.EXAMPLE_PAYLOAD_FILE.read_text(encoding="utf-8")
        )

        super().__init__(
            id="6c60ec01-8128-419e-988f-96a063ee2fea",

@@ -3,6 +3,7 @@ from typing import Literal
from pydantic import SecretStr

from backend.data.model import CredentialsField, CredentialsMetaInput, OAuth2Credentials
from backend.integrations.providers import ProviderName
from backend.util.settings import Secrets

# --8<-- [start:GoogleOAuthIsConfigured]
@@ -12,7 +13,9 @@ GOOGLE_OAUTH_IS_CONFIGURED = bool(
)
# --8<-- [end:GoogleOAuthIsConfigured]
GoogleCredentials = OAuth2Credentials
GoogleCredentialsInput = CredentialsMetaInput[Literal["google"], Literal["oauth2"]]
GoogleCredentialsInput = CredentialsMetaInput[
    Literal[ProviderName.GOOGLE], Literal["oauth2"]
]


def GoogleCredentialsField(scopes: list[str]) -> GoogleCredentialsInput:
@@ -23,8 +26,6 @@ def GoogleCredentialsField(scopes: list[str]) -> GoogleCredentialsInput:
        scopes: The authorization scopes needed for the block to work.
    """
    return CredentialsField(
        provider="google",
        supported_credential_types={"oauth2"},
        required_scopes=set(scopes),
        description="The Google integration requires OAuth2 authentication.",
    )

@@ -10,6 +10,7 @@ from backend.data.model import (
    CredentialsMetaInput,
    SchemaField,
)
from backend.integrations.providers import ProviderName

TEST_CREDENTIALS = APIKeyCredentials(
    id="01234567-89ab-cdef-0123-456789abcdef",
@@ -38,12 +39,8 @@ class Place(BaseModel):
class GoogleMapsSearchBlock(Block):
    class Input(BlockSchema):
        credentials: CredentialsMetaInput[
            Literal["google_maps"], Literal["api_key"]
        ] = CredentialsField(
            provider="google_maps",
            supported_credential_types={"api_key"},
            description="Google Maps API Key",
        )
            Literal[ProviderName.GOOGLE_MAPS], Literal["api_key"]
        ] = CredentialsField(description="Google Maps API Key")
        query: str = SchemaField(
            description="Search query for local businesses",
            placeholder="e.g., 'restaurants in New York'",

@@ -3,10 +3,11 @@ from typing import Literal
from pydantic import SecretStr

from backend.data.model import APIKeyCredentials, CredentialsField, CredentialsMetaInput
from backend.integrations.providers import ProviderName

HubSpotCredentials = APIKeyCredentials
HubSpotCredentialsInput = CredentialsMetaInput[
    Literal["hubspot"],
    Literal[ProviderName.HUBSPOT],
    Literal["api_key"],
]

@@ -14,8 +15,6 @@ HubSpotCredentialsInput = CredentialsMetaInput[
def HubSpotCredentialsField() -> HubSpotCredentialsInput:
    """Creates a HubSpot credentials input on a block."""
    return CredentialsField(
        provider="hubspot",
        supported_credential_types={"api_key"},
        description="The HubSpot integration requires an API Key.",
    )

@@ -11,6 +11,7 @@ from backend.data.model import (
    CredentialsMetaInput,
    SchemaField,
)
from backend.integrations.providers import ProviderName
from backend.util.request import requests

TEST_CREDENTIALS = APIKeyCredentials(
@@ -83,13 +84,10 @@ class UpscaleOption(str, Enum):

class IdeogramModelBlock(Block):
    class Input(BlockSchema):

        credentials: CredentialsMetaInput[Literal["ideogram"], Literal["api_key"]] = (
            CredentialsField(
                provider="ideogram",
                supported_credential_types={"api_key"},
                description="The Ideogram integration can be used with any API key with sufficient permissions for the blocks it is used on.",
            )
        credentials: CredentialsMetaInput[
            Literal[ProviderName.IDEOGRAM], Literal["api_key"]
        ] = CredentialsField(
            description="The Ideogram integration can be used with any API key with sufficient permissions for the blocks it is used on.",
        )
        prompt: str = SchemaField(
            description="Text prompt for image generation",

@@ -3,27 +3,14 @@ from typing import Literal
from pydantic import SecretStr

from backend.data.model import APIKeyCredentials, CredentialsField, CredentialsMetaInput
from backend.integrations.providers import ProviderName

JinaCredentials = APIKeyCredentials
JinaCredentialsInput = CredentialsMetaInput[
    Literal["jina"],
    Literal[ProviderName.JINA],
    Literal["api_key"],
]

TEST_CREDENTIALS = APIKeyCredentials(
    id="01234567-89ab-cdef-0123-456789abcdef",
    provider="jina",
    api_key=SecretStr("mock-jina-api-key"),
    title="Mock Jina API key",
    expires_at=None,
)
TEST_CREDENTIALS_INPUT = {
    "provider": TEST_CREDENTIALS.provider,
    "id": TEST_CREDENTIALS.id,
    "type": TEST_CREDENTIALS.type,
    "title": TEST_CREDENTIALS.type,
}


def JinaCredentialsField() -> JinaCredentialsInput:
    """
@@ -31,8 +18,6 @@ def JinaCredentialsField() -> JinaCredentialsInput:

    """
    return CredentialsField(
        provider="jina",
        supported_credential_types={"api_key"},
        description="The Jina integration can be used with an API Key.",
    )

59
autogpt_platform/backend/backend/blocks/jina/fact_checker.py
Normal file
@@ -0,0 +1,59 @@
from urllib.parse import quote

import requests

from backend.blocks.jina._auth import (
    JinaCredentials,
    JinaCredentialsField,
    JinaCredentialsInput,
)
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField


class FactCheckerBlock(Block):
    class Input(BlockSchema):
        statement: str = SchemaField(
            description="The statement to check for factuality"
        )
        credentials: JinaCredentialsInput = JinaCredentialsField()

    class Output(BlockSchema):
        factuality: float = SchemaField(
            description="The factuality score of the statement"
        )
        result: bool = SchemaField(description="The result of the factuality check")
        reason: str = SchemaField(description="The reason for the factuality result")
        error: str = SchemaField(description="Error message if the check fails")

    def __init__(self):
        super().__init__(
            id="d38b6c5e-9968-4271-8423-6cfe60d6e7e6",
            description="This block checks the factuality of a given statement using Jina AI's Grounding API.",
            categories={BlockCategory.SEARCH},
            input_schema=FactCheckerBlock.Input,
            output_schema=FactCheckerBlock.Output,
        )

    def run(
        self, input_data: Input, *, credentials: JinaCredentials, **kwargs
    ) -> BlockOutput:
        encoded_statement = quote(input_data.statement)
        url = f"https://g.jina.ai/{encoded_statement}"

        headers = {
            "Accept": "application/json",
            "Authorization": f"Bearer {credentials.api_key.get_secret_value()}",
        }

        response = requests.get(url, headers=headers)
        response.raise_for_status()
        data = response.json()

        if "data" in data:
            data = data["data"]
            yield "factuality", data["factuality"]
            yield "result", data["result"]
            yield "reason", data["reason"]
        else:
            raise RuntimeError(f"Expected 'data' key not found in response: {data}")
@@ -7,6 +7,8 @@ from typing import TYPE_CHECKING, Any, List, Literal, NamedTuple

from pydantic import SecretStr

from backend.integrations.providers import ProviderName

if TYPE_CHECKING:
    from enum import _EnumMemberT

@@ -27,7 +29,13 @@ from backend.util.settings import BehaveAs, Settings

logger = logging.getLogger(__name__)

LLMProviderName = Literal["anthropic", "groq", "openai", "ollama", "open_router"]
LLMProviderName = Literal[
    ProviderName.ANTHROPIC,
    ProviderName.GROQ,
    ProviderName.OLLAMA,
    ProviderName.OPENAI,
    ProviderName.OPEN_ROUTER,
]
AICredentials = CredentialsMetaInput[LLMProviderName, Literal["api_key"]]

TEST_CREDENTIALS = APIKeyCredentials(
@@ -48,8 +56,6 @@ TEST_CREDENTIALS_INPUT = {
def AICredentialsField() -> AICredentials:
    return CredentialsField(
        description="API key for the LLM provider.",
        provider=["anthropic", "groq", "openai", "ollama", "open_router"],
        supported_credential_types={"api_key"},
        discriminator="model",
        discriminator_mapping={
            model.value: model.metadata.provider for model in LlmModel
@@ -105,9 +111,9 @@ class LlmModel(str, Enum, metaclass=LlmModelMeta):
    # Ollama models
    OLLAMA_LLAMA3_8B = "llama3"
    OLLAMA_LLAMA3_405B = "llama3.1:405b"
    OLLAMA_DOLPHIN = "dolphin-mistral:latest"
    # OpenRouter models
    GEMINI_FLASH_1_5_8B = "google/gemini-flash-1.5"
    GEMINI_FLASH_1_5_EXP = "google/gemini-flash-1.5-exp"
    GROK_BETA = "x-ai/grok-beta"
    MISTRAL_NEMO = "mistralai/mistral-nemo"
    COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024"
@@ -117,6 +123,14 @@ class LlmModel(str, Enum, metaclass=LlmModelMeta):
    PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE = (
        "perplexity/llama-3.1-sonar-large-128k-online"
    )
    QWEN_QWQ_32B_PREVIEW = "qwen/qwq-32b-preview"
    NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B = "nousresearch/hermes-3-llama-3.1-405b"
    NOUSRESEARCH_HERMES_3_LLAMA_3_1_70B = "nousresearch/hermes-3-llama-3.1-70b"
    AMAZON_NOVA_LITE_V1 = "amazon/nova-lite-v1"
    AMAZON_NOVA_MICRO_V1 = "amazon/nova-micro-v1"
    AMAZON_NOVA_PRO_V1 = "amazon/nova-pro-v1"
    MICROSOFT_WIZARDLM_2_8X22B = "microsoft/wizardlm-2-8x22b"
    GRYPHE_MYTHOMAX_L2_13B = "gryphe/mythomax-l2-13b"

    @property
    def metadata(self) -> ModelMetadata:
@@ -151,8 +165,8 @@ MODEL_METADATA = {
    LlmModel.LLAMA3_1_8B: ModelMetadata("groq", 131072),
    LlmModel.OLLAMA_LLAMA3_8B: ModelMetadata("ollama", 8192),
    LlmModel.OLLAMA_LLAMA3_405B: ModelMetadata("ollama", 8192),
    LlmModel.OLLAMA_DOLPHIN: ModelMetadata("ollama", 32768),
    LlmModel.GEMINI_FLASH_1_5_8B: ModelMetadata("open_router", 8192),
    LlmModel.GEMINI_FLASH_1_5_EXP: ModelMetadata("open_router", 8192),
    LlmModel.GROK_BETA: ModelMetadata("open_router", 8192),
    LlmModel.MISTRAL_NEMO: ModelMetadata("open_router", 4000),
    LlmModel.COHERE_COMMAND_R_08_2024: ModelMetadata("open_router", 4000),
@@ -162,6 +176,14 @@ MODEL_METADATA = {
    LlmModel.PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE: ModelMetadata(
        "open_router", 8192
    ),
    LlmModel.QWEN_QWQ_32B_PREVIEW: ModelMetadata("open_router", 4000),
    LlmModel.NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B: ModelMetadata("open_router", 4000),
    LlmModel.NOUSRESEARCH_HERMES_3_LLAMA_3_1_70B: ModelMetadata("open_router", 4000),
    LlmModel.AMAZON_NOVA_LITE_V1: ModelMetadata("open_router", 4000),
    LlmModel.AMAZON_NOVA_MICRO_V1: ModelMetadata("open_router", 4000),
    LlmModel.AMAZON_NOVA_PRO_V1: ModelMetadata("open_router", 4000),
    LlmModel.MICROSOFT_WIZARDLM_2_8X22B: ModelMetadata("open_router", 4000),
    LlmModel.GRYPHE_MYTHOMAX_L2_13B: ModelMetadata("open_router", 4000),
}

for model in LlmModel:
@@ -220,6 +242,12 @@ class AIStructuredResponseGeneratorBlock(Block):
            description="The maximum number of tokens to generate in the chat completion.",
        )

        ollama_host: str = SchemaField(
            advanced=True,
            default="localhost:11434",
            description="Ollama host for local models",
        )

    class Output(BlockSchema):
        response: dict[str, Any] = SchemaField(
            description="The response object generated by the language model."
@@ -265,6 +293,7 @@ class AIStructuredResponseGeneratorBlock(Block):
        prompt: list[dict],
        json_format: bool,
        max_tokens: int | None = None,
        ollama_host: str = "localhost:11434",
    ) -> tuple[str, int, int]:
        """
        Args:
@@ -273,6 +302,7 @@ class AIStructuredResponseGeneratorBlock(Block):
            prompt: The prompt to send to the LLM.
            json_format: Whether the response should be in JSON format.
            max_tokens: The maximum number of tokens to generate in the chat completion.
            ollama_host: The host for ollama to use

        Returns:
            The response from the LLM.
@@ -362,9 +392,10 @@ class AIStructuredResponseGeneratorBlock(Block):
                response.usage.completion_tokens if response.usage else 0,
            )
        elif provider == "ollama":
            client = ollama.Client(host=ollama_host)
            sys_messages = [p["content"] for p in prompt if p["role"] == "system"]
            usr_messages = [p["content"] for p in prompt if p["role"] != "system"]
            response = ollama.generate(
            response = client.generate(
                model=llm_model.value,
                prompt=f"{sys_messages}\n\n{usr_messages}",
                stream=False,
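A minimal sketch (not part of the diff) of the pattern this hunk switches to: constructing an ollama client bound to a configurable host instead of calling the module-level function. The host value and model name are example assumptions.

# Illustrative only: per-host ollama client, as the ollama_host input above enables.
import ollama

client = ollama.Client(host="localhost:11434")  # example host; could be a remote machine
response = client.generate(model="llama3", prompt="Hello", stream=False)
print(response["response"])  # the generated text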
@@ -464,6 +495,7 @@ class AIStructuredResponseGeneratorBlock(Block):
                llm_model=llm_model,
                prompt=prompt,
                json_format=bool(input_data.expected_format),
                ollama_host=input_data.ollama_host,
                max_tokens=input_data.max_tokens,
            )
            self.merge_stats(
@@ -546,6 +578,11 @@ class AITextGeneratorBlock(Block):
        prompt_values: dict[str, str] = SchemaField(
            advanced=False, default={}, description="Values used to fill in the prompt."
        )
        ollama_host: str = SchemaField(
            advanced=True,
            default="localhost:11434",
            description="Ollama host for local models",
        )
        max_tokens: int | None = SchemaField(
            advanced=True,
            default=None,
@@ -636,6 +673,11 @@ class AITextSummarizerBlock(Block):
            description="The number of overlapping tokens between chunks to maintain context.",
            ge=0,
        )
        ollama_host: str = SchemaField(
            advanced=True,
            default="localhost:11434",
            description="Ollama host for local models",
        )

    class Output(BlockSchema):
        summary: str = SchemaField(description="The final summary of the text.")
@@ -774,6 +816,11 @@ class AIConversationBlock(Block):
            default=None,
            description="The maximum number of tokens to generate in the chat completion.",
        )
        ollama_host: str = SchemaField(
            advanced=True,
            default="localhost:11434",
            description="Ollama host for local models",
        )

    class Output(BlockSchema):
        response: str = SchemaField(
@@ -871,6 +918,11 @@ class AIListGeneratorBlock(Block):
            default=None,
            description="The maximum number of tokens to generate in the chat completion.",
        )
        ollama_host: str = SchemaField(
            advanced=True,
            default="localhost:11434",
            description="Ollama host for local models",
        )

    class Output(BlockSchema):
        generated_list: List[str] = SchemaField(description="The generated list.")
@@ -1022,6 +1074,7 @@ class AIListGeneratorBlock(Block):
                credentials=input_data.credentials,
                model=input_data.model,
                expected_format={},  # Do not use structured response
                ollama_host=input_data.ollama_host,
            ),
            credentials=credentials,
        )
@@ -12,6 +12,7 @@ from backend.data.model import (
    SchemaField,
    SecretField,
)
from backend.integrations.providers import ProviderName
from backend.util.request import requests

TEST_CREDENTIALS = APIKeyCredentials(
@@ -77,12 +78,10 @@ class PublishToMediumBlock(Block):
            description="Whether to notify followers that the user has published",
            placeholder="False",
        )
        credentials: CredentialsMetaInput[Literal["medium"], Literal["api_key"]] = (
            CredentialsField(
                provider="medium",
                supported_credential_types={"api_key"},
                description="The Medium integration can be used with any API key with sufficient permissions for the blocks it is used on.",
            )
        credentials: CredentialsMetaInput[
            Literal[ProviderName.MEDIUM], Literal["api_key"]
        ] = CredentialsField(
            description="The Medium integration can be used with any API key with sufficient permissions for the blocks it is used on.",
        )

    class Output(BlockSchema):
@@ -10,22 +10,18 @@ from backend.data.model import (
    CredentialsMetaInput,
    SchemaField,
)
from backend.integrations.providers import ProviderName

PineconeCredentials = APIKeyCredentials
PineconeCredentialsInput = CredentialsMetaInput[
    Literal["pinecone"],
    Literal[ProviderName.PINECONE],
    Literal["api_key"],
]


def PineconeCredentialsField() -> PineconeCredentialsInput:
    """
    Creates a Pinecone credentials input on a block.

    """
    """Creates a Pinecone credentials input on a block."""
    return CredentialsField(
        provider="pinecone",
        supported_credential_types={"api_key"},
        description="The Pinecone integration can be used with an API Key.",
    )

@@ -147,7 +143,7 @@ class PineconeQueryBlock(Block):
                top_k=input_data.top_k,
                include_values=input_data.include_values,
                include_metadata=input_data.include_metadata,
            ).to_dict()
            ).to_dict()  # type: ignore
            combined_text = ""
            if results["matches"]:
                texts = [
@@ -13,6 +13,7 @@ from backend.data.model import (
    CredentialsMetaInput,
    SchemaField,
)
from backend.integrations.providers import ProviderName

TEST_CREDENTIALS = APIKeyCredentials(
    id="01234567-89ab-cdef-0123-456789abcdef",
@@ -54,13 +55,11 @@ class ImageType(str, Enum):

class ReplicateFluxAdvancedModelBlock(Block):
    class Input(BlockSchema):
        credentials: CredentialsMetaInput[Literal["replicate"], Literal["api_key"]] = (
            CredentialsField(
                provider="replicate",
                supported_credential_types={"api_key"},
                description="The Replicate integration can be used with "
                "any API key with sufficient permissions for the blocks it is used on.",
            )
        credentials: CredentialsMetaInput[
            Literal[ProviderName.REPLICATE], Literal["api_key"]
        ] = CredentialsField(
            description="The Replicate integration can be used with "
            "any API key with sufficient permissions for the blocks it is used on.",
        )
        prompt: str = SchemaField(
            description="Text prompt for image generation",

@@ -11,6 +11,7 @@ from backend.data.model import (
    CredentialsMetaInput,
    SchemaField,
)
from backend.integrations.providers import ProviderName


class GetWikipediaSummaryBlock(Block, GetRequest):
@@ -65,10 +66,8 @@ class GetWeatherInformationBlock(Block, GetRequest):
            description="Location to get weather information for"
        )
        credentials: CredentialsMetaInput[
            Literal["openweathermap"], Literal["api_key"]
            Literal[ProviderName.OPENWEATHERMAP], Literal["api_key"]
        ] = CredentialsField(
            provider="openweathermap",
            supported_credential_types={"api_key"},
            description="The OpenWeatherMap integration can be used with "
            "any API key with sufficient permissions for the blocks it is used on.",
        )

70
autogpt_platform/backend/backend/blocks/slant3d/_api.py
Normal file
@@ -0,0 +1,70 @@
from enum import Enum
from typing import Literal

from pydantic import BaseModel, SecretStr

from backend.data.model import APIKeyCredentials, CredentialsField, CredentialsMetaInput
from backend.integrations.providers import ProviderName

Slant3DCredentialsInput = CredentialsMetaInput[
    Literal[ProviderName.SLANT3D], Literal["api_key"]
]


def Slant3DCredentialsField() -> Slant3DCredentialsInput:
    return CredentialsField(description="Slant3D API key for authentication")


TEST_CREDENTIALS = APIKeyCredentials(
    id="01234567-89ab-cdef-0123-456789abcdef",
    provider="slant3d",
    api_key=SecretStr("mock-slant3d-api-key"),
    title="Mock Slant3D API key",
    expires_at=None,
)

TEST_CREDENTIALS_INPUT = {
    "provider": TEST_CREDENTIALS.provider,
    "id": TEST_CREDENTIALS.id,
    "type": TEST_CREDENTIALS.type,
    "title": TEST_CREDENTIALS.title,
}


class CustomerDetails(BaseModel):
    name: str
    email: str
    phone: str
    address: str
    city: str
    state: str
    zip: str
    country_iso: str = "US"
    is_residential: bool = True


class Color(Enum):
    WHITE = "white"
    BLACK = "black"


class Profile(Enum):
    PLA = "PLA"
    PETG = "PETG"


class OrderItem(BaseModel):
    # filename: str
    file_url: str
    quantity: str  # String as per API spec
    color: Color = Color.WHITE
    profile: Profile = Profile.PLA
    # image_url: str = ""
    # sku: str = ""


class Filament(BaseModel):
    filament: str
    hexColor: str
    colorTag: str
    profile: str
94
autogpt_platform/backend/backend/blocks/slant3d/base.py
Normal file
@@ -0,0 +1,94 @@
from typing import Any, Dict

from backend.data.block import Block
from backend.util.request import requests

from ._api import Color, CustomerDetails, OrderItem, Profile


class Slant3DBlockBase(Block):
    """Base block class for Slant3D API interactions"""

    BASE_URL = "https://www.slant3dapi.com/api"

    def _get_headers(self, api_key: str) -> Dict[str, str]:
        return {"api-key": api_key, "Content-Type": "application/json"}

    def _make_request(self, method: str, endpoint: str, api_key: str, **kwargs) -> Dict:
        url = f"{self.BASE_URL}/{endpoint}"
        response = requests.request(
            method=method, url=url, headers=self._get_headers(api_key), **kwargs
        )

        if not response.ok:
            error_msg = response.json().get("error", "Unknown error")
            raise RuntimeError(f"API request failed: {error_msg}")

        return response.json()

    def _check_valid_color(self, profile: Profile, color: Color, api_key: str) -> str:
        response = self._make_request(
            "GET",
            "filament",
            api_key,
            params={"profile": profile.value, "color": color.value},
        )
        if profile == Profile.PLA:
            color_tag = color.value
        else:
            color_tag = f"{profile.value.lower()}{color.value.capitalize()}"
        valid_tags = [filament["colorTag"] for filament in response["filaments"]]

        if color_tag not in valid_tags:
            raise ValueError(
                f"""Invalid color profile combination {color_tag}.
Valid colors for {profile.value} are:
{','.join([filament['colorTag'].replace(profile.value.lower(), '') for filament in response['filaments'] if filament['profile'] == profile.value])}
"""
            )
        return color_tag

    def _convert_to_color(self, profile: Profile, color: Color, api_key: str) -> str:
        return self._check_valid_color(profile, color, api_key)

    def _format_order_data(
        self,
        customer: CustomerDetails,
        order_number: str,
        items: list[OrderItem],
        api_key: str,
    ) -> list[dict[str, Any]]:
        """Helper function to format order data for API requests"""
        orders = []
        for item in items:
            order_data = {
                "email": customer.email,
                "phone": customer.phone,
                "name": customer.name,
                "orderNumber": order_number,
                "filename": item.file_url,
                "fileURL": item.file_url,
                "bill_to_street_1": customer.address,
                "bill_to_city": customer.city,
                "bill_to_state": customer.state,
                "bill_to_zip": customer.zip,
                "bill_to_country_as_iso": customer.country_iso,
                "bill_to_is_US_residential": str(customer.is_residential).lower(),
                "ship_to_name": customer.name,
                "ship_to_street_1": customer.address,
                "ship_to_city": customer.city,
                "ship_to_state": customer.state,
                "ship_to_zip": customer.zip,
                "ship_to_country_as_iso": customer.country_iso,
                "ship_to_is_US_residential": str(customer.is_residential).lower(),
                "order_item_name": item.file_url,
                "order_quantity": item.quantity,
                "order_image_url": "",
                "order_sku": "NOT_USED",
                "order_item_color": self._convert_to_color(
                    item.profile, item.color, api_key
                ),
                "profile": item.profile.value,
            }
            orders.append(order_data)
        return orders
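A standalone sketch (not part of the diff) of the colorTag naming convention that _check_valid_color above validates against: PLA colors use the bare color name, while other profiles prefix the capitalized color with the lowercased profile name.

# Illustrative only: the expected colorTag for a given profile/color pair.
from enum import Enum

class Color(Enum):
    WHITE = "white"
    BLACK = "black"

class Profile(Enum):
    PLA = "PLA"
    PETG = "PETG"

def expected_color_tag(profile: Profile, color: Color) -> str:
    if profile == Profile.PLA:
        return color.value  # e.g. "black"
    return f"{profile.value.lower()}{color.value.capitalize()}"  # e.g. "petgBlack"

print(expected_color_tag(Profile.PETG, Color.BLACK))  # -> petgBlack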
85
autogpt_platform/backend/backend/blocks/slant3d/filament.py
Normal file
@@ -0,0 +1,85 @@
from typing import List

from backend.data.block import BlockOutput, BlockSchema
from backend.data.model import APIKeyCredentials, SchemaField

from ._api import (
    TEST_CREDENTIALS,
    TEST_CREDENTIALS_INPUT,
    Filament,
    Slant3DCredentialsField,
    Slant3DCredentialsInput,
)
from .base import Slant3DBlockBase


class Slant3DFilamentBlock(Slant3DBlockBase):
    """Block for retrieving available filaments"""

    class Input(BlockSchema):
        credentials: Slant3DCredentialsInput = Slant3DCredentialsField()

    class Output(BlockSchema):
        filaments: List[Filament] = SchemaField(
            description="List of available filaments"
        )
        error: str = SchemaField(description="Error message if request failed")

    def __init__(self):
        super().__init__(
            id="7cc416f4-f305-4606-9b3b-452b8a81031c",
            description="Get list of available filaments",
            input_schema=self.Input,
            output_schema=self.Output,
            test_input={"credentials": TEST_CREDENTIALS_INPUT},
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                (
                    "filaments",
                    [
                        {
                            "filament": "PLA BLACK",
                            "hexColor": "000000",
                            "colorTag": "black",
                            "profile": "PLA",
                        },
                        {
                            "filament": "PLA WHITE",
                            "hexColor": "ffffff",
                            "colorTag": "white",
                            "profile": "PLA",
                        },
                    ],
                )
            ],
            test_mock={
                "_make_request": lambda *args, **kwargs: {
                    "filaments": [
                        {
                            "filament": "PLA BLACK",
                            "hexColor": "000000",
                            "colorTag": "black",
                            "profile": "PLA",
                        },
                        {
                            "filament": "PLA WHITE",
                            "hexColor": "ffffff",
                            "colorTag": "white",
                            "profile": "PLA",
                        },
                    ]
                }
            },
        )

    def run(
        self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
    ) -> BlockOutput:
        try:
            result = self._make_request(
                "GET", "filament", credentials.api_key.get_secret_value()
            )
            yield "filaments", result["filaments"]
        except Exception as e:
            yield "error", str(e)
            raise
418
autogpt_platform/backend/backend/blocks/slant3d/order.py
Normal file
@@ -0,0 +1,418 @@
|
||||
import uuid
|
||||
from typing import List
|
||||
|
||||
import requests as baserequests
|
||||
|
||||
from backend.data.block import BlockOutput, BlockSchema
|
||||
from backend.data.model import APIKeyCredentials, SchemaField
|
||||
from backend.util import settings
|
||||
from backend.util.settings import BehaveAs
|
||||
|
||||
from ._api import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
CustomerDetails,
|
||||
OrderItem,
|
||||
Slant3DCredentialsField,
|
||||
Slant3DCredentialsInput,
|
||||
)
|
||||
from .base import Slant3DBlockBase
|
||||
|
||||
|
||||
class Slant3DCreateOrderBlock(Slant3DBlockBase):
|
||||
"""Block for creating new orders"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: Slant3DCredentialsInput = Slant3DCredentialsField()
|
||||
order_number: str = SchemaField(
|
||||
description="Your custom order number (or leave blank for a random one)",
|
||||
default_factory=lambda: str(uuid.uuid4()),
|
||||
)
|
||||
customer: CustomerDetails = SchemaField(
|
||||
description="Customer details for where to ship the item",
|
||||
advanced=False,
|
||||
)
|
||||
items: List[OrderItem] = SchemaField(
|
||||
description="List of items to print",
|
||||
advanced=False,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
order_id: str = SchemaField(description="Slant3D order ID")
|
||||
error: str = SchemaField(description="Error message if order failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="f73007d6-f48f-4aaf-9e6b-6883998a09b4",
|
||||
description="Create a new print order",
|
||||
input_schema=self.Input,
|
||||
output_schema=self.Output,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"order_number": "TEST-001",
|
||||
"customer": {
|
||||
"name": "John Doe",
|
||||
"email": "john@example.com",
|
||||
"phone": "123-456-7890",
|
||||
"address": "123 Test St",
|
||||
"city": "Test City",
|
||||
"state": "TS",
|
||||
"zip": "12345",
|
||||
},
|
||||
"items": [
|
||||
{
|
||||
"file_url": "https://example.com/model.stl",
|
||||
"quantity": "1",
|
||||
"color": "black",
|
||||
"profile": "PLA",
|
||||
}
|
||||
],
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[("order_id", "314144241")],
|
||||
test_mock={
|
||||
"_make_request": lambda *args, **kwargs: {"orderId": "314144241"},
|
||||
"_convert_to_color": lambda *args, **kwargs: "black",
|
||||
},
|
||||
)
|
||||
|
||||
def run(
|
||||
self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
order_data = self._format_order_data(
|
||||
input_data.customer,
|
||||
input_data.order_number,
|
||||
input_data.items,
|
||||
credentials.api_key.get_secret_value(),
|
||||
)
|
||||
result = self._make_request(
|
||||
"POST", "order", credentials.api_key.get_secret_value(), json=order_data
|
||||
)
|
||||
yield "order_id", result["orderId"]
|
||||
except Exception as e:
|
||||
yield "error", str(e)
|
||||
raise
|
||||
|
||||
|
||||
class Slant3DEstimateOrderBlock(Slant3DBlockBase):
|
||||
"""Block for getting order cost estimates"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: Slant3DCredentialsInput = Slant3DCredentialsField()
|
||||
order_number: str = SchemaField(
|
||||
description="Your custom order number (or leave blank for a random one)",
|
||||
default_factory=lambda: str(uuid.uuid4()),
|
||||
)
|
||||
customer: CustomerDetails = SchemaField(
|
||||
description="Customer details for where to ship the item",
|
||||
advanced=False,
|
||||
)
|
||||
items: List[OrderItem] = SchemaField(
|
||||
description="List of items to print",
|
||||
advanced=False,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
total_price: float = SchemaField(description="Total price in USD")
|
||||
shipping_cost: float = SchemaField(description="Shipping cost")
|
||||
printing_cost: float = SchemaField(description="Printing cost")
|
||||
error: str = SchemaField(description="Error message if estimation failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="bf8823d6-b42a-48c7-b558-d7c117f2ae85",
|
||||
description="Get order cost estimate",
|
||||
input_schema=self.Input,
|
||||
output_schema=self.Output,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"order_number": "TEST-001",
|
||||
"customer": {
|
||||
"name": "John Doe",
|
||||
"email": "john@example.com",
|
||||
"phone": "123-456-7890",
|
||||
"address": "123 Test St",
|
||||
"city": "Test City",
|
||||
"state": "TS",
|
||||
"zip": "12345",
|
||||
},
|
||||
"items": [
|
||||
{
|
||||
"file_url": "https://example.com/model.stl",
|
||||
"quantity": "1",
|
||||
"color": "black",
|
||||
"profile": "PLA",
|
||||
}
|
||||
],
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("total_price", 9.31),
|
||||
("shipping_cost", 5.56),
|
||||
("printing_cost", 3.75),
|
||||
],
|
||||
test_mock={
|
||||
"_make_request": lambda *args, **kwargs: {
|
||||
"totalPrice": 9.31,
|
||||
"shippingCost": 5.56,
|
||||
"printingCost": 3.75,
|
||||
},
|
||||
"_convert_to_color": lambda *args, **kwargs: "black",
|
||||
},
|
||||
)
|
||||
|
||||
def run(
|
||||
self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
|
||||
) -> BlockOutput:
|
||||
order_data = self._format_order_data(
|
||||
input_data.customer,
|
||||
input_data.order_number,
|
||||
input_data.items,
|
||||
credentials.api_key.get_secret_value(),
|
||||
)
|
||||
try:
|
||||
result = self._make_request(
|
||||
"POST",
|
||||
"order/estimate",
|
||||
credentials.api_key.get_secret_value(),
|
||||
json=order_data,
|
||||
)
|
||||
yield "total_price", result["totalPrice"]
|
||||
yield "shipping_cost", result["shippingCost"]
|
||||
yield "printing_cost", result["printingCost"]
|
||||
except baserequests.HTTPError as e:
|
||||
yield "error", str(f"Error estimating order: {e} {e.response.text}")
|
||||
raise
|
||||
|
||||
|
||||
class Slant3DEstimateShippingBlock(Slant3DBlockBase):
|
||||
"""Block for getting shipping cost estimates"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: Slant3DCredentialsInput = Slant3DCredentialsField()
|
||||
order_number: str = SchemaField(
|
||||
description="Your custom order number (or leave blank for a random one)",
|
||||
default_factory=lambda: str(uuid.uuid4()),
|
||||
)
|
||||
customer: CustomerDetails = SchemaField(
|
||||
description="Customer details for where to ship the item"
|
||||
)
|
||||
items: List[OrderItem] = SchemaField(
|
||||
description="List of items to print",
|
||||
advanced=False,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
shipping_cost: float = SchemaField(description="Estimated shipping cost")
|
||||
currency_code: str = SchemaField(description="Currency code (e.g., 'usd')")
|
||||
error: str = SchemaField(description="Error message if estimation failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="00aae2a1-caf6-4a74-8175-39a0615d44e1",
|
||||
description="Get shipping cost estimate",
|
||||
input_schema=self.Input,
|
||||
output_schema=self.Output,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"order_number": "TEST-001",
|
||||
"customer": {
|
||||
"name": "John Doe",
|
||||
"email": "john@example.com",
|
||||
"phone": "123-456-7890",
|
||||
"address": "123 Test St",
|
||||
"city": "Test City",
|
||||
"state": "TS",
|
||||
"zip": "12345",
|
||||
},
|
||||
"items": [
|
||||
{
|
||||
"file_url": "https://example.com/model.stl",
|
||||
"quantity": "1",
|
||||
"color": "black",
|
||||
"profile": "PLA",
|
||||
}
|
||||
],
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[("shipping_cost", 4.81), ("currency_code", "usd")],
|
||||
test_mock={
|
||||
"_make_request": lambda *args, **kwargs: {
|
||||
"shippingCost": 4.81,
|
||||
"currencyCode": "usd",
|
||||
},
|
||||
"_convert_to_color": lambda *args, **kwargs: "black",
|
||||
},
|
||||
)
|
||||
|
||||
def run(
|
||||
self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
order_data = self._format_order_data(
|
||||
input_data.customer,
|
||||
input_data.order_number,
|
||||
input_data.items,
|
||||
credentials.api_key.get_secret_value(),
|
||||
)
|
||||
result = self._make_request(
|
||||
"POST",
|
||||
"order/estimateShipping",
|
||||
credentials.api_key.get_secret_value(),
|
||||
json=order_data,
|
||||
)
|
||||
yield "shipping_cost", result["shippingCost"]
|
||||
yield "currency_code", result["currencyCode"]
|
||||
except Exception as e:
|
||||
yield "error", str(e)
|
||||
raise
|
||||
|
||||
|
||||
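For orientation (an editorial note, not part of the diff): the two estimate blocks above post the same _format_order_data(...) payload to order/estimate and order/estimateShipping respectively, then re-key the camelCase response fields into snake_case outputs. The dict names below are illustrative only:

# Response field -> block output, as yielded by the run() methods above
ESTIMATE_OUTPUTS = {
    "totalPrice": "total_price",
    "shippingCost": "shipping_cost",
    "printingCost": "printing_cost",
}
SHIPPING_ESTIMATE_OUTPUTS = {
    "shippingCost": "shipping_cost",
    "currencyCode": "currency_code",
}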
class Slant3DGetOrdersBlock(Slant3DBlockBase):
|
||||
"""Block for retrieving all orders"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: Slant3DCredentialsInput = Slant3DCredentialsField()
|
||||
|
||||
class Output(BlockSchema):
|
||||
orders: List[str] = SchemaField(description="List of orders with their details")
|
||||
error: str = SchemaField(description="Error message if request failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="42283bf5-8a32-4fb4-92a2-60a9ea48e105",
|
||||
description="Get all orders for the account",
|
||||
input_schema=self.Input,
|
||||
output_schema=self.Output,
|
||||
# This block is disabled for cloud-hosted deployments because it exposes all orders on the account
|
||||
disabled=settings.Settings().config.behave_as == BehaveAs.CLOUD,
|
||||
test_input={"credentials": TEST_CREDENTIALS_INPUT},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
(
|
||||
"orders",
|
||||
[
|
||||
"1234567890",
|
||||
],
|
||||
)
|
||||
],
|
||||
test_mock={
|
||||
"_make_request": lambda *args, **kwargs: {
|
||||
"ordersData": [
|
||||
{
|
||||
"orderId": 1234567890,
|
||||
"orderTimestamp": {
|
||||
"_seconds": 1719510986,
|
||||
"_nanoseconds": 710000000,
|
||||
},
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
def run(
|
||||
self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
result = self._make_request(
|
||||
"GET", "order", credentials.api_key.get_secret_value()
|
||||
)
|
||||
yield "orders", [str(order["orderId"]) for order in result["ordersData"]]
|
||||
except Exception as e:
|
||||
yield "error", str(e)
|
||||
raise
|
||||
|
||||
|
||||
class Slant3DTrackingBlock(Slant3DBlockBase):
|
||||
"""Block for tracking order status and shipping"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: Slant3DCredentialsInput = Slant3DCredentialsField()
|
||||
order_id: str = SchemaField(description="Slant3D order ID to track")
|
||||
|
||||
class Output(BlockSchema):
|
||||
status: str = SchemaField(description="Order status")
|
||||
tracking_numbers: List[str] = SchemaField(
|
||||
description="List of tracking numbers"
|
||||
)
|
||||
error: str = SchemaField(description="Error message if tracking failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="dd7c0293-c5af-4551-ba3e-fc162fb1fb89",
|
||||
description="Track order status and shipping",
|
||||
input_schema=self.Input,
|
||||
output_schema=self.Output,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"order_id": "314144241",
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[("status", "awaiting_shipment"), ("tracking_numbers", [])],
|
||||
test_mock={
|
||||
"_make_request": lambda *args, **kwargs: {
|
||||
"status": "awaiting_shipment",
|
||||
"trackingNumbers": [],
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
def run(
|
||||
self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
result = self._make_request(
|
||||
"GET",
|
||||
f"order/{input_data.order_id}/get-tracking",
|
||||
credentials.api_key.get_secret_value(),
|
||||
)
|
||||
yield "status", result["status"]
|
||||
yield "tracking_numbers", result["trackingNumbers"]
|
||||
except Exception as e:
|
||||
yield "error", str(e)
|
||||
raise
|
||||
|
||||
|
||||
class Slant3DCancelOrderBlock(Slant3DBlockBase):
|
||||
"""Block for canceling orders"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: Slant3DCredentialsInput = Slant3DCredentialsField()
|
||||
order_id: str = SchemaField(description="Slant3D order ID to cancel")
|
||||
|
||||
class Output(BlockSchema):
|
||||
status: str = SchemaField(description="Cancellation status message")
|
||||
error: str = SchemaField(description="Error message if cancellation failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="54de35e1-407f-450b-b5fa-3b5e2eba8185",
|
||||
description="Cancel an existing order",
|
||||
input_schema=self.Input,
|
||||
output_schema=self.Output,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"order_id": "314144241",
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[("status", "Order cancelled")],
|
||||
test_mock={
|
||||
"_make_request": lambda *args, **kwargs: {"status": "Order cancelled"}
|
||||
},
|
||||
)
|
||||
|
||||
def run(
|
||||
self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
result = self._make_request(
|
||||
"DELETE",
|
||||
f"order/{input_data.order_id}",
|
||||
credentials.api_key.get_secret_value(),
|
||||
)
|
||||
yield "status", result["status"]
|
||||
except Exception as e:
|
||||
yield "error", str(e)
|
||||
raise
|
||||
autogpt_platform/backend/backend/blocks/slant3d/slicing.py (new file, 61 lines)
@@ -0,0 +1,61 @@
from backend.data.block import BlockOutput, BlockSchema
from backend.data.model import APIKeyCredentials, SchemaField

from ._api import (
    TEST_CREDENTIALS,
    TEST_CREDENTIALS_INPUT,
    Slant3DCredentialsField,
    Slant3DCredentialsInput,
)
from .base import Slant3DBlockBase


class Slant3DSlicerBlock(Slant3DBlockBase):
    """Block for slicing 3D model files"""

    class Input(BlockSchema):
        credentials: Slant3DCredentialsInput = Slant3DCredentialsField()
        file_url: str = SchemaField(
            description="URL of the 3D model file to slice (STL)"
        )

    class Output(BlockSchema):
        message: str = SchemaField(description="Response message")
        price: float = SchemaField(description="Calculated price for printing")
        error: str = SchemaField(description="Error message if slicing failed")

    def __init__(self):
        super().__init__(
            id="f8a12c8d-3e4b-4d5f-b6a7-8c9d0e1f2g3h",
            description="Slice a 3D model file and get pricing information",
            input_schema=self.Input,
            output_schema=self.Output,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "file_url": "https://example.com/model.stl",
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[("message", "Slicing successful"), ("price", 8.23)],
            test_mock={
                "_make_request": lambda *args, **kwargs: {
                    "message": "Slicing successful",
                    "data": {"price": 8.23},
                }
            },
        )

    def run(
        self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
    ) -> BlockOutput:
        try:
            result = self._make_request(
                "POST",
                "slicer",
                credentials.api_key.get_secret_value(),
                json={"fileURL": input_data.file_url},
            )
            yield "message", result["message"]
            yield "price", result["data"]["price"]
        except Exception as e:
            yield "error", str(e)
            raise
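A minimal sketch (not from this PR) of exercising the slicer block the way its test harness would, with _make_request stubbed out. The unittest.mock usage and the direct Input(...) construction are assumptions for illustration:

from unittest.mock import patch

block = Slant3DSlicerBlock()
fake = {"message": "Slicing successful", "data": {"price": 8.23}}

# Stub the HTTP helper so no real Slant3D call is made.
with patch.object(block, "_make_request", return_value=fake):
    outputs = list(
        block.run(
            block.Input(
                credentials=TEST_CREDENTIALS_INPUT,  # assumed to validate like test_input
                file_url="https://example.com/model.stl",
            ),
            credentials=TEST_CREDENTIALS,
        )
    )

# Expected: [("message", "Slicing successful"), ("price", 8.23)]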
autogpt_platform/backend/backend/blocks/slant3d/webhook.py (new file, 125 lines)
@@ -0,0 +1,125 @@
|
||||
from pydantic import BaseModel
|
||||
|
||||
from backend.data.block import (
|
||||
Block,
|
||||
BlockCategory,
|
||||
BlockOutput,
|
||||
BlockSchema,
|
||||
BlockWebhookConfig,
|
||||
)
|
||||
from backend.data.model import SchemaField
|
||||
from backend.util import settings
|
||||
from backend.util.settings import AppEnvironment, BehaveAs
|
||||
|
||||
from ._api import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
Slant3DCredentialsField,
|
||||
Slant3DCredentialsInput,
|
||||
)
|
||||
|
||||
|
||||
class Slant3DTriggerBase:
|
||||
"""Base class for Slant3D webhook triggers"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: Slant3DCredentialsInput = Slant3DCredentialsField()
|
||||
# Webhook URL is handled by the webhook system
|
||||
payload: dict = SchemaField(hidden=True, default={})
|
||||
|
||||
class Output(BlockSchema):
|
||||
payload: dict = SchemaField(
|
||||
description="The complete webhook payload received from Slant3D"
|
||||
)
|
||||
order_id: str = SchemaField(description="The ID of the affected order")
|
||||
error: str = SchemaField(
|
||||
description="Error message if payload processing failed"
|
||||
)
|
||||
|
||||
def run(self, input_data: Input, **kwargs) -> BlockOutput:
|
||||
yield "payload", input_data.payload
|
||||
yield "order_id", input_data.payload["orderId"]
|
||||
|
||||
|
||||
class Slant3DOrderWebhookBlock(Slant3DTriggerBase, Block):
|
||||
"""Block for handling Slant3D order webhooks"""
|
||||
|
||||
class Input(Slant3DTriggerBase.Input):
|
||||
class EventsFilter(BaseModel):
|
||||
"""
|
||||
Currently Slant3D only supports 'SHIPPED' status updates
|
||||
Could be expanded in the future with more status types
|
||||
"""
|
||||
|
||||
shipped: bool = True
|
||||
|
||||
events: EventsFilter = SchemaField(
|
||||
title="Events",
|
||||
description="Order status events to subscribe to",
|
||||
default=EventsFilter(shipped=True),
|
||||
)
|
||||
|
||||
class Output(Slant3DTriggerBase.Output):
|
||||
status: str = SchemaField(description="The new status of the order")
|
||||
tracking_number: str = SchemaField(
|
||||
description="The tracking number for the shipment"
|
||||
)
|
||||
carrier_code: str = SchemaField(description="The carrier code (e.g., 'usps')")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="8a74c2ad-0104-4640-962f-26c6b69e58cd",
|
||||
description=(
|
||||
"This block triggers on Slant3D order status updates and outputs "
|
||||
"the event details, including tracking information when orders are shipped."
|
||||
),
|
||||
# Webhooks are currently subscribed for all orders. This works for self-hosted deployments, but not for cloud-hosted production.
|
||||
disabled=(
|
||||
settings.Settings().config.behave_as == BehaveAs.CLOUD
|
||||
and settings.Settings().config.app_env != AppEnvironment.LOCAL
|
||||
),
|
||||
categories={BlockCategory.DEVELOPER_TOOLS},
|
||||
input_schema=self.Input,
|
||||
output_schema=self.Output,
|
||||
webhook_config=BlockWebhookConfig(
|
||||
provider="slant3d",
|
||||
webhook_type="orders", # Only one type for now
|
||||
resource_format="", # No resource format needed
|
||||
event_filter_input="events",
|
||||
event_format="order.{event}",
|
||||
),
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"events": {"shipped": True},
|
||||
"payload": {
|
||||
"orderId": "1234567890",
|
||||
"status": "SHIPPED",
|
||||
"trackingNumber": "ABCDEF123456",
|
||||
"carrierCode": "usps",
|
||||
},
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
(
|
||||
"payload",
|
||||
{
|
||||
"orderId": "1234567890",
|
||||
"status": "SHIPPED",
|
||||
"trackingNumber": "ABCDEF123456",
|
||||
"carrierCode": "usps",
|
||||
},
|
||||
),
|
||||
("order_id", "1234567890"),
|
||||
("status", "SHIPPED"),
|
||||
("tracking_number", "ABCDEF123456"),
|
||||
("carrier_code", "usps"),
|
||||
],
|
||||
)
|
||||
|
||||
def run(self, input_data: Input, **kwargs) -> BlockOutput: # type: ignore
|
||||
yield from super().run(input_data, **kwargs)
|
||||
|
||||
# Extract and normalize values from the payload
|
||||
yield "status", input_data.payload["status"]
|
||||
yield "tracking_number", input_data.payload["trackingNumber"]
|
||||
yield "carrier_code", input_data.payload["carrierCode"]
|
||||
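A note on the trigger above (editorial, not part of the diff): with event_filter_input="events" and event_format="order.{event}", enabling the shipped filter should subscribe the graph to an order.shipped event, and run() reads orderId, status, trackingNumber and carrierCode straight from the payload. A minimal sketch of that mapping, using the same illustrative payload as the test input:

payload = {
    "orderId": "1234567890",
    "status": "SHIPPED",
    "trackingNumber": "ABCDEF123456",
    "carrierCode": "usps",
}

# Mirrors what Slant3DOrderWebhookBlock.run() yields for this payload.
outputs = {
    "payload": payload,
    "order_id": payload["orderId"],
    "status": payload["status"],
    "tracking_number": payload["trackingNumber"],
    "carrier_code": payload["carrierCode"],
}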
@@ -10,6 +10,7 @@ from backend.data.model import (
|
||||
CredentialsMetaInput,
|
||||
SchemaField,
|
||||
)
|
||||
from backend.integrations.providers import ProviderName
|
||||
from backend.util.request import requests
|
||||
|
||||
TEST_CREDENTIALS = APIKeyCredentials(
|
||||
@@ -29,13 +30,11 @@ TEST_CREDENTIALS_INPUT = {
|
||||
|
||||
class CreateTalkingAvatarVideoBlock(Block):
|
||||
class Input(BlockSchema):
|
||||
credentials: CredentialsMetaInput[Literal["d_id"], Literal["api_key"]] = (
|
||||
CredentialsField(
|
||||
provider="d_id",
|
||||
supported_credential_types={"api_key"},
|
||||
description="The D-ID integration can be used with "
|
||||
"any API key with sufficient permissions for the blocks it is used on.",
|
||||
)
|
||||
credentials: CredentialsMetaInput[
|
||||
Literal[ProviderName.D_ID], Literal["api_key"]
|
||||
] = CredentialsField(
|
||||
description="The D-ID integration can be used with "
|
||||
"any API key with sufficient permissions for the blocks it is used on.",
|
||||
)
|
||||
script_input: str = SchemaField(
|
||||
description="The text input for the script",
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
import re
|
||||
from typing import Any
|
||||
|
||||
from jinja2 import BaseLoader, Environment
|
||||
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
from backend.util import json
|
||||
from backend.util import json, text
|
||||
|
||||
jinja = Environment(loader=BaseLoader())
|
||||
formatter = text.TextFormatter()
|
||||
|
||||
|
||||
class MatchTextPatternBlock(Block):
|
||||
@@ -73,6 +71,7 @@ class ExtractTextInformationBlock(Block):
|
||||
description="Case sensitive match", default=True
|
||||
)
|
||||
dot_all: bool = SchemaField(description="Dot matches all", default=True)
|
||||
find_all: bool = SchemaField(description="Find all matches", default=False)
|
||||
|
||||
class Output(BlockSchema):
|
||||
positive: str = SchemaField(description="Extracted text")
|
||||
@@ -90,12 +89,27 @@ class ExtractTextInformationBlock(Block):
|
||||
{"text": "Hello, World!", "pattern": "Hello, (.+)", "group": 0},
|
||||
{"text": "Hello, World!", "pattern": "Hello, (.+)", "group": 2},
|
||||
{"text": "Hello, World!", "pattern": "hello,", "case_sensitive": False},
|
||||
{
|
||||
"text": "Hello, World!! Hello, Earth!!",
|
||||
"pattern": "Hello, (\\S+)",
|
||||
"group": 1,
|
||||
"find_all": False,
|
||||
},
|
||||
{
|
||||
"text": "Hello, World!! Hello, Earth!!",
|
||||
"pattern": "Hello, (\\S+)",
|
||||
"group": 1,
|
||||
"find_all": True,
|
||||
},
|
||||
],
|
||||
test_output=[
|
||||
("positive", "World!"),
|
||||
("positive", "Hello, World!"),
|
||||
("negative", "Hello, World!"),
|
||||
("positive", "Hello,"),
|
||||
("positive", "World!!"),
|
||||
("positive", "World!!"),
|
||||
("positive", "Earth!!"),
|
||||
],
|
||||
)
|
||||
|
||||
@@ -107,15 +121,21 @@ class ExtractTextInformationBlock(Block):
|
||||
flags = flags | re.DOTALL
|
||||
|
||||
if isinstance(input_data.text, str):
|
||||
text = input_data.text
|
||||
txt = input_data.text
|
||||
else:
|
||||
text = json.dumps(input_data.text)
|
||||
txt = json.dumps(input_data.text)
|
||||
|
||||
match = re.search(input_data.pattern, text, flags)
|
||||
if match and input_data.group <= len(match.groups()):
|
||||
yield "positive", match.group(input_data.group)
|
||||
else:
|
||||
yield "negative", text
|
||||
matches = [
|
||||
match.group(input_data.group)
|
||||
for match in re.finditer(input_data.pattern, txt, flags)
|
||||
if input_data.group <= len(match.groups())
|
||||
]
|
||||
for match in matches:
|
||||
yield "positive", match
|
||||
if not input_data.find_all:
|
||||
return
|
||||
if not matches:
|
||||
yield "negative", input_data.text
|
||||
|
||||
|
||||
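The new find_all path above is a thin wrapper around re.finditer; a standalone sketch of the same matching logic using only the standard library, mirroring the block's test cases:

import re

text = "Hello, World!! Hello, Earth!!"
pattern = r"Hello, (\S+)"

matches = [m.group(1) for m in re.finditer(pattern, text)]
# matches == ["World!!", "Earth!!"]
# With find_all=False the block stops after the first yield, i.e. only "World!!".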
class FillTextTemplateBlock(Block):
|
||||
@@ -146,19 +166,20 @@ class FillTextTemplateBlock(Block):
|
||||
"values": {"list": ["Hello", " World!"]},
|
||||
"format": "{% for item in list %}{{ item }}{% endfor %}",
|
||||
},
|
||||
{
|
||||
"values": {},
|
||||
"format": "{% set name = 'Alice' %}Hello, World! {{ name }}",
|
||||
},
|
||||
],
|
||||
test_output=[
|
||||
("output", "Hello, World! Alice"),
|
||||
("output", "Hello World!"),
|
||||
("output", "Hello, World! Alice"),
|
||||
],
|
||||
)
|
||||
|
||||
def run(self, input_data: Input, **kwargs) -> BlockOutput:
|
||||
# For python.format compatibility: replace all {...} with {{..}}.
|
||||
# But avoid replacing {{...}} to {{{...}}}.
|
||||
fmt = re.sub(r"(?<!{){[ a-zA-Z0-9_]+}", r"{\g<0>}", input_data.format)
|
||||
template = jinja.from_string(fmt)
|
||||
yield "output", template.render(**input_data.values)
|
||||
yield "output", formatter.format_string(input_data.format, input_data.values)
|
||||
|
||||
|
||||
class CombineTextsBlock(Block):
|
||||
|
||||
@@ -9,6 +9,7 @@ from backend.data.model import (
|
||||
CredentialsMetaInput,
|
||||
SchemaField,
|
||||
)
|
||||
from backend.integrations.providers import ProviderName
|
||||
from backend.util.request import requests
|
||||
|
||||
TEST_CREDENTIALS = APIKeyCredentials(
|
||||
@@ -38,10 +39,8 @@ class UnrealTextToSpeechBlock(Block):
|
||||
default="Scarlett",
|
||||
)
|
||||
credentials: CredentialsMetaInput[
|
||||
Literal["unreal_speech"], Literal["api_key"]
|
||||
Literal[ProviderName.UNREAL_SPEECH], Literal["api_key"]
|
||||
] = CredentialsField(
|
||||
provider="unreal_speech",
|
||||
supported_credential_types={"api_key"},
|
||||
description="The Unreal Speech integration can be used with "
|
||||
"any API key with sufficient permissions for the blocks it is used on.",
|
||||
)
|
||||
|
||||
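Both credential-field hunks above follow the same migration: the provider in the first CredentialsMetaInput type parameter moves from a plain string literal to the ProviderName enum. A condensed sketch of the resulting shape (MyBlockInput is a hypothetical name, not from this PR):

from typing import Literal

from backend.data.block import BlockSchema
from backend.data.model import CredentialsField, CredentialsMetaInput
from backend.integrations.providers import ProviderName


class MyBlockInput(BlockSchema):  # hypothetical example schema
    credentials: CredentialsMetaInput[
        Literal[ProviderName.UNREAL_SPEECH], Literal["api_key"]
    ] = CredentialsField(
        description="Any API key with sufficient permissions for this block.",
    )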
autogpt_platform/backend/backend/blocks/twitter/_auth.py (new file, 60 lines)
@@ -0,0 +1,60 @@
from typing import Literal

from pydantic import SecretStr

from backend.data.model import (
    CredentialsField,
    CredentialsMetaInput,
    OAuth2Credentials,
    ProviderName,
)
from backend.integrations.oauth.twitter import TwitterOAuthHandler
from backend.util.settings import Secrets

# --8<-- [start:TwitterOAuthIsConfigured]
secrets = Secrets()
TWITTER_OAUTH_IS_CONFIGURED = bool(
    secrets.twitter_client_id and secrets.twitter_client_secret
)
# --8<-- [end:TwitterOAuthIsConfigured]

TwitterCredentials = OAuth2Credentials
TwitterCredentialsInput = CredentialsMetaInput[
    Literal[ProviderName.TWITTER], Literal["oauth2"]
]


# Currently we request all permissions from the Twitter API up front.
# If we need incremental permissions in the future, we can use these requested scopes.
def TwitterCredentialsField(scopes: list[str]) -> TwitterCredentialsInput:
    """
    Creates a Twitter credentials input on a block.

    Params:
        scopes: The authorization scopes needed for the block to work.
    """
    return CredentialsField(
        # required_scopes=set(scopes),
        required_scopes=set(TwitterOAuthHandler.DEFAULT_SCOPES + scopes),
        description="The Twitter integration requires OAuth2 authentication.",
    )


TEST_CREDENTIALS = OAuth2Credentials(
    id="01234567-89ab-cdef-0123-456789abcdef",
    provider="twitter",
    access_token=SecretStr("mock-twitter-access-token"),
    refresh_token=SecretStr("mock-twitter-refresh-token"),
    access_token_expires_at=1234567890,
    scopes=["tweet.read", "tweet.write", "users.read", "offline.access"],
    title="Mock Twitter OAuth2 Credentials",
    username="mock-twitter-username",
    refresh_token_expires_at=1234567890,
)

TEST_CREDENTIALS_INPUT = {
    "provider": TEST_CREDENTIALS.provider,
    "id": TEST_CREDENTIALS.id,
    "type": TEST_CREDENTIALS.type,
    "title": TEST_CREDENTIALS.title,
}
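A minimal sketch (not from this PR) of how a Twitter block input schema would be expected to use these helpers; TweetPosterInput is a hypothetical name:

from backend.data.block import BlockSchema

from backend.blocks.twitter._auth import (
    TwitterCredentialsField,
    TwitterCredentialsInput,
)


class TweetPosterInput(BlockSchema):  # hypothetical example schema
    credentials: TwitterCredentialsInput = TwitterCredentialsField(
        ["tweet.write", "users.read", "offline.access"]
    )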
autogpt_platform/backend/backend/blocks/twitter/_builders.py (new file, 418 lines)
@@ -0,0 +1,418 @@
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict
|
||||
|
||||
from backend.blocks.twitter._mappers import (
|
||||
get_backend_expansion,
|
||||
get_backend_field,
|
||||
get_backend_list_expansion,
|
||||
get_backend_list_field,
|
||||
get_backend_media_field,
|
||||
get_backend_place_field,
|
||||
get_backend_poll_field,
|
||||
get_backend_space_expansion,
|
||||
get_backend_space_field,
|
||||
get_backend_user_field,
|
||||
)
|
||||
from backend.blocks.twitter._types import ( # DMEventFieldFilter,
|
||||
DMEventExpansionFilter,
|
||||
DMEventTypeFilter,
|
||||
DMMediaFieldFilter,
|
||||
DMTweetFieldFilter,
|
||||
ExpansionFilter,
|
||||
ListExpansionsFilter,
|
||||
ListFieldsFilter,
|
||||
SpaceExpansionsFilter,
|
||||
SpaceFieldsFilter,
|
||||
TweetFieldsFilter,
|
||||
TweetMediaFieldsFilter,
|
||||
TweetPlaceFieldsFilter,
|
||||
TweetPollFieldsFilter,
|
||||
TweetReplySettingsFilter,
|
||||
TweetUserFieldsFilter,
|
||||
UserExpansionsFilter,
|
||||
)
|
||||
|
||||
|
||||
# Common Builder
|
||||
class TweetExpansionsBuilder:
|
||||
def __init__(self, param: Dict[str, Any]):
|
||||
self.params: Dict[str, Any] = param
|
||||
|
||||
def add_expansions(self, expansions: ExpansionFilter | None):
|
||||
if expansions:
|
||||
filtered_expansions = [
|
||||
name for name, value in expansions.dict().items() if value is True
|
||||
]
|
||||
|
||||
if filtered_expansions:
|
||||
self.params["expansions"] = ",".join(
|
||||
[get_backend_expansion(exp) for exp in filtered_expansions]
|
||||
)
|
||||
|
||||
return self
|
||||
|
||||
def add_media_fields(self, media_fields: TweetMediaFieldsFilter | None):
|
||||
if media_fields:
|
||||
filtered_fields = [
|
||||
name for name, value in media_fields.dict().items() if value is True
|
||||
]
|
||||
if filtered_fields:
|
||||
self.params["media.fields"] = ",".join(
|
||||
[get_backend_media_field(field) for field in filtered_fields]
|
||||
)
|
||||
return self
|
||||
|
||||
def add_place_fields(self, place_fields: TweetPlaceFieldsFilter | None):
|
||||
if place_fields:
|
||||
filtered_fields = [
|
||||
name for name, value in place_fields.dict().items() if value is True
|
||||
]
|
||||
if filtered_fields:
|
||||
self.params["place.fields"] = ",".join(
|
||||
[get_backend_place_field(field) for field in filtered_fields]
|
||||
)
|
||||
return self
|
||||
|
||||
def add_poll_fields(self, poll_fields: TweetPollFieldsFilter | None):
|
||||
if poll_fields:
|
||||
filtered_fields = [
|
||||
name for name, value in poll_fields.dict().items() if value is True
|
||||
]
|
||||
if filtered_fields:
|
||||
self.params["poll.fields"] = ",".join(
|
||||
[get_backend_poll_field(field) for field in filtered_fields]
|
||||
)
|
||||
return self
|
||||
|
||||
def add_tweet_fields(self, tweet_fields: TweetFieldsFilter | None):
|
||||
if tweet_fields:
|
||||
filtered_fields = [
|
||||
name for name, value in tweet_fields.dict().items() if value is True
|
||||
]
|
||||
if filtered_fields:
|
||||
self.params["tweet.fields"] = ",".join(
|
||||
[get_backend_field(field) for field in filtered_fields]
|
||||
)
|
||||
return self
|
||||
|
||||
def add_user_fields(self, user_fields: TweetUserFieldsFilter | None):
|
||||
if user_fields:
|
||||
filtered_fields = [
|
||||
name for name, value in user_fields.dict().items() if value is True
|
||||
]
|
||||
if filtered_fields:
|
||||
self.params["user.fields"] = ",".join(
|
||||
[get_backend_user_field(field) for field in filtered_fields]
|
||||
)
|
||||
return self
|
||||
|
||||
def build(self):
|
||||
return self.params
|
||||
|
||||
|
||||
class UserExpansionsBuilder:
|
||||
def __init__(self, param: Dict[str, Any]):
|
||||
self.params: Dict[str, Any] = param
|
||||
|
||||
def add_expansions(self, expansions: UserExpansionsFilter | None):
|
||||
if expansions:
|
||||
filtered_expansions = [
|
||||
name for name, value in expansions.dict().items() if value is True
|
||||
]
|
||||
if filtered_expansions:
|
||||
self.params["expansions"] = ",".join(filtered_expansions)
|
||||
return self
|
||||
|
||||
def add_tweet_fields(self, tweet_fields: TweetFieldsFilter | None):
|
||||
if tweet_fields:
|
||||
filtered_fields = [
|
||||
name for name, value in tweet_fields.dict().items() if value is True
|
||||
]
|
||||
if filtered_fields:
|
||||
self.params["tweet.fields"] = ",".join(
|
||||
[get_backend_field(field) for field in filtered_fields]
|
||||
)
|
||||
return self
|
||||
|
||||
def add_user_fields(self, user_fields: TweetUserFieldsFilter | None):
|
||||
if user_fields:
|
||||
filtered_fields = [
|
||||
name for name, value in user_fields.dict().items() if value is True
|
||||
]
|
||||
if filtered_fields:
|
||||
self.params["user.fields"] = ",".join(
|
||||
[get_backend_user_field(field) for field in filtered_fields]
|
||||
)
|
||||
return self
|
||||
|
||||
def build(self):
|
||||
return self.params
|
||||
|
||||
|
||||
class ListExpansionsBuilder:
|
||||
def __init__(self, param: Dict[str, Any]):
|
||||
self.params: Dict[str, Any] = param
|
||||
|
||||
def add_expansions(self, expansions: ListExpansionsFilter | None):
|
||||
if expansions:
|
||||
filtered_expansions = [
|
||||
name for name, value in expansions.dict().items() if value is True
|
||||
]
|
||||
if filtered_expansions:
|
||||
self.params["expansions"] = ",".join(
|
||||
[get_backend_list_expansion(exp) for exp in filtered_expansions]
|
||||
)
|
||||
return self
|
||||
|
||||
def add_list_fields(self, list_fields: ListFieldsFilter | None):
|
||||
if list_fields:
|
||||
filtered_fields = [
|
||||
name for name, value in list_fields.dict().items() if value is True
|
||||
]
|
||||
if filtered_fields:
|
||||
self.params["list.fields"] = ",".join(
|
||||
[get_backend_list_field(field) for field in filtered_fields]
|
||||
)
|
||||
return self
|
||||
|
||||
def add_user_fields(self, user_fields: TweetUserFieldsFilter | None):
|
||||
if user_fields:
|
||||
filtered_fields = [
|
||||
name for name, value in user_fields.dict().items() if value is True
|
||||
]
|
||||
if filtered_fields:
|
||||
self.params["user.fields"] = ",".join(
|
||||
[get_backend_user_field(field) for field in filtered_fields]
|
||||
)
|
||||
return self
|
||||
|
||||
def build(self):
|
||||
return self.params
|
||||
|
||||
|
||||
class SpaceExpansionsBuilder:
|
||||
def __init__(self, param: Dict[str, Any]):
|
||||
self.params: Dict[str, Any] = param
|
||||
|
||||
def add_expansions(self, expansions: SpaceExpansionsFilter | None):
|
||||
if expansions:
|
||||
filtered_expansions = [
|
||||
name for name, value in expansions.dict().items() if value is True
|
||||
]
|
||||
if filtered_expansions:
|
||||
self.params["expansions"] = ",".join(
|
||||
[get_backend_space_expansion(exp) for exp in filtered_expansions]
|
||||
)
|
||||
return self
|
||||
|
||||
def add_space_fields(self, space_fields: SpaceFieldsFilter | None):
|
||||
if space_fields:
|
||||
filtered_fields = [
|
||||
name for name, value in space_fields.dict().items() if value is True
|
||||
]
|
||||
if filtered_fields:
|
||||
self.params["space.fields"] = ",".join(
|
||||
[get_backend_space_field(field) for field in filtered_fields]
|
||||
)
|
||||
return self
|
||||
|
||||
def add_user_fields(self, user_fields: TweetUserFieldsFilter | None):
|
||||
if user_fields:
|
||||
filtered_fields = [
|
||||
name for name, value in user_fields.dict().items() if value is True
|
||||
]
|
||||
if filtered_fields:
|
||||
self.params["user.fields"] = ",".join(
|
||||
[get_backend_user_field(field) for field in filtered_fields]
|
||||
)
|
||||
return self
|
||||
|
||||
def build(self):
|
||||
return self.params
|
||||
|
||||
|
||||
class TweetDurationBuilder:
|
||||
def __init__(self, param: Dict[str, Any]):
|
||||
self.params: Dict[str, Any] = param
|
||||
|
||||
def add_start_time(self, start_time: datetime | None):
|
||||
if start_time:
|
||||
self.params["start_time"] = start_time
|
||||
return self
|
||||
|
||||
def add_end_time(self, end_time: datetime | None):
|
||||
if end_time:
|
||||
self.params["end_time"] = end_time
|
||||
return self
|
||||
|
||||
def add_since_id(self, since_id: str | None):
|
||||
if since_id:
|
||||
self.params["since_id"] = since_id
|
||||
return self
|
||||
|
||||
def add_until_id(self, until_id: str | None):
|
||||
if until_id:
|
||||
self.params["until_id"] = until_id
|
||||
return self
|
||||
|
||||
def add_sort_order(self, sort_order: str | None):
|
||||
if sort_order:
|
||||
self.params["sort_order"] = sort_order
|
||||
return self
|
||||
|
||||
def build(self):
|
||||
return self.params
|
||||
|
||||
|
||||
class DMExpansionsBuilder:
|
||||
def __init__(self, param: Dict[str, Any]):
|
||||
self.params: Dict[str, Any] = param
|
||||
|
||||
def add_expansions(self, expansions: DMEventExpansionFilter):
|
||||
if expansions:
|
||||
filtered_expansions = [
|
||||
name for name, value in expansions.dict().items() if value is True
|
||||
]
|
||||
if filtered_expansions:
|
||||
self.params["expansions"] = ",".join(filtered_expansions)
|
||||
return self
|
||||
|
||||
def add_event_types(self, event_types: DMEventTypeFilter):
|
||||
if event_types:
|
||||
filtered_types = [
|
||||
name for name, value in event_types.dict().items() if value is True
|
||||
]
|
||||
if filtered_types:
|
||||
self.params["event_types"] = ",".join(filtered_types)
|
||||
return self
|
||||
|
||||
def add_media_fields(self, media_fields: DMMediaFieldFilter):
|
||||
if media_fields:
|
||||
filtered_fields = [
|
||||
name for name, value in media_fields.dict().items() if value is True
|
||||
]
|
||||
if filtered_fields:
|
||||
self.params["media.fields"] = ",".join(filtered_fields)
|
||||
return self
|
||||
|
||||
def add_tweet_fields(self, tweet_fields: DMTweetFieldFilter):
|
||||
if tweet_fields:
|
||||
filtered_fields = [
|
||||
name for name, value in tweet_fields.dict().items() if value is True
|
||||
]
|
||||
if filtered_fields:
|
||||
self.params["tweet.fields"] = ",".join(filtered_fields)
|
||||
return self
|
||||
|
||||
def add_user_fields(self, user_fields: TweetUserFieldsFilter):
|
||||
if user_fields:
|
||||
filtered_fields = [
|
||||
name for name, value in user_fields.dict().items() if value is True
|
||||
]
|
||||
if filtered_fields:
|
||||
self.params["user.fields"] = ",".join(filtered_fields)
|
||||
return self
|
||||
|
||||
def build(self):
|
||||
return self.params
|
||||
|
||||
|
||||
# Specific Builders
|
||||
class TweetSearchBuilder:
|
||||
def __init__(self):
|
||||
self.params: Dict[str, Any] = {"user_auth": False}
|
||||
|
||||
def add_query(self, query: str):
|
||||
if query:
|
||||
self.params["query"] = query
|
||||
return self
|
||||
|
||||
def add_pagination(self, max_results: int, pagination: str | None):
|
||||
if max_results:
|
||||
self.params["max_results"] = max_results
|
||||
if pagination:
|
||||
self.params["pagination_token"] = pagination
|
||||
return self
|
||||
|
||||
def build(self):
|
||||
return self.params
|
||||
|
||||
|
||||
class TweetPostBuilder:
|
||||
def __init__(self):
|
||||
self.params: Dict[str, Any] = {"user_auth": False}
|
||||
|
||||
def add_text(self, text: str):
|
||||
if text:
|
||||
self.params["text"] = text
|
||||
return self
|
||||
|
||||
def add_media(self, media_ids: list, tagged_user_ids: list):
|
||||
if media_ids:
|
||||
self.params["media_ids"] = media_ids
|
||||
if tagged_user_ids:
|
||||
self.params["media_tagged_user_ids"] = tagged_user_ids
|
||||
return self
|
||||
|
||||
def add_deep_link(self, link: str):
|
||||
if link:
|
||||
self.params["direct_message_deep_link"] = link
|
||||
return self
|
||||
|
||||
def add_super_followers(self, for_super_followers: bool):
|
||||
if for_super_followers:
|
||||
self.params["for_super_followers_only"] = for_super_followers
|
||||
return self
|
||||
|
||||
def add_place(self, place_id: str):
|
||||
if place_id:
|
||||
self.params["place_id"] = place_id
|
||||
return self
|
||||
|
||||
def add_poll_options(self, poll_options: list):
|
||||
if poll_options:
|
||||
self.params["poll_options"] = poll_options
|
||||
return self
|
||||
|
||||
def add_poll_duration(self, poll_duration_minutes: int):
|
||||
if poll_duration_minutes:
|
||||
self.params["poll_duration_minutes"] = poll_duration_minutes
|
||||
return self
|
||||
|
||||
def add_quote(self, quote_id: str):
|
||||
if quote_id:
|
||||
self.params["quote_tweet_id"] = quote_id
|
||||
return self
|
||||
|
||||
def add_reply_settings(
|
||||
self,
|
||||
exclude_user_ids: list,
|
||||
reply_to_id: str,
|
||||
settings: TweetReplySettingsFilter,
|
||||
):
|
||||
if exclude_user_ids:
|
||||
self.params["exclude_reply_user_ids"] = exclude_user_ids
|
||||
if reply_to_id:
|
||||
self.params["in_reply_to_tweet_id"] = reply_to_id
|
||||
if settings.All_Users:
|
||||
self.params["reply_settings"] = None
|
||||
elif settings.Following_Users_Only:
|
||||
self.params["reply_settings"] = "following"
|
||||
elif settings.Mentioned_Users_Only:
|
||||
self.params["reply_settings"] = "mentionedUsers"
|
||||
return self
|
||||
|
||||
def build(self):
|
||||
return self.params
|
||||
|
||||
|
||||
class TweetGetsBuilder:
|
||||
def __init__(self):
|
||||
self.params: Dict[str, Any] = {"user_auth": False}
|
||||
|
||||
def add_id(self, tweet_id: list[str]):
|
||||
self.params["id"] = tweet_id
|
||||
return self
|
||||
|
||||
def build(self):
|
||||
return self.params
|
||||
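As a quick illustration (editorial, not part of the diff), these builders are chainable assemblers of parameter dicts for the Twitter API calls. Under the field mappings defined in _mappers.py, a search request could be put together like this; the query string is illustrative:

params = (
    TweetSearchBuilder()
    .add_query("from:nasa")
    .add_pagination(10, None)
    .build()
)
# params == {"user_auth": False, "query": "from:nasa", "max_results": 10}

params = (
    TweetExpansionsBuilder(params)
    .add_expansions(ExpansionFilter(Author_User_ID=True))
    .add_user_fields(TweetUserFieldsFilter(Username=True))
    .build()
)
# Adds "expansions": "author_id" and "user.fields": "username" to the same dict.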
autogpt_platform/backend/backend/blocks/twitter/_mappers.py (new file, 234 lines)
@@ -0,0 +1,234 @@
|
||||
# -------------- Tweets -----------------
|
||||
|
||||
# Tweet Expansions
|
||||
EXPANSION_FRONTEND_TO_BACKEND_MAPPING = {
|
||||
"Poll_IDs": "attachments.poll_ids",
|
||||
"Media_Keys": "attachments.media_keys",
|
||||
"Author_User_ID": "author_id",
|
||||
"Edit_History_Tweet_IDs": "edit_history_tweet_ids",
|
||||
"Mentioned_Usernames": "entities.mentions.username",
|
||||
"Place_ID": "geo.place_id",
|
||||
"Reply_To_User_ID": "in_reply_to_user_id",
|
||||
"Referenced_Tweet_ID": "referenced_tweets.id",
|
||||
"Referenced_Tweet_Author_ID": "referenced_tweets.id.author_id",
|
||||
}
|
||||
|
||||
|
||||
def get_backend_expansion(frontend_key: str) -> str:
|
||||
result = EXPANSION_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key)
|
||||
if result is None:
|
||||
raise KeyError(f"Invalid expansion key: {frontend_key}")
|
||||
return result
|
||||
|
||||
|
||||
# TweetReplySettings
|
||||
REPLY_SETTINGS_FRONTEND_TO_BACKEND_MAPPING = {
|
||||
"Mentioned_Users_Only": "mentionedUsers",
|
||||
"Following_Users_Only": "following",
|
||||
"All_Users": "all",
|
||||
}
|
||||
|
||||
|
||||
# TweetUserFields
|
||||
def get_backend_reply_setting(frontend_key: str) -> str:
|
||||
result = REPLY_SETTINGS_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key)
|
||||
if result is None:
|
||||
raise KeyError(f"Invalid reply setting key: {frontend_key}")
|
||||
return result
|
||||
|
||||
|
||||
USER_FIELDS_FRONTEND_TO_BACKEND_MAPPING = {
|
||||
"Account_Creation_Date": "created_at",
|
||||
"User_Bio": "description",
|
||||
"User_Entities": "entities",
|
||||
"User_ID": "id",
|
||||
"User_Location": "location",
|
||||
"Latest_Tweet_ID": "most_recent_tweet_id",
|
||||
"Display_Name": "name",
|
||||
"Pinned_Tweet_ID": "pinned_tweet_id",
|
||||
"Profile_Picture_URL": "profile_image_url",
|
||||
"Is_Protected_Account": "protected",
|
||||
"Account_Statistics": "public_metrics",
|
||||
"Profile_URL": "url",
|
||||
"Username": "username",
|
||||
"Is_Verified": "verified",
|
||||
"Verification_Type": "verified_type",
|
||||
"Content_Withholding_Info": "withheld",
|
||||
}
|
||||
|
||||
|
||||
def get_backend_user_field(frontend_key: str) -> str:
|
||||
result = USER_FIELDS_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key)
|
||||
if result is None:
|
||||
raise KeyError(f"Invalid user field key: {frontend_key}")
|
||||
return result
|
||||
|
||||
|
||||
# TweetFields
|
||||
FIELDS_FRONTEND_TO_BACKEND_MAPPING = {
|
||||
"Tweet_Attachments": "attachments",
|
||||
"Author_ID": "author_id",
|
||||
"Context_Annotations": "context_annotations",
|
||||
"Conversation_ID": "conversation_id",
|
||||
"Creation_Time": "created_at",
|
||||
"Edit_Controls": "edit_controls",
|
||||
"Tweet_Entities": "entities",
|
||||
"Geographic_Location": "geo",
|
||||
"Tweet_ID": "id",
|
||||
"Reply_To_User_ID": "in_reply_to_user_id",
|
||||
"Language": "lang",
|
||||
"Public_Metrics": "public_metrics",
|
||||
"Sensitive_Content_Flag": "possibly_sensitive",
|
||||
"Referenced_Tweets": "referenced_tweets",
|
||||
"Reply_Settings": "reply_settings",
|
||||
"Tweet_Source": "source",
|
||||
"Tweet_Text": "text",
|
||||
"Withheld_Content": "withheld",
|
||||
}
|
||||
|
||||
|
||||
def get_backend_field(frontend_key: str) -> str:
|
||||
result = FIELDS_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key)
|
||||
if result is None:
|
||||
raise KeyError(f"Invalid field key: {frontend_key}")
|
||||
return result
|
||||
|
||||
|
||||
# TweetPollFields
|
||||
POLL_FIELDS_FRONTEND_TO_BACKEND_MAPPING = {
|
||||
"Duration_Minutes": "duration_minutes",
|
||||
"End_DateTime": "end_datetime",
|
||||
"Poll_ID": "id",
|
||||
"Poll_Options": "options",
|
||||
"Voting_Status": "voting_status",
|
||||
}
|
||||
|
||||
|
||||
def get_backend_poll_field(frontend_key: str) -> str:
|
||||
result = POLL_FIELDS_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key)
|
||||
if result is None:
|
||||
raise KeyError(f"Invalid poll field key: {frontend_key}")
|
||||
return result
|
||||
|
||||
|
||||
PLACE_FIELDS_FRONTEND_TO_BACKEND_MAPPING = {
|
||||
"Contained_Within_Places": "contained_within",
|
||||
"Country": "country",
|
||||
"Country_Code": "country_code",
|
||||
"Full_Location_Name": "full_name",
|
||||
"Geographic_Coordinates": "geo",
|
||||
"Place_ID": "id",
|
||||
"Place_Name": "name",
|
||||
"Place_Type": "place_type",
|
||||
}
|
||||
|
||||
|
||||
def get_backend_place_field(frontend_key: str) -> str:
|
||||
result = PLACE_FIELDS_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key)
|
||||
if result is None:
|
||||
raise KeyError(f"Invalid place field key: {frontend_key}")
|
||||
return result
|
||||
|
||||
|
||||
# TweetMediaFields
|
||||
MEDIA_FIELDS_FRONTEND_TO_BACKEND_MAPPING = {
|
||||
"Duration_in_Milliseconds": "duration_ms",
|
||||
"Height": "height",
|
||||
"Media_Key": "media_key",
|
||||
"Preview_Image_URL": "preview_image_url",
|
||||
"Media_Type": "type",
|
||||
"Media_URL": "url",
|
||||
"Width": "width",
|
||||
"Public_Metrics": "public_metrics",
|
||||
"Non_Public_Metrics": "non_public_metrics",
|
||||
"Organic_Metrics": "organic_metrics",
|
||||
"Promoted_Metrics": "promoted_metrics",
|
||||
"Alternative_Text": "alt_text",
|
||||
"Media_Variants": "variants",
|
||||
}
|
||||
|
||||
|
||||
def get_backend_media_field(frontend_key: str) -> str:
|
||||
result = MEDIA_FIELDS_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key)
|
||||
if result is None:
|
||||
raise KeyError(f"Invalid media field key: {frontend_key}")
|
||||
return result
|
||||
|
||||
|
||||
# -------------- Spaces -----------------
|
||||
|
||||
# SpaceExpansions
|
||||
EXPANSION_FRONTEND_TO_BACKEND_MAPPING_SPACE = {
|
||||
"Invited_Users": "invited_user_ids",
|
||||
"Speakers": "speaker_ids",
|
||||
"Creator": "creator_id",
|
||||
"Hosts": "host_ids",
|
||||
"Topics": "topic_ids",
|
||||
}
|
||||
|
||||
|
||||
def get_backend_space_expansion(frontend_key: str) -> str:
|
||||
result = EXPANSION_FRONTEND_TO_BACKEND_MAPPING_SPACE.get(frontend_key)
|
||||
if result is None:
|
||||
raise KeyError(f"Invalid expansion key: {frontend_key}")
|
||||
return result
|
||||
|
||||
|
||||
# SpaceFields
|
||||
SPACE_FIELDS_FRONTEND_TO_BACKEND_MAPPING = {
|
||||
"Space_ID": "id",
|
||||
"Space_State": "state",
|
||||
"Creation_Time": "created_at",
|
||||
"End_Time": "ended_at",
|
||||
"Host_User_IDs": "host_ids",
|
||||
"Language": "lang",
|
||||
"Is_Ticketed": "is_ticketed",
|
||||
"Invited_User_IDs": "invited_user_ids",
|
||||
"Participant_Count": "participant_count",
|
||||
"Subscriber_Count": "subscriber_count",
|
||||
"Scheduled_Start_Time": "scheduled_start",
|
||||
"Speaker_User_IDs": "speaker_ids",
|
||||
"Start_Time": "started_at",
|
||||
"Space_Title": "title",
|
||||
"Topic_IDs": "topic_ids",
|
||||
"Last_Updated_Time": "updated_at",
|
||||
}
|
||||
|
||||
|
||||
def get_backend_space_field(frontend_key: str) -> str:
|
||||
result = SPACE_FIELDS_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key)
|
||||
if result is None:
|
||||
raise KeyError(f"Invalid space field key: {frontend_key}")
|
||||
return result
|
||||
|
||||
|
||||
# -------------- List Expansions -----------------
|
||||
|
||||
# ListExpansions
|
||||
LIST_EXPANSION_FRONTEND_TO_BACKEND_MAPPING = {"List_Owner_ID": "owner_id"}
|
||||
|
||||
|
||||
def get_backend_list_expansion(frontend_key: str) -> str:
|
||||
result = LIST_EXPANSION_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key)
|
||||
if result is None:
|
||||
raise KeyError(f"Invalid list expansion key: {frontend_key}")
|
||||
return result
|
||||
|
||||
|
||||
LIST_FIELDS_FRONTEND_TO_BACKEND_MAPPING = {
|
||||
"List_ID": "id",
|
||||
"List_Name": "name",
|
||||
"Creation_Date": "created_at",
|
||||
"Description": "description",
|
||||
"Follower_Count": "follower_count",
|
||||
"Member_Count": "member_count",
|
||||
"Is_Private": "private",
|
||||
"Owner_ID": "owner_id",
|
||||
}
|
||||
|
||||
|
||||
def get_backend_list_field(frontend_key: str) -> str:
|
||||
result = LIST_FIELDS_FRONTEND_TO_BACKEND_MAPPING.get(frontend_key)
|
||||
if result is None:
|
||||
raise KeyError(f"Invalid list field key: {frontend_key}")
|
||||
return result
|
||||
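For reference, each get_backend_* helper above is a plain dictionary lookup that raises KeyError for unknown frontend keys, e.g.:

get_backend_expansion("Author_User_ID")   # -> "author_id"
get_backend_user_field("Username")        # -> "username"
get_backend_list_field("Follower_Count")  # -> "follower_count"

get_backend_field("Not_A_Field")  # raises KeyError: "Invalid field key: Not_A_Field"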
@@ -0,0 +1,76 @@
from typing import Any, Dict, List


class BaseSerializer:
    @staticmethod
    def _serialize_value(value: Any) -> Any:
        """Helper method to serialize individual values"""
        if hasattr(value, "data"):
            return value.data
        return value


class IncludesSerializer(BaseSerializer):
    @classmethod
    def serialize(cls, includes: Dict[str, Any]) -> Dict[str, Any]:
        """Serializes the includes dictionary"""
        if not includes:
            return {}

        serialized_includes = {}
        for key, value in includes.items():
            if isinstance(value, list):
                serialized_includes[key] = [
                    cls._serialize_value(item) for item in value
                ]
            else:
                serialized_includes[key] = cls._serialize_value(value)

        return serialized_includes


class ResponseDataSerializer(BaseSerializer):
    @classmethod
    def serialize_dict(cls, item: Dict[str, Any]) -> Dict[str, Any]:
        """Serializes a single dictionary item"""
        serialized_item = {}

        if hasattr(item, "__dict__"):
            items = item.__dict__.items()
        else:
            items = item.items()

        for key, value in items:
            if isinstance(value, list):
                serialized_item[key] = [
                    cls._serialize_value(sub_item) for sub_item in value
                ]
            else:
                serialized_item[key] = cls._serialize_value(value)

        return serialized_item

    @classmethod
    def serialize_list(cls, data: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Serializes a list of dictionary items"""
        return [cls.serialize_dict(item) for item in data]


class ResponseSerializer:
    @classmethod
    def serialize(cls, response) -> Dict[str, Any]:
        """Main serializer that handles both data and includes"""
        result = {"data": None, "included": {}}

        # Handle response.data
        if response.data:
            if isinstance(response.data, list):
                result["data"] = ResponseDataSerializer.serialize_list(response.data)
            else:
                result["data"] = ResponseDataSerializer.serialize_dict(response.data)

        # Handle includes
        if hasattr(response, "includes") and response.includes:
            result["included"] = IncludesSerializer.serialize(response.includes)

        return result
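A minimal sketch (not from this PR) of what ResponseSerializer.serialize produces for a response-like object; SimpleNamespace stands in for the real response and included objects and is only illustrative:

from types import SimpleNamespace

response = SimpleNamespace(
    data=[{"id": "1", "text": "hello"}],
    includes={"users": [SimpleNamespace(data={"id": "2", "username": "nasa"})]},
)

serialized = ResponseSerializer.serialize(response)
# serialized == {
#     "data": [{"id": "1", "text": "hello"}],
#     "included": {"users": [{"id": "2", "username": "nasa"}]},
# }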
autogpt_platform/backend/backend/blocks/twitter/_types.py (new file, 443 lines)
@@ -0,0 +1,443 @@
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from backend.data.block import BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
|
||||
# -------------- Tweets -----------------
|
||||
|
||||
|
||||
class TweetReplySettingsFilter(BaseModel):
|
||||
Mentioned_Users_Only: bool = False
|
||||
Following_Users_Only: bool = False
|
||||
All_Users: bool = False
|
||||
|
||||
|
||||
class TweetUserFieldsFilter(BaseModel):
|
||||
Account_Creation_Date: bool = False
|
||||
User_Bio: bool = False
|
||||
User_Entities: bool = False
|
||||
User_ID: bool = False
|
||||
User_Location: bool = False
|
||||
Latest_Tweet_ID: bool = False
|
||||
Display_Name: bool = False
|
||||
Pinned_Tweet_ID: bool = False
|
||||
Profile_Picture_URL: bool = False
|
||||
Is_Protected_Account: bool = False
|
||||
Account_Statistics: bool = False
|
||||
Profile_URL: bool = False
|
||||
Username: bool = False
|
||||
Is_Verified: bool = False
|
||||
Verification_Type: bool = False
|
||||
Content_Withholding_Info: bool = False
|
||||
|
||||
|
||||
class TweetFieldsFilter(BaseModel):
|
||||
Tweet_Attachments: bool = False
|
||||
Author_ID: bool = False
|
||||
Context_Annotations: bool = False
|
||||
Conversation_ID: bool = False
|
||||
Creation_Time: bool = False
|
||||
Edit_Controls: bool = False
|
||||
Tweet_Entities: bool = False
|
||||
Geographic_Location: bool = False
|
||||
Tweet_ID: bool = False
|
||||
Reply_To_User_ID: bool = False
|
||||
Language: bool = False
|
||||
Public_Metrics: bool = False
|
||||
Sensitive_Content_Flag: bool = False
|
||||
Referenced_Tweets: bool = False
|
||||
Reply_Settings: bool = False
|
||||
Tweet_Source: bool = False
|
||||
Tweet_Text: bool = False
|
||||
Withheld_Content: bool = False
|
||||
|
||||
|
||||
class PersonalTweetFieldsFilter(BaseModel):
|
||||
attachments: bool = False
|
||||
author_id: bool = False
|
||||
context_annotations: bool = False
|
||||
conversation_id: bool = False
|
||||
created_at: bool = False
|
||||
edit_controls: bool = False
|
||||
entities: bool = False
|
||||
geo: bool = False
|
||||
id: bool = False
|
||||
in_reply_to_user_id: bool = False
|
||||
lang: bool = False
|
||||
non_public_metrics: bool = False
|
||||
public_metrics: bool = False
|
||||
organic_metrics: bool = False
|
||||
promoted_metrics: bool = False
|
||||
possibly_sensitive: bool = False
|
||||
referenced_tweets: bool = False
|
||||
reply_settings: bool = False
|
||||
source: bool = False
|
||||
text: bool = False
|
||||
withheld: bool = False
|
||||
|
||||
|
||||
class TweetPollFieldsFilter(BaseModel):
|
||||
Duration_Minutes: bool = False
|
||||
End_DateTime: bool = False
|
||||
Poll_ID: bool = False
|
||||
Poll_Options: bool = False
|
||||
Voting_Status: bool = False
|
||||
|
||||
|
||||
class TweetPlaceFieldsFilter(BaseModel):
|
||||
Contained_Within_Places: bool = False
|
||||
Country: bool = False
|
||||
Country_Code: bool = False
|
||||
Full_Location_Name: bool = False
|
||||
Geographic_Coordinates: bool = False
|
||||
Place_ID: bool = False
|
||||
Place_Name: bool = False
|
||||
Place_Type: bool = False
|
||||
|
||||
|
||||
class TweetMediaFieldsFilter(BaseModel):
|
||||
Duration_in_Milliseconds: bool = False
|
||||
Height: bool = False
|
||||
Media_Key: bool = False
|
||||
Preview_Image_URL: bool = False
|
||||
Media_Type: bool = False
|
||||
Media_URL: bool = False
|
||||
Width: bool = False
|
||||
Public_Metrics: bool = False
|
||||
Non_Public_Metrics: bool = False
|
||||
Organic_Metrics: bool = False
|
||||
Promoted_Metrics: bool = False
|
||||
Alternative_Text: bool = False
|
||||
Media_Variants: bool = False
|
||||
|
||||
|
||||
class ExpansionFilter(BaseModel):
|
||||
Poll_IDs: bool = False
|
||||
Media_Keys: bool = False
|
||||
Author_User_ID: bool = False
|
||||
Edit_History_Tweet_IDs: bool = False
|
||||
Mentioned_Usernames: bool = False
|
||||
Place_ID: bool = False
|
||||
Reply_To_User_ID: bool = False
|
||||
Referenced_Tweet_ID: bool = False
|
||||
Referenced_Tweet_Author_ID: bool = False
|
||||
|
||||
|
||||
class TweetExcludesFilter(BaseModel):
|
||||
retweets: bool = False
|
||||
replies: bool = False
|
||||
|
||||
|
||||
# -------------- Users -----------------
|
||||
|
||||
|
||||
class UserExpansionsFilter(BaseModel):
|
||||
pinned_tweet_id: bool = False
|
||||
|
||||
|
||||
# -------------- DMs -----------------
|
||||
|
||||
|
||||
class DMEventFieldFilter(BaseModel):
|
||||
id: bool = False
|
||||
text: bool = False
|
||||
event_type: bool = False
|
||||
created_at: bool = False
|
||||
dm_conversation_id: bool = False
|
||||
sender_id: bool = False
|
||||
participant_ids: bool = False
|
||||
referenced_tweets: bool = False
|
||||
attachments: bool = False
|
||||
|
||||
|
||||
class DMEventTypeFilter(BaseModel):
|
||||
MessageCreate: bool = False
|
||||
ParticipantsJoin: bool = False
|
||||
ParticipantsLeave: bool = False
|
||||
|
||||
|
||||
class DMEventExpansionFilter(BaseModel):
|
||||
attachments_media_keys: bool = False
|
||||
referenced_tweets_id: bool = False
|
||||
sender_id: bool = False
|
||||
participant_ids: bool = False
|
||||
|
||||
|
||||
class DMMediaFieldFilter(BaseModel):
|
||||
duration_ms: bool = False
|
||||
height: bool = False
|
||||
media_key: bool = False
|
||||
preview_image_url: bool = False
|
||||
type: bool = False
|
||||
url: bool = False
|
||||
width: bool = False
|
||||
public_metrics: bool = False
|
||||
alt_text: bool = False
|
||||
variants: bool = False
|
||||
|
||||
|
||||
class DMTweetFieldFilter(BaseModel):
|
||||
attachments: bool = False
|
||||
author_id: bool = False
|
||||
context_annotations: bool = False
|
||||
conversation_id: bool = False
|
||||
created_at: bool = False
|
||||
edit_controls: bool = False
|
||||
entities: bool = False
|
||||
geo: bool = False
|
||||
id: bool = False
|
||||
in_reply_to_user_id: bool = False
|
||||
lang: bool = False
|
||||
public_metrics: bool = False
|
||||
possibly_sensitive: bool = False
|
||||
referenced_tweets: bool = False
|
||||
reply_settings: bool = False
|
||||
source: bool = False
|
||||
text: bool = False
|
||||
withheld: bool = False
|
||||
|
||||
|
||||
# -------------- Spaces -----------------
|
||||
|
||||
|
||||
class SpaceExpansionsFilter(BaseModel):
|
||||
Invited_Users: bool = False
|
||||
Speakers: bool = False
|
||||
Creator: bool = False
|
||||
Hosts: bool = False
|
||||
Topics: bool = False
|
||||
|
||||
|
||||
class SpaceFieldsFilter(BaseModel):
|
||||
Space_ID: bool = False
|
||||
Space_State: bool = False
|
||||
Creation_Time: bool = False
|
||||
End_Time: bool = False
|
||||
Host_User_IDs: bool = False
|
||||
Language: bool = False
|
||||
Is_Ticketed: bool = False
|
||||
Invited_User_IDs: bool = False
|
||||
Participant_Count: bool = False
|
||||
Subscriber_Count: bool = False
|
||||
Scheduled_Start_Time: bool = False
|
||||
Speaker_User_IDs: bool = False
|
||||
Start_Time: bool = False
|
||||
Space_Title: bool = False
|
||||
Topic_IDs: bool = False
|
||||
Last_Updated_Time: bool = False
|
||||
|
||||
|
||||
class SpaceStatesFilter(str, Enum):
|
||||
live = "live"
|
||||
scheduled = "scheduled"
|
||||
all = "all"
|
||||
|
||||
|
||||
# -------------- List Expansions -----------------
|
||||
|
||||
|
||||
class ListExpansionsFilter(BaseModel):
|
||||
List_Owner_ID: bool = False
|
||||
|
||||
|
||||
class ListFieldsFilter(BaseModel):
|
||||
List_ID: bool = False
|
||||
List_Name: bool = False
|
||||
Creation_Date: bool = False
|
||||
Description: bool = False
|
||||
Follower_Count: bool = False
|
||||
Member_Count: bool = False
|
||||
Is_Private: bool = False
|
||||
Owner_ID: bool = False
|
||||
|
||||
|
||||
# --------- [Input Types] -------------
|
||||
class TweetExpansionInputs(BlockSchema):
|
||||
|
||||
expansions: ExpansionFilter | None = SchemaField(
|
||||
description="Choose what extra information you want to get with your tweets. For example:\n- Select 'Media_Keys' to get media details\n- Select 'Author_User_ID' to get user information\n- Select 'Place_ID' to get location details",
|
||||
placeholder="Pick the extra information you want to see",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
media_fields: TweetMediaFieldsFilter | None = SchemaField(
|
||||
description="Select what media information you want to see (images, videos, etc). To use this, you must first select 'Media_Keys' in the expansions above.",
|
||||
placeholder="Choose what media details you want to see",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
place_fields: TweetPlaceFieldsFilter | None = SchemaField(
|
||||
description="Select what location information you want to see (country, coordinates, etc). To use this, you must first select 'Place_ID' in the expansions above.",
|
||||
placeholder="Choose what location details you want to see",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
poll_fields: TweetPollFieldsFilter | None = SchemaField(
|
||||
description="Select what poll information you want to see (options, voting status, etc). To use this, you must first select 'Poll_IDs' in the expansions above.",
|
||||
placeholder="Choose what poll details you want to see",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
tweet_fields: TweetFieldsFilter | None = SchemaField(
|
||||
description="Select what tweet information you want to see. For referenced tweets (like retweets), select 'Referenced_Tweet_ID' in the expansions above.",
|
||||
placeholder="Choose what tweet details you want to see",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
user_fields: TweetUserFieldsFilter | None = SchemaField(
|
||||
description="Select what user information you want to see. To use this, you must first select one of these in expansions above:\n- 'Author_User_ID' for tweet authors\n- 'Mentioned_Usernames' for mentioned users\n- 'Reply_To_User_ID' for users being replied to\n- 'Referenced_Tweet_Author_ID' for authors of referenced tweets",
|
||||
placeholder="Choose what user details you want to see",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
|
||||
class DMEventExpansionInputs(BlockSchema):
|
||||
expansions: DMEventExpansionFilter | None = SchemaField(
|
||||
description="Select expansions to include related data objects in the 'includes' section.",
|
||||
placeholder="Enter expansions",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
event_types: DMEventTypeFilter | None = SchemaField(
|
||||
description="Select DM event types to include in the response.",
|
||||
placeholder="Enter event types",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
media_fields: DMMediaFieldFilter | None = SchemaField(
|
||||
description="Select media fields to include in the response (requires expansions=attachments.media_keys).",
|
||||
placeholder="Enter media fields",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
tweet_fields: DMTweetFieldFilter | None = SchemaField(
|
||||
description="Select tweet fields to include in the response (requires expansions=referenced_tweets.id).",
|
||||
placeholder="Enter tweet fields",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
user_fields: TweetUserFieldsFilter | None = SchemaField(
|
||||
description="Select user fields to include in the response (requires expansions=sender_id or participant_ids).",
|
||||
placeholder="Enter user fields",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
|
||||
class UserExpansionInputs(BlockSchema):
|
||||
expansions: UserExpansionsFilter | None = SchemaField(
|
||||
description="Choose what extra information you want to get with user data. Currently only 'pinned_tweet_id' is available to see a user's pinned tweet.",
|
||||
placeholder="Select extra user information to include",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
tweet_fields: TweetFieldsFilter | None = SchemaField(
|
||||
description="Select what tweet information you want to see in pinned tweets. This only works if you select 'pinned_tweet_id' in expansions above.",
|
||||
placeholder="Choose what details to see in pinned tweets",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
user_fields: TweetUserFieldsFilter | None = SchemaField(
|
||||
description="Select what user information you want to see, like username, bio, profile picture, etc.",
|
||||
placeholder="Choose what user details you want to see",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
|
||||
class SpaceExpansionInputs(BlockSchema):
|
||||
expansions: SpaceExpansionsFilter | None = SchemaField(
|
||||
description="Choose additional information you want to get with your Twitter Spaces:\n- Select 'Invited_Users' to see who was invited\n- Select 'Speakers' to see who can speak\n- Select 'Creator' to get details about who made the Space\n- Select 'Hosts' to see who's hosting\n- Select 'Topics' to see Space topics",
|
||||
placeholder="Pick what extra information you want to see about the Space",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
space_fields: SpaceFieldsFilter | None = SchemaField(
|
||||
description="Choose what Space details you want to see, such as:\n- Title\n- Start/End times\n- Number of participants\n- Language\n- State (live/scheduled)\n- And more",
|
||||
placeholder="Choose what Space information you want to get",
|
||||
default=SpaceFieldsFilter(Space_Title=True, Host_User_IDs=True),
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
user_fields: TweetUserFieldsFilter | None = SchemaField(
|
||||
description="Choose what user information you want to see. This works when you select any of these in expansions above:\n- 'Creator' for Space creator details\n- 'Hosts' for host information\n- 'Speakers' for speaker details\n- 'Invited_Users' for invited user information",
|
||||
placeholder="Pick what details you want to see about the users",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
|
||||
class ListExpansionInputs(BlockSchema):
|
||||
expansions: ListExpansionsFilter | None = SchemaField(
|
||||
description="Choose what extra information you want to get with your Twitter Lists:\n- Select 'List_Owner_ID' to get details about who owns the list\n\nThis will let you see more details about the list owner when you also select user fields below.",
|
||||
placeholder="Pick what extra list information you want to see",
|
||||
default=ListExpansionsFilter(List_Owner_ID=True),
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
user_fields: TweetUserFieldsFilter | None = SchemaField(
|
||||
description="Choose what information you want to see about list owners. This only works when you select 'List_Owner_ID' in expansions above.\n\nYou can see things like:\n- Their username\n- Profile picture\n- Account details\n- And more",
|
||||
placeholder="Select what details you want to see about list owners",
|
||||
default=TweetUserFieldsFilter(User_ID=True, Username=True),
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
list_fields: ListFieldsFilter | None = SchemaField(
|
||||
description="Choose what information you want to see about the Twitter Lists themselves, such as:\n- List name\n- Description\n- Number of followers\n- Number of members\n- Whether it's private\n- Creation date\n- And more",
|
||||
placeholder="Pick what list details you want to see",
|
||||
default=ListFieldsFilter(Owner_ID=True),
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
|
||||
class TweetTimeWindowInputs(BlockSchema):
|
||||
start_time: datetime | None = SchemaField(
|
||||
description="Start time in YYYY-MM-DDTHH:mm:ssZ format",
|
||||
placeholder="Enter start time",
|
||||
default=None,
|
||||
advanced=False,
|
||||
)
|
||||
|
||||
end_time: datetime | None = SchemaField(
|
||||
description="End time in YYYY-MM-DDTHH:mm:ssZ format",
|
||||
placeholder="Enter end time",
|
||||
default=None,
|
||||
advanced=False,
|
||||
)
|
||||
|
||||
since_id: str | None = SchemaField(
|
||||
description="Returns results with Tweet ID greater than this (more recent than), we give priority to since_id over start_time",
|
||||
placeholder="Enter since ID",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
until_id: str | None = SchemaField(
|
||||
description="Returns results with Tweet ID less than this (that is, older than), and used with since_id",
|
||||
placeholder="Enter until ID",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
sort_order: str | None = SchemaField(
|
||||
description="Order of returned tweets (recency or relevancy)",
|
||||
placeholder="Enter sort order",
|
||||
default=None,
|
||||
advanced=True,
|
||||
)
|
||||
@@ -0,0 +1,201 @@
|
||||
# Todo : Add new Type support
|
||||
|
||||
# from typing import cast
|
||||
# import tweepy
|
||||
# from tweepy.client import Response
|
||||
|
||||
# from backend.blocks.twitter._serializer import IncludesSerializer, ResponseDataSerializer
|
||||
# from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
# from backend.data.model import SchemaField
|
||||
# from backend.blocks.twitter._builders import DMExpansionsBuilder
|
||||
# from backend.blocks.twitter._types import DMEventExpansion, DMEventExpansionInputs, DMEventType, DMMediaField, DMTweetField, TweetUserFields
|
||||
# from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
|
||||
# from backend.blocks.twitter._auth import (
|
||||
# TEST_CREDENTIALS,
|
||||
# TEST_CREDENTIALS_INPUT,
|
||||
# TwitterCredentials,
|
||||
# TwitterCredentialsField,
|
||||
# TwitterCredentialsInput,
|
||||
# )
|
||||
|
||||
# Require Pro or Enterprise plan [Manual Testing Required]
|
||||
# class TwitterGetDMEventsBlock(Block):
|
||||
# """
|
||||
# Gets a list of Direct Message events for the authenticated user
|
||||
# """
|
||||
|
||||
# class Input(DMEventExpansionInputs):
|
||||
# credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
# ["dm.read", "offline.access", "user.read", "tweet.read"]
|
||||
# )
|
||||
|
||||
# dm_conversation_id: str = SchemaField(
|
||||
# description="The ID of the Direct Message conversation",
|
||||
# placeholder="Enter conversation ID",
|
||||
# required=True
|
||||
# )
|
||||
|
||||
# max_results: int = SchemaField(
|
||||
# description="Maximum number of results to return (1-100)",
|
||||
# placeholder="Enter max results",
|
||||
# advanced=True,
|
||||
# default=10,
|
||||
# )
|
||||
|
||||
# pagination_token: str = SchemaField(
|
||||
# description="Token for pagination",
|
||||
# placeholder="Enter pagination token",
|
||||
# advanced=True,
|
||||
# default=""
|
||||
# )
|
||||
|
||||
# class Output(BlockSchema):
|
||||
# # Common outputs
|
||||
# event_ids: list[str] = SchemaField(description="DM Event IDs")
|
||||
# event_texts: list[str] = SchemaField(description="DM Event text contents")
|
||||
# event_types: list[str] = SchemaField(description="Types of DM events")
|
||||
# next_token: str = SchemaField(description="Token for next page of results")
|
||||
|
||||
# # Complete outputs
|
||||
# data: list[dict] = SchemaField(description="Complete DM events data")
|
||||
# included: dict = SchemaField(description="Additional data requested via expansions")
|
||||
# meta: dict = SchemaField(description="Metadata about the response")
|
||||
# error: str = SchemaField(description="Error message if request failed")
|
||||
|
||||
# def __init__(self):
|
||||
# super().__init__(
|
||||
# id="dc37a6d4-a62e-11ef-a3a5-03061375737b",
|
||||
# description="This block retrieves Direct Message events for the authenticated user.",
|
||||
# categories={BlockCategory.SOCIAL},
|
||||
# input_schema=TwitterGetDMEventsBlock.Input,
|
||||
# output_schema=TwitterGetDMEventsBlock.Output,
|
||||
# test_input={
|
||||
# "dm_conversation_id": "1234567890",
|
||||
# "max_results": 10,
|
||||
# "credentials": TEST_CREDENTIALS_INPUT,
|
||||
# "expansions": [],
|
||||
# "event_types": [],
|
||||
# "media_fields": [],
|
||||
# "tweet_fields": [],
|
||||
# "user_fields": []
|
||||
# },
|
||||
# test_credentials=TEST_CREDENTIALS,
|
||||
# test_output=[
|
||||
# ("event_ids", ["1346889436626259968"]),
|
||||
# ("event_texts", ["Hello just you..."]),
|
||||
# ("event_types", ["MessageCreate"]),
|
||||
# ("next_token", None),
|
||||
# ("data", [{"id": "1346889436626259968", "text": "Hello just you...", "event_type": "MessageCreate"}]),
|
||||
# ("included", {}),
|
||||
# ("meta", {}),
|
||||
# ("error", "")
|
||||
# ],
|
||||
# test_mock={
|
||||
# "get_dm_events": lambda *args, **kwargs: (
|
||||
# [{"id": "1346889436626259968", "text": "Hello just you...", "event_type": "MessageCreate"}],
|
||||
# {},
|
||||
# {},
|
||||
# ["1346889436626259968"],
|
||||
# ["Hello just you..."],
|
||||
# ["MessageCreate"],
|
||||
# None
|
||||
# )
|
||||
# }
|
||||
# )
|
||||
|
||||
# @staticmethod
|
||||
# def get_dm_events(
|
||||
# credentials: TwitterCredentials,
|
||||
# dm_conversation_id: str,
|
||||
# max_results: int,
|
||||
# pagination_token: str,
|
||||
# expansions: list[DMEventExpansion],
|
||||
# event_types: list[DMEventType],
|
||||
# media_fields: list[DMMediaField],
|
||||
# tweet_fields: list[DMTweetField],
|
||||
# user_fields: list[TweetUserFields]
|
||||
# ):
|
||||
# try:
|
||||
# client = tweepy.Client(
|
||||
# bearer_token=credentials.access_token.get_secret_value()
|
||||
# )
|
||||
|
||||
# params = {
|
||||
# "dm_conversation_id": dm_conversation_id,
|
||||
# "max_results": max_results,
|
||||
# "pagination_token": None if pagination_token == "" else pagination_token,
|
||||
# "user_auth": False
|
||||
# }
|
||||
|
||||
# params = (DMExpansionsBuilder(params)
|
||||
# .add_expansions(expansions)
|
||||
# .add_event_types(event_types)
|
||||
# .add_media_fields(media_fields)
|
||||
# .add_tweet_fields(tweet_fields)
|
||||
# .add_user_fields(user_fields)
|
||||
# .build())
|
||||
|
||||
# response = cast(Response, client.get_direct_message_events(**params))
|
||||
|
||||
# meta = {}
|
||||
# event_ids = []
|
||||
# event_texts = []
|
||||
# event_types = []
|
||||
# next_token = None
|
||||
|
||||
# if response.meta:
|
||||
# meta = response.meta
|
||||
# next_token = meta.get("next_token")
|
||||
|
||||
# included = IncludesSerializer.serialize(response.includes)
|
||||
# data = ResponseDataSerializer.serialize_list(response.data)
|
||||
|
||||
# if response.data:
|
||||
# event_ids = [str(item.id) for item in response.data]
|
||||
# event_texts = [item.text if hasattr(item, "text") else None for item in response.data]
|
||||
# event_types = [item.event_type for item in response.data]
|
||||
|
||||
# return data, included, meta, event_ids, event_texts, event_types, next_token
|
||||
|
||||
# raise Exception("No DM events found")
|
||||
|
||||
# except tweepy.TweepyException:
|
||||
# raise
|
||||
|
||||
# def run(
|
||||
# self,
|
||||
# input_data: Input,
|
||||
# *,
|
||||
# credentials: TwitterCredentials,
|
||||
# **kwargs,
|
||||
# ) -> BlockOutput:
|
||||
# try:
|
||||
# event_data, included, meta, event_ids, event_texts, event_types, next_token = self.get_dm_events(
|
||||
# credentials,
|
||||
# input_data.dm_conversation_id,
|
||||
# input_data.max_results,
|
||||
# input_data.pagination_token,
|
||||
# input_data.expansions,
|
||||
# input_data.event_types,
|
||||
# input_data.media_fields,
|
||||
# input_data.tweet_fields,
|
||||
# input_data.user_fields
|
||||
# )
|
||||
|
||||
# if event_ids:
|
||||
# yield "event_ids", event_ids
|
||||
# if event_texts:
|
||||
# yield "event_texts", event_texts
|
||||
# if event_types:
|
||||
# yield "event_types", event_types
|
||||
# if next_token:
|
||||
# yield "next_token", next_token
|
||||
# if event_data:
|
||||
# yield "data", event_data
|
||||
# if included:
|
||||
# yield "included", included
|
||||
# if meta:
|
||||
# yield "meta", meta
|
||||
|
||||
# except Exception as e:
|
||||
# yield "error", handle_tweepy_exception(e)
|
||||
@@ -0,0 +1,260 @@
|
||||
# Todo : Add new Type support
|
||||
|
||||
# from typing import cast
|
||||
|
||||
# import tweepy
|
||||
# from tweepy.client import Response
|
||||
|
||||
# from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
# from backend.data.model import SchemaField
|
||||
# from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
|
||||
# from backend.blocks.twitter._auth import (
|
||||
# TEST_CREDENTIALS,
|
||||
# TEST_CREDENTIALS_INPUT,
|
||||
# TwitterCredentials,
|
||||
# TwitterCredentialsField,
|
||||
# TwitterCredentialsInput,
|
||||
# )
|
||||
|
||||
# Pro and Enterprise plan [Manual Testing Required]
|
||||
# class TwitterSendDirectMessageBlock(Block):
|
||||
# """
|
||||
# Sends a direct message to a Twitter user
|
||||
# """
|
||||
|
||||
# class Input(BlockSchema):
|
||||
# credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
# ["offline.access", "direct_messages.write"]
|
||||
# )
|
||||
|
||||
# participant_id: str = SchemaField(
|
||||
# description="The User ID of the account to send DM to",
|
||||
# placeholder="Enter recipient user ID",
|
||||
# default="",
|
||||
# advanced=False
|
||||
# )
|
||||
|
||||
# dm_conversation_id: str = SchemaField(
|
||||
# description="The conversation ID to send message to",
|
||||
# placeholder="Enter conversation ID",
|
||||
# default="",
|
||||
# advanced=False
|
||||
# )
|
||||
|
||||
# text: str = SchemaField(
|
||||
# description="Text of the Direct Message (up to 10,000 characters)",
|
||||
# placeholder="Enter message text",
|
||||
# default="",
|
||||
# advanced=False
|
||||
# )
|
||||
|
||||
# media_id: str = SchemaField(
|
||||
# description="Media ID to attach to the message",
|
||||
# placeholder="Enter media ID",
|
||||
# default=""
|
||||
# )
|
||||
|
||||
# class Output(BlockSchema):
|
||||
# dm_event_id: str = SchemaField(description="ID of the sent direct message")
|
||||
# dm_conversation_id_: str = SchemaField(description="ID of the conversation")
|
||||
# error: str = SchemaField(description="Error message if sending failed")
|
||||
|
||||
# def __init__(self):
|
||||
# super().__init__(
|
||||
# id="f32f2786-a62e-11ef-a93d-a3ef199dde7f",
|
||||
# description="This block sends a direct message to a specified Twitter user.",
|
||||
# categories={BlockCategory.SOCIAL},
|
||||
# input_schema=TwitterSendDirectMessageBlock.Input,
|
||||
# output_schema=TwitterSendDirectMessageBlock.Output,
|
||||
# test_input={
|
||||
# "participant_id": "783214",
|
||||
# "dm_conversation_id": "",
|
||||
# "text": "Hello from Twitter API",
|
||||
# "media_id": "",
|
||||
# "credentials": TEST_CREDENTIALS_INPUT
|
||||
# },
|
||||
# test_credentials=TEST_CREDENTIALS,
|
||||
# test_output=[
|
||||
# ("dm_event_id", "0987654321"),
|
||||
# ("dm_conversation_id_", "1234567890"),
|
||||
# ("error", "")
|
||||
# ],
|
||||
# test_mock={
|
||||
# "send_direct_message": lambda *args, **kwargs: (
|
||||
# "0987654321",
|
||||
# "1234567890"
|
||||
# )
|
||||
# },
|
||||
# )
|
||||
|
||||
# @staticmethod
|
||||
# def send_direct_message(
|
||||
# credentials: TwitterCredentials,
|
||||
# participant_id: str,
|
||||
# dm_conversation_id: str,
|
||||
# text: str,
|
||||
# media_id: str
|
||||
# ):
|
||||
# try:
|
||||
# client = tweepy.Client(
|
||||
# bearer_token=credentials.access_token.get_secret_value()
|
||||
# )
|
||||
|
||||
# response = cast(
|
||||
# Response,
|
||||
# client.create_direct_message(
|
||||
# participant_id=None if participant_id == "" else participant_id,
|
||||
# dm_conversation_id=None if dm_conversation_id == "" else dm_conversation_id,
|
||||
# text=None if text == "" else text,
|
||||
# media_id=None if media_id == "" else media_id,
|
||||
# user_auth=False
|
||||
# )
|
||||
# )
|
||||
|
||||
# if not response.data:
|
||||
# raise Exception("Failed to send direct message")
|
||||
|
||||
# return response.data["dm_event_id"], response.data["dm_conversation_id"]
|
||||
|
||||
# except tweepy.TweepyException:
|
||||
# raise
|
||||
# except Exception as e:
|
||||
# print(f"Unexpected error: {str(e)}")
|
||||
# raise
|
||||
|
||||
# def run(
|
||||
# self,
|
||||
# input_data: Input,
|
||||
# *,
|
||||
# credentials: TwitterCredentials,
|
||||
# **kwargs,
|
||||
# ) -> BlockOutput:
|
||||
# try:
|
||||
# dm_event_id, dm_conversation_id = self.send_direct_message(
|
||||
# credentials,
|
||||
# input_data.participant_id,
|
||||
# input_data.dm_conversation_id,
|
||||
# input_data.text,
|
||||
# input_data.media_id
|
||||
# )
|
||||
# yield "dm_event_id", dm_event_id
|
||||
# yield "dm_conversation_id", dm_conversation_id
|
||||
|
||||
# except Exception as e:
|
||||
# yield "error", handle_tweepy_exception(e)
|
||||
|
||||
# class TwitterCreateDMConversationBlock(Block):
|
||||
# """
|
||||
# Creates a new group direct message conversation on Twitter
|
||||
# """
|
||||
|
||||
# class Input(BlockSchema):
|
||||
# credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
# ["offline.access", "dm.write","dm.read","tweet.read","user.read"]
|
||||
# )
|
||||
|
||||
# participant_ids: list[str] = SchemaField(
|
||||
# description="Array of User IDs to create conversation with (max 50)",
|
||||
# placeholder="Enter participant user IDs",
|
||||
# default=[],
|
||||
# advanced=False
|
||||
# )
|
||||
|
||||
# text: str = SchemaField(
|
||||
# description="Text of the Direct Message (up to 10,000 characters)",
|
||||
# placeholder="Enter message text",
|
||||
# default="",
|
||||
# advanced=False
|
||||
# )
|
||||
|
||||
# media_id: str = SchemaField(
|
||||
# description="Media ID to attach to the message",
|
||||
# placeholder="Enter media ID",
|
||||
# default="",
|
||||
# advanced=False
|
||||
# )
|
||||
|
||||
# class Output(BlockSchema):
|
||||
# dm_event_id: str = SchemaField(description="ID of the sent direct message")
|
||||
# dm_conversation_id: str = SchemaField(description="ID of the conversation")
|
||||
# error: str = SchemaField(description="Error message if sending failed")
|
||||
|
||||
# def __init__(self):
|
||||
# super().__init__(
|
||||
# id="ec11cabc-a62e-11ef-8c0e-3fe37ba2ec92",
|
||||
# description="This block creates a new group DM conversation with specified Twitter users.",
|
||||
# categories={BlockCategory.SOCIAL},
|
||||
# input_schema=TwitterCreateDMConversationBlock.Input,
|
||||
# output_schema=TwitterCreateDMConversationBlock.Output,
|
||||
# test_input={
|
||||
# "participant_ids": ["783214", "2244994945"],
|
||||
# "text": "Hello from Twitter API",
|
||||
# "media_id": "",
|
||||
# "credentials": TEST_CREDENTIALS_INPUT
|
||||
# },
|
||||
# test_credentials=TEST_CREDENTIALS,
|
||||
# test_output=[
|
||||
# ("dm_event_id", "0987654321"),
|
||||
# ("dm_conversation_id", "1234567890"),
|
||||
# ("error", "")
|
||||
# ],
|
||||
# test_mock={
|
||||
# "create_dm_conversation": lambda *args, **kwargs: (
|
||||
# "0987654321",
|
||||
# "1234567890"
|
||||
# )
|
||||
# },
|
||||
# )
|
||||
|
||||
# @staticmethod
|
||||
# def create_dm_conversation(
|
||||
# credentials: TwitterCredentials,
|
||||
# participant_ids: list[str],
|
||||
# text: str,
|
||||
# media_id: str
|
||||
# ):
|
||||
# try:
|
||||
# client = tweepy.Client(
|
||||
# bearer_token=credentials.access_token.get_secret_value()
|
||||
# )
|
||||
|
||||
# response = cast(
|
||||
# Response,
|
||||
# client.create_direct_message_conversation(
|
||||
# participant_ids=participant_ids,
|
||||
# text=None if text == "" else text,
|
||||
# media_id=None if media_id == "" else media_id,
|
||||
# user_auth=False
|
||||
# )
|
||||
# )
|
||||
|
||||
# if not response.data:
|
||||
# raise Exception("Failed to create DM conversation")
|
||||
|
||||
# return response.data["dm_event_id"], response.data["dm_conversation_id"]
|
||||
|
||||
# except tweepy.TweepyException:
|
||||
# raise
|
||||
# except Exception as e:
|
||||
# print(f"Unexpected error: {str(e)}")
|
||||
# raise
|
||||
|
||||
# def run(
|
||||
# self,
|
||||
# input_data: Input,
|
||||
# *,
|
||||
# credentials: TwitterCredentials,
|
||||
# **kwargs,
|
||||
# ) -> BlockOutput:
|
||||
# try:
|
||||
# dm_event_id, dm_conversation_id = self.create_dm_conversation(
|
||||
# credentials,
|
||||
# input_data.participant_ids,
|
||||
# input_data.text,
|
||||
# input_data.media_id
|
||||
# )
|
||||
# yield "dm_event_id", dm_event_id
|
||||
# yield "dm_conversation_id", dm_conversation_id
|
||||
|
||||
# except Exception as e:
|
||||
# yield "error", handle_tweepy_exception(e)
|
||||
@@ -0,0 +1,470 @@
|
||||
# from typing import cast
|
||||
import tweepy
|
||||
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
)
|
||||
|
||||
# from backend.blocks.twitter._builders import UserExpansionsBuilder
|
||||
# from backend.blocks.twitter._types import TweetFields, TweetUserFields, UserExpansionInputs, UserExpansions
|
||||
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
|
||||
# from tweepy.client import Response
|
||||
|
||||
|
||||
class TwitterUnfollowListBlock(Block):
|
||||
"""
|
||||
Unfollows a Twitter list for the authenticated user
|
||||
"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["follows.write", "offline.access"]
|
||||
)
|
||||
|
||||
list_id: str = SchemaField(
|
||||
description="The ID of the List to unfollow",
|
||||
placeholder="Enter list ID",
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
success: bool = SchemaField(description="Whether the unfollow was successful")
|
||||
error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="1f43310a-a62f-11ef-8276-2b06a1bbae1a",
|
||||
description="This block unfollows a specified Twitter list for the authenticated user.",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterUnfollowListBlock.Input,
|
||||
output_schema=TwitterUnfollowListBlock.Output,
|
||||
test_input={"list_id": "123456789", "credentials": TEST_CREDENTIALS_INPUT},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("success", True),
|
||||
],
|
||||
test_mock={"unfollow_list": lambda *args, **kwargs: True},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def unfollow_list(credentials: TwitterCredentials, list_id: str):
|
||||
try:
|
||||
client = tweepy.Client(
|
||||
bearer_token=credentials.access_token.get_secret_value()
|
||||
)
|
||||
|
||||
client.unfollow_list(list_id=list_id, user_auth=False)
|
||||
|
||||
return True
|
||||
|
||||
except tweepy.TweepyException:
|
||||
raise
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: TwitterCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
success = self.unfollow_list(credentials, input_data.list_id)
|
||||
yield "success", success
|
||||
except Exception as e:
|
||||
yield "error", handle_tweepy_exception(e)
|
||||
|
||||
|
||||
class TwitterFollowListBlock(Block):
|
||||
"""
|
||||
Follows a Twitter list for the authenticated user
|
||||
"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["tweet.read", "users.read", "list.write", "offline.access"]
|
||||
)
|
||||
|
||||
list_id: str = SchemaField(
|
||||
description="The ID of the List to follow",
|
||||
placeholder="Enter list ID",
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
success: bool = SchemaField(description="Whether the follow was successful")
|
||||
error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="03d8acf6-a62f-11ef-b17f-b72b04a09e79",
|
||||
description="This block follows a specified Twitter list for the authenticated user.",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterFollowListBlock.Input,
|
||||
output_schema=TwitterFollowListBlock.Output,
|
||||
test_input={"list_id": "123456789", "credentials": TEST_CREDENTIALS_INPUT},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("success", True),
|
||||
],
|
||||
test_mock={"follow_list": lambda *args, **kwargs: True},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def follow_list(credentials: TwitterCredentials, list_id: str):
|
||||
try:
|
||||
client = tweepy.Client(
|
||||
bearer_token=credentials.access_token.get_secret_value()
|
||||
)
|
||||
|
||||
client.follow_list(list_id=list_id, user_auth=False)
|
||||
|
||||
return True
|
||||
|
||||
except tweepy.TweepyException:
|
||||
raise
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: TwitterCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
success = self.follow_list(credentials, input_data.list_id)
|
||||
yield "success", success
|
||||
except Exception as e:
|
||||
yield "error", handle_tweepy_exception(e)
|
||||
|
||||
|
||||
# Enterprise Level [Need to do Manual testing], There is a high possibility that we might get error in this
|
||||
# Needs Type Input in this
|
||||
|
||||
# class TwitterListGetFollowersBlock(Block):
|
||||
# """
|
||||
# Gets followers of a specified Twitter list
|
||||
# """
|
||||
|
||||
# class Input(UserExpansionInputs):
|
||||
# credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
# ["tweet.read","users.read", "list.read", "offline.access"]
|
||||
# )
|
||||
|
||||
# list_id: str = SchemaField(
|
||||
# description="The ID of the List to get followers for",
|
||||
# placeholder="Enter list ID",
|
||||
# required=True
|
||||
# )
|
||||
|
||||
# max_results: int = SchemaField(
|
||||
# description="Max number of results per page (1-100)",
|
||||
# placeholder="Enter max results",
|
||||
# default=10,
|
||||
# advanced=True,
|
||||
# )
|
||||
|
||||
# pagination_token: str = SchemaField(
|
||||
# description="Token for pagination",
|
||||
# placeholder="Enter pagination token",
|
||||
# default="",
|
||||
# advanced=True,
|
||||
# )
|
||||
|
||||
# class Output(BlockSchema):
|
||||
# user_ids: list[str] = SchemaField(description="List of user IDs of followers")
|
||||
# usernames: list[str] = SchemaField(description="List of usernames of followers")
|
||||
# next_token: str = SchemaField(description="Token for next page of results")
|
||||
# data: list[dict] = SchemaField(description="Complete follower data")
|
||||
# included: dict = SchemaField(description="Additional data requested via expansions")
|
||||
# meta: dict = SchemaField(description="Metadata about the response")
|
||||
# error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
# def __init__(self):
|
||||
# super().__init__(
|
||||
# id="16b289b4-a62f-11ef-95d4-bb29b849eb99",
|
||||
# description="This block retrieves followers of a specified Twitter list.",
|
||||
# categories={BlockCategory.SOCIAL},
|
||||
# input_schema=TwitterListGetFollowersBlock.Input,
|
||||
# output_schema=TwitterListGetFollowersBlock.Output,
|
||||
# test_input={
|
||||
# "list_id": "123456789",
|
||||
# "max_results": 10,
|
||||
# "pagination_token": None,
|
||||
# "credentials": TEST_CREDENTIALS_INPUT,
|
||||
# "expansions": [],
|
||||
# "tweet_fields": [],
|
||||
# "user_fields": []
|
||||
# },
|
||||
# test_credentials=TEST_CREDENTIALS,
|
||||
# test_output=[
|
||||
# ("user_ids", ["2244994945"]),
|
||||
# ("usernames", ["testuser"]),
|
||||
# ("next_token", None),
|
||||
# ("data", {"followers": [{"id": "2244994945", "username": "testuser"}]}),
|
||||
# ("included", {}),
|
||||
# ("meta", {}),
|
||||
# ("error", "")
|
||||
# ],
|
||||
# test_mock={
|
||||
# "get_list_followers": lambda *args, **kwargs: ({
|
||||
# "followers": [{"id": "2244994945", "username": "testuser"}]
|
||||
# }, {}, {}, ["2244994945"], ["testuser"], None)
|
||||
# }
|
||||
# )
|
||||
|
||||
# @staticmethod
|
||||
# def get_list_followers(
|
||||
# credentials: TwitterCredentials,
|
||||
# list_id: str,
|
||||
# max_results: int,
|
||||
# pagination_token: str,
|
||||
# expansions: list[UserExpansions],
|
||||
# tweet_fields: list[TweetFields],
|
||||
# user_fields: list[TweetUserFields]
|
||||
# ):
|
||||
# try:
|
||||
# client = tweepy.Client(
|
||||
# bearer_token=credentials.access_token.get_secret_value(),
|
||||
# )
|
||||
|
||||
# params = {
|
||||
# "id": list_id,
|
||||
# "max_results": max_results,
|
||||
# "pagination_token": None if pagination_token == "" else pagination_token,
|
||||
# "user_auth": False
|
||||
# }
|
||||
|
||||
# params = (UserExpansionsBuilder(params)
|
||||
# .add_expansions(expansions)
|
||||
# .add_tweet_fields(tweet_fields)
|
||||
# .add_user_fields(user_fields)
|
||||
# .build())
|
||||
|
||||
# response = cast(
|
||||
# Response,
|
||||
# client.get_list_followers(**params)
|
||||
# )
|
||||
|
||||
# meta = {}
|
||||
# user_ids = []
|
||||
# usernames = []
|
||||
# next_token = None
|
||||
|
||||
# if response.meta:
|
||||
# meta = response.meta
|
||||
# next_token = meta.get("next_token")
|
||||
|
||||
# included = IncludesSerializer.serialize(response.includes)
|
||||
# data = ResponseDataSerializer.serialize_list(response.data)
|
||||
|
||||
# if response.data:
|
||||
# user_ids = [str(item.id) for item in response.data]
|
||||
# usernames = [item.username for item in response.data]
|
||||
|
||||
# return data, included, meta, user_ids, usernames, next_token
|
||||
|
||||
# raise Exception("No followers found")
|
||||
|
||||
# except tweepy.TweepyException:
|
||||
# raise
|
||||
|
||||
# def run(
|
||||
# self,
|
||||
# input_data: Input,
|
||||
# *,
|
||||
# credentials: TwitterCredentials,
|
||||
# **kwargs,
|
||||
# ) -> BlockOutput:
|
||||
# try:
|
||||
# followers_data, included, meta, user_ids, usernames, next_token = self.get_list_followers(
|
||||
# credentials,
|
||||
# input_data.list_id,
|
||||
# input_data.max_results,
|
||||
# input_data.pagination_token,
|
||||
# input_data.expansions,
|
||||
# input_data.tweet_fields,
|
||||
# input_data.user_fields
|
||||
# )
|
||||
|
||||
# if user_ids:
|
||||
# yield "user_ids", user_ids
|
||||
# if usernames:
|
||||
# yield "usernames", usernames
|
||||
# if next_token:
|
||||
# yield "next_token", next_token
|
||||
# if followers_data:
|
||||
# yield "data", followers_data
|
||||
# if included:
|
||||
# yield "included", included
|
||||
# if meta:
|
||||
# yield "meta", meta
|
||||
|
||||
# except Exception as e:
|
||||
# yield "error", handle_tweepy_exception(e)
|
||||
|
||||
# class TwitterGetFollowedListsBlock(Block):
|
||||
# """
|
||||
# Gets lists followed by a specified Twitter user
|
||||
# """
|
||||
|
||||
# class Input(UserExpansionInputs):
|
||||
# credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
# ["follows.read", "users.read", "list.read", "offline.access"]
|
||||
# )
|
||||
|
||||
# user_id: str = SchemaField(
|
||||
# description="The user ID whose followed Lists to retrieve",
|
||||
# placeholder="Enter user ID",
|
||||
# required=True
|
||||
# )
|
||||
|
||||
# max_results: int = SchemaField(
|
||||
# description="Max number of results per page (1-100)",
|
||||
# placeholder="Enter max results",
|
||||
# default=10,
|
||||
# advanced=True,
|
||||
# )
|
||||
|
||||
# pagination_token: str = SchemaField(
|
||||
# description="Token for pagination",
|
||||
# placeholder="Enter pagination token",
|
||||
# default="",
|
||||
# advanced=True,
|
||||
# )
|
||||
|
||||
# class Output(BlockSchema):
|
||||
# list_ids: list[str] = SchemaField(description="List of list IDs")
|
||||
# list_names: list[str] = SchemaField(description="List of list names")
|
||||
# data: list[dict] = SchemaField(description="Complete list data")
|
||||
# includes: dict = SchemaField(description="Additional data requested via expansions")
|
||||
# meta: dict = SchemaField(description="Metadata about the response")
|
||||
# next_token: str = SchemaField(description="Token for next page of results")
|
||||
# error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
# def __init__(self):
|
||||
# super().__init__(
|
||||
# id="0e18bbfc-a62f-11ef-94fa-1f1e174b809e",
|
||||
# description="This block retrieves all Lists a specified user follows.",
|
||||
# categories={BlockCategory.SOCIAL},
|
||||
# input_schema=TwitterGetFollowedListsBlock.Input,
|
||||
# output_schema=TwitterGetFollowedListsBlock.Output,
|
||||
# test_input={
|
||||
# "user_id": "123456789",
|
||||
# "max_results": 10,
|
||||
# "pagination_token": None,
|
||||
# "credentials": TEST_CREDENTIALS_INPUT,
|
||||
# "expansions": [],
|
||||
# "tweet_fields": [],
|
||||
# "user_fields": []
|
||||
# },
|
||||
# test_credentials=TEST_CREDENTIALS,
|
||||
# test_output=[
|
||||
# ("list_ids", ["12345"]),
|
||||
# ("list_names", ["Test List"]),
|
||||
# ("data", {"followed_lists": [{"id": "12345", "name": "Test List"}]}),
|
||||
# ("includes", {}),
|
||||
# ("meta", {}),
|
||||
# ("next_token", None),
|
||||
# ("error", "")
|
||||
# ],
|
||||
# test_mock={
|
||||
# "get_followed_lists": lambda *args, **kwargs: ({
|
||||
# "followed_lists": [{"id": "12345", "name": "Test List"}]
|
||||
# }, {}, {}, ["12345"], ["Test List"], None)
|
||||
# }
|
||||
# )
|
||||
|
||||
# @staticmethod
|
||||
# def get_followed_lists(
|
||||
# credentials: TwitterCredentials,
|
||||
# user_id: str,
|
||||
# max_results: int,
|
||||
# pagination_token: str,
|
||||
# expansions: list[UserExpansions],
|
||||
# tweet_fields: list[TweetFields],
|
||||
# user_fields: list[TweetUserFields]
|
||||
# ):
|
||||
# try:
|
||||
# client = tweepy.Client(
|
||||
# bearer_token=credentials.access_token.get_secret_value(),
|
||||
# )
|
||||
|
||||
# params = {
|
||||
# "id": user_id,
|
||||
# "max_results": max_results,
|
||||
# "pagination_token": None if pagination_token == "" else pagination_token,
|
||||
# "user_auth": False
|
||||
# }
|
||||
|
||||
# params = (UserExpansionsBuilder(params)
|
||||
# .add_expansions(expansions)
|
||||
# .add_tweet_fields(tweet_fields)
|
||||
# .add_user_fields(user_fields)
|
||||
# .build())
|
||||
|
||||
# response = cast(
|
||||
# Response,
|
||||
# client.get_followed_lists(**params)
|
||||
# )
|
||||
|
||||
# meta = {}
|
||||
# list_ids = []
|
||||
# list_names = []
|
||||
# next_token = None
|
||||
|
||||
# if response.meta:
|
||||
# meta = response.meta
|
||||
# next_token = meta.get("next_token")
|
||||
|
||||
# included = IncludesSerializer.serialize(response.includes)
|
||||
# data = ResponseDataSerializer.serialize_list(response.data)
|
||||
|
||||
# if response.data:
|
||||
# list_ids = [str(item.id) for item in response.data]
|
||||
# list_names = [item.name for item in response.data]
|
||||
|
||||
# return data, included, meta, list_ids, list_names, next_token
|
||||
|
||||
# raise Exception("No followed lists found")
|
||||
|
||||
# except tweepy.TweepyException:
|
||||
# raise
|
||||
|
||||
# def run(
|
||||
# self,
|
||||
# input_data: Input,
|
||||
# *,
|
||||
# credentials: TwitterCredentials,
|
||||
# **kwargs,
|
||||
# ) -> BlockOutput:
|
||||
# try:
|
||||
# lists_data, included, meta, list_ids, list_names, next_token = self.get_followed_lists(
|
||||
# credentials,
|
||||
# input_data.user_id,
|
||||
# input_data.max_results,
|
||||
# input_data.pagination_token,
|
||||
# input_data.expansions,
|
||||
# input_data.tweet_fields,
|
||||
# input_data.user_fields
|
||||
# )
|
||||
|
||||
# if list_ids:
|
||||
# yield "list_ids", list_ids
|
||||
# if list_names:
|
||||
# yield "list_names", list_names
|
||||
# if next_token:
|
||||
# yield "next_token", next_token
|
||||
# if lists_data:
|
||||
# yield "data", lists_data
|
||||
# if included:
|
||||
# yield "includes", included
|
||||
# if meta:
|
||||
# yield "meta", meta
|
||||
|
||||
# except Exception as e:
|
||||
# yield "error", handle_tweepy_exception(e)
|
||||
@@ -0,0 +1,340 @@
|
||||
from typing import cast
|
||||
|
||||
import tweepy
|
||||
from tweepy.client import Response
|
||||
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
)
|
||||
from backend.blocks.twitter._builders import ListExpansionsBuilder
|
||||
from backend.blocks.twitter._serializer import (
|
||||
IncludesSerializer,
|
||||
ResponseDataSerializer,
|
||||
)
|
||||
from backend.blocks.twitter._types import (
|
||||
ListExpansionInputs,
|
||||
ListExpansionsFilter,
|
||||
ListFieldsFilter,
|
||||
TweetUserFieldsFilter,
|
||||
)
|
||||
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
|
||||
|
||||
class TwitterGetListBlock(Block):
|
||||
"""
|
||||
Gets information about a Twitter List specified by ID
|
||||
"""
|
||||
|
||||
class Input(ListExpansionInputs):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["tweet.read", "users.read", "offline.access"]
|
||||
)
|
||||
|
||||
list_id: str = SchemaField(
|
||||
description="The ID of the List to lookup",
|
||||
placeholder="Enter list ID",
|
||||
required=True,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
# Common outputs
|
||||
id: str = SchemaField(description="ID of the Twitter List")
|
||||
name: str = SchemaField(description="Name of the Twitter List")
|
||||
owner_id: str = SchemaField(description="ID of the List owner")
|
||||
owner_username: str = SchemaField(description="Username of the List owner")
|
||||
|
||||
# Complete outputs
|
||||
data: dict = SchemaField(description="Complete list data")
|
||||
included: dict = SchemaField(
|
||||
description="Additional data requested via expansions"
|
||||
)
|
||||
meta: dict = SchemaField(description="Metadata about the response")
|
||||
error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="34ebc80a-a62f-11ef-9c2a-3fcab6c07079",
|
||||
description="This block retrieves information about a specified Twitter List.",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetListBlock.Input,
|
||||
output_schema=TwitterGetListBlock.Output,
|
||||
test_input={
|
||||
"list_id": "84839422",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"expansions": None,
|
||||
"list_fields": None,
|
||||
"user_fields": None,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("id", "84839422"),
|
||||
("name", "Official Twitter Accounts"),
|
||||
("owner_id", "2244994945"),
|
||||
("owner_username", "TwitterAPI"),
|
||||
("data", {"id": "84839422", "name": "Official Twitter Accounts"}),
|
||||
],
|
||||
test_mock={
|
||||
"get_list": lambda *args, **kwargs: (
|
||||
{"id": "84839422", "name": "Official Twitter Accounts"},
|
||||
{},
|
||||
{},
|
||||
"2244994945",
|
||||
"TwitterAPI",
|
||||
)
|
||||
},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_list(
|
||||
credentials: TwitterCredentials,
|
||||
list_id: str,
|
||||
expansions: ListExpansionsFilter | None,
|
||||
user_fields: TweetUserFieldsFilter | None,
|
||||
list_fields: ListFieldsFilter | None,
|
||||
):
|
||||
try:
|
||||
client = tweepy.Client(
|
||||
bearer_token=credentials.access_token.get_secret_value()
|
||||
)
|
||||
|
||||
params = {"id": list_id, "user_auth": False}
|
||||
|
||||
params = (
|
||||
ListExpansionsBuilder(params)
|
||||
.add_expansions(expansions)
|
||||
.add_user_fields(user_fields)
|
||||
.add_list_fields(list_fields)
|
||||
.build()
|
||||
)
|
||||
|
||||
response = cast(Response, client.get_list(**params))
|
||||
|
||||
meta = {}
|
||||
owner_id = ""
|
||||
owner_username = ""
|
||||
|
||||
included = IncludesSerializer.serialize(response.includes)
|
||||
data_dict = ResponseDataSerializer.serialize_dict(response.data)
|
||||
|
||||
if "users" in included:
|
||||
owner_id = str(included["users"][0]["id"])
|
||||
owner_username = included["users"][0]["username"]
|
||||
|
||||
if response.meta:
|
||||
meta = response.meta
|
||||
|
||||
if response.data:
|
||||
return data_dict, included, meta, owner_id, owner_username
|
||||
|
||||
raise Exception("List not found")
|
||||
|
||||
except tweepy.TweepyException:
|
||||
raise
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: TwitterCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
list_data, included, meta, owner_id, owner_username = self.get_list(
|
||||
credentials,
|
||||
input_data.list_id,
|
||||
input_data.expansions,
|
||||
input_data.user_fields,
|
||||
input_data.list_fields,
|
||||
)
|
||||
|
||||
yield "id", str(list_data["id"])
|
||||
yield "name", list_data["name"]
|
||||
if owner_id:
|
||||
yield "owner_id", owner_id
|
||||
if owner_username:
|
||||
yield "owner_username", owner_username
|
||||
yield "data", {"id": list_data["id"], "name": list_data["name"]}
|
||||
if included:
|
||||
yield "included", included
|
||||
if meta:
|
||||
yield "meta", meta
|
||||
|
||||
except Exception as e:
|
||||
yield "error", handle_tweepy_exception(e)
|
||||
|
||||
|
||||
class TwitterGetOwnedListsBlock(Block):
|
||||
"""
|
||||
Gets all Lists owned by the specified user
|
||||
"""
|
||||
|
||||
class Input(ListExpansionInputs):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["tweet.read", "users.read", "list.read", "offline.access"]
|
||||
)
|
||||
|
||||
user_id: str = SchemaField(
|
||||
description="The user ID whose owned Lists to retrieve",
|
||||
placeholder="Enter user ID",
|
||||
required=True,
|
||||
)
|
||||
|
||||
max_results: int = SchemaField(
|
||||
description="Maximum number of results per page (1-100)",
|
||||
placeholder="Enter max results (default 100)",
|
||||
advanced=True,
|
||||
default=10,
|
||||
)
|
||||
|
||||
pagination_token: str | None = SchemaField(
|
||||
description="Token for pagination",
|
||||
placeholder="Enter pagination token",
|
||||
advanced=True,
|
||||
default="",
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
# Common outputs
|
||||
list_ids: list[str] = SchemaField(description="List ids of the owned lists")
|
||||
list_names: list[str] = SchemaField(description="List names of the owned lists")
|
||||
next_token: str = SchemaField(description="Token for next page of results")
|
||||
|
||||
# Complete outputs
|
||||
data: list[dict] = SchemaField(description="Complete owned lists data")
|
||||
included: dict = SchemaField(
|
||||
description="Additional data requested via expansions"
|
||||
)
|
||||
meta: dict = SchemaField(description="Metadata about the response")
|
||||
error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="2b6bdb26-a62f-11ef-a9ce-ff89c2568726",
|
||||
description="This block retrieves all Lists owned by a specified Twitter user.",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetOwnedListsBlock.Input,
|
||||
output_schema=TwitterGetOwnedListsBlock.Output,
|
||||
test_input={
|
||||
"user_id": "2244994945",
|
||||
"max_results": 10,
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"expansions": None,
|
||||
"list_fields": None,
|
||||
"user_fields": None,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("list_ids", ["84839422"]),
|
||||
("list_names", ["Official Twitter Accounts"]),
|
||||
("data", [{"id": "84839422", "name": "Official Twitter Accounts"}]),
|
||||
],
|
||||
test_mock={
|
||||
"get_owned_lists": lambda *args, **kwargs: (
|
||||
[{"id": "84839422", "name": "Official Twitter Accounts"}],
|
||||
{},
|
||||
{},
|
||||
["84839422"],
|
||||
["Official Twitter Accounts"],
|
||||
None,
|
||||
)
|
||||
},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_owned_lists(
|
||||
credentials: TwitterCredentials,
|
||||
user_id: str,
|
||||
max_results: int,
|
||||
pagination_token: str | None,
|
||||
expansions: ListExpansionsFilter | None,
|
||||
user_fields: TweetUserFieldsFilter | None,
|
||||
list_fields: ListFieldsFilter | None,
|
||||
):
|
||||
try:
|
||||
client = tweepy.Client(
|
||||
bearer_token=credentials.access_token.get_secret_value()
|
||||
)
|
||||
|
||||
params = {
|
||||
"id": user_id,
|
||||
"max_results": max_results,
|
||||
"pagination_token": (
|
||||
None if pagination_token == "" else pagination_token
|
||||
),
|
||||
"user_auth": False,
|
||||
}
|
||||
|
||||
params = (
|
||||
ListExpansionsBuilder(params)
|
||||
.add_expansions(expansions)
|
||||
.add_user_fields(user_fields)
|
||||
.add_list_fields(list_fields)
|
||||
.build()
|
||||
)
|
||||
|
||||
response = cast(Response, client.get_owned_lists(**params))
|
||||
|
||||
meta = {}
|
||||
list_ids = []
|
||||
list_names = []
|
||||
next_token = None
|
||||
|
||||
if response.meta:
|
||||
meta = response.meta
|
||||
next_token = meta.get("next_token")
|
||||
|
||||
included = IncludesSerializer.serialize(response.includes)
|
||||
data = ResponseDataSerializer.serialize_list(response.data)
|
||||
|
||||
if response.data:
|
||||
list_ids = [str(item.id) for item in response.data]
|
||||
list_names = [item.name for item in response.data]
|
||||
|
||||
return data, included, meta, list_ids, list_names, next_token
|
||||
|
||||
raise Exception("Lists not found")
|
||||
|
||||
except tweepy.TweepyException:
|
||||
raise
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: TwitterCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
list_data, included, meta, list_ids, list_names, next_token = (
|
||||
self.get_owned_lists(
|
||||
credentials,
|
||||
input_data.user_id,
|
||||
input_data.max_results,
|
||||
input_data.pagination_token,
|
||||
input_data.expansions,
|
||||
input_data.user_fields,
|
||||
input_data.list_fields,
|
||||
)
|
||||
)
|
||||
|
||||
if list_ids:
|
||||
yield "list_ids", list_ids
|
||||
if list_names:
|
||||
yield "list_names", list_names
|
||||
if next_token:
|
||||
yield "next_token", next_token
|
||||
if list_data:
|
||||
yield "data", list_data
|
||||
if included:
|
||||
yield "included", included
|
||||
if meta:
|
||||
yield "meta", meta
|
||||
|
||||
except Exception as e:
|
||||
yield "error", handle_tweepy_exception(e)
|
||||
@@ -0,0 +1,523 @@
|
||||
from typing import cast
|
||||
|
||||
import tweepy
|
||||
from tweepy.client import Response
|
||||
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
)
|
||||
from backend.blocks.twitter._builders import (
|
||||
ListExpansionsBuilder,
|
||||
UserExpansionsBuilder,
|
||||
)
|
||||
from backend.blocks.twitter._serializer import (
|
||||
IncludesSerializer,
|
||||
ResponseDataSerializer,
|
||||
)
|
||||
from backend.blocks.twitter._types import (
|
||||
ListExpansionInputs,
|
||||
ListExpansionsFilter,
|
||||
ListFieldsFilter,
|
||||
TweetFieldsFilter,
|
||||
TweetUserFieldsFilter,
|
||||
UserExpansionInputs,
|
||||
UserExpansionsFilter,
|
||||
)
|
||||
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
|
||||
|
||||
class TwitterRemoveListMemberBlock(Block):
|
||||
"""
|
||||
Removes a member from a Twitter List that the authenticated user owns
|
||||
"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["list.write", "users.read", "tweet.read", "offline.access"]
|
||||
)
|
||||
|
||||
list_id: str = SchemaField(
|
||||
description="The ID of the List to remove the member from",
|
||||
placeholder="Enter list ID",
|
||||
required=True,
|
||||
)
|
||||
|
||||
user_id: str = SchemaField(
|
||||
description="The ID of the user to remove from the List",
|
||||
placeholder="Enter user ID to remove",
|
||||
required=True,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
success: bool = SchemaField(
|
||||
description="Whether the member was successfully removed"
|
||||
)
|
||||
error: str = SchemaField(description="Error message if the removal failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="5a3d1320-a62f-11ef-b7ce-a79e7656bcb0",
|
||||
description="This block removes a specified user from a Twitter List owned by the authenticated user.",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterRemoveListMemberBlock.Input,
|
||||
output_schema=TwitterRemoveListMemberBlock.Output,
|
||||
test_input={
|
||||
"list_id": "123456789",
|
||||
"user_id": "987654321",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[("success", True)],
|
||||
test_mock={"remove_list_member": lambda *args, **kwargs: True},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def remove_list_member(credentials: TwitterCredentials, list_id: str, user_id: str):
|
||||
try:
|
||||
client = tweepy.Client(
|
||||
bearer_token=credentials.access_token.get_secret_value()
|
||||
)
|
||||
client.remove_list_member(id=list_id, user_id=user_id, user_auth=False)
|
||||
return True
|
||||
except tweepy.TweepyException:
|
||||
raise
|
||||
except Exception:
|
||||
raise
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: TwitterCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
success = self.remove_list_member(
|
||||
credentials, input_data.list_id, input_data.user_id
|
||||
)
|
||||
yield "success", success
|
||||
|
||||
except Exception as e:
|
||||
yield "error", handle_tweepy_exception(e)
|
||||
|
||||
|
||||
class TwitterAddListMemberBlock(Block):
|
||||
"""
|
||||
Adds a member to a Twitter List that the authenticated user owns
|
||||
"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["list.write", "users.read", "tweet.read", "offline.access"]
|
||||
)
|
||||
|
||||
list_id: str = SchemaField(
|
||||
description="The ID of the List to add the member to",
|
||||
placeholder="Enter list ID",
|
||||
required=True,
|
||||
)
|
||||
|
||||
user_id: str = SchemaField(
|
||||
description="The ID of the user to add to the List",
|
||||
placeholder="Enter user ID to add",
|
||||
required=True,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
success: bool = SchemaField(
|
||||
description="Whether the member was successfully added"
|
||||
)
|
||||
error: str = SchemaField(description="Error message if the addition failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="3ee8284e-a62f-11ef-84e4-8f6e2cbf0ddb",
|
||||
description="This block adds a specified user to a Twitter List owned by the authenticated user.",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterAddListMemberBlock.Input,
|
||||
output_schema=TwitterAddListMemberBlock.Output,
|
||||
test_input={
|
||||
"list_id": "123456789",
|
||||
"user_id": "987654321",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[("success", True)],
|
||||
test_mock={"add_list_member": lambda *args, **kwargs: True},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def add_list_member(credentials: TwitterCredentials, list_id: str, user_id: str):
|
||||
try:
|
||||
client = tweepy.Client(
|
||||
bearer_token=credentials.access_token.get_secret_value()
|
||||
)
|
||||
client.add_list_member(id=list_id, user_id=user_id, user_auth=False)
|
||||
return True
|
||||
except tweepy.TweepyException:
|
||||
raise
|
||||
except Exception:
|
||||
raise
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: TwitterCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
success = self.add_list_member(
|
||||
credentials, input_data.list_id, input_data.user_id
|
||||
)
|
||||
yield "success", success
|
||||
|
||||
except Exception as e:
|
||||
yield "error", handle_tweepy_exception(e)
|
||||
|
||||
|
||||
class TwitterGetListMembersBlock(Block):
|
||||
"""
|
||||
Gets the members of a specified Twitter List
|
||||
"""
|
||||
|
||||
class Input(UserExpansionInputs):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["list.read", "offline.access"]
|
||||
)
|
||||
|
||||
list_id: str = SchemaField(
|
||||
description="The ID of the List to get members from",
|
||||
placeholder="Enter list ID",
|
||||
required=True,
|
||||
)
|
||||
|
||||
max_results: int = SchemaField(
|
||||
description="Maximum number of results per page (1-100)",
|
||||
placeholder="Enter max results",
|
||||
default=10,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
pagination_token: str | None = SchemaField(
|
||||
description="Token for pagination of results",
|
||||
placeholder="Enter pagination token",
|
||||
default="",
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
    class Output(BlockSchema):
        ids: list[str] = SchemaField(description="List of member user IDs")
        usernames: list[str] = SchemaField(description="List of member usernames")
        next_token: str = SchemaField(description="Next token for pagination")

        data: list[dict] = SchemaField(
            description="Complete user data for list members"
        )
        included: dict = SchemaField(
            description="Additional data requested via expansions"
        )
        meta: dict = SchemaField(description="Metadata including pagination info")

        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="4dba046e-a62f-11ef-b69a-87240c84b4c7",
            description="This block retrieves the members of a specified Twitter List.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterGetListMembersBlock.Input,
            output_schema=TwitterGetListMembersBlock.Output,
            test_input={
                "list_id": "123456789",
                "max_results": 2,
                "pagination_token": None,
                "credentials": TEST_CREDENTIALS_INPUT,
                "expansions": None,
                "tweet_fields": None,
                "user_fields": None,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("ids", ["12345", "67890"]),
                ("usernames", ["testuser1", "testuser2"]),
                (
                    "data",
                    [
                        {"id": "12345", "username": "testuser1"},
                        {"id": "67890", "username": "testuser2"},
                    ],
                ),
            ],
            test_mock={
                "get_list_members": lambda *args, **kwargs: (
                    ["12345", "67890"],
                    ["testuser1", "testuser2"],
                    [
                        {"id": "12345", "username": "testuser1"},
                        {"id": "67890", "username": "testuser2"},
                    ],
                    {},
                    {},
                    None,
                )
            },
        )

    @staticmethod
    def get_list_members(
        credentials: TwitterCredentials,
        list_id: str,
        max_results: int,
        pagination_token: str | None,
        expansions: UserExpansionsFilter | None,
        tweet_fields: TweetFieldsFilter | None,
        user_fields: TweetUserFieldsFilter | None,
    ):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            params = {
                "id": list_id,
                "max_results": max_results,
                "pagination_token": (
                    None if pagination_token == "" else pagination_token
                ),
                "user_auth": False,
            }

            params = (
                UserExpansionsBuilder(params)
                .add_expansions(expansions)
                .add_tweet_fields(tweet_fields)
                .add_user_fields(user_fields)
                .build()
            )

            response = cast(Response, client.get_list_members(**params))

            meta = {}
            next_token = None
            user_ids = []
            usernames = []

            if response.meta:
                meta = response.meta
                next_token = meta.get("next_token")

            included = IncludesSerializer.serialize(response.includes)
            data = ResponseDataSerializer.serialize_list(response.data)

            if response.data:
                user_ids = [str(user.id) for user in response.data]
                usernames = [user.username for user in response.data]
                return user_ids, usernames, data, included, meta, next_token

            raise Exception("List members not found")

        except tweepy.TweepyException:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            ids, usernames, data, included, meta, next_token = self.get_list_members(
                credentials,
                input_data.list_id,
                input_data.max_results,
                input_data.pagination_token,
                input_data.expansions,
                input_data.tweet_fields,
                input_data.user_fields,
            )

            if ids:
                yield "ids", ids
            if usernames:
                yield "usernames", usernames
            if next_token:
                yield "next_token", next_token
            if data:
                yield "data", data
            if included:
                yield "included", included
            if meta:
                yield "meta", meta

        except Exception as e:
            yield "error", handle_tweepy_exception(e)


class TwitterGetListMembershipsBlock(Block):
    """
    Gets all Lists that a specified user is a member of
    """

    class Input(ListExpansionInputs):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["list.read", "offline.access"]
        )

        user_id: str = SchemaField(
            description="The ID of the user whose List memberships to retrieve",
            placeholder="Enter user ID",
            required=True,
        )

        max_results: int = SchemaField(
            description="Maximum number of results per page (1-100)",
            placeholder="Enter max results",
            advanced=True,
            default=10,
        )

        pagination_token: str | None = SchemaField(
            description="Token for pagination of results",
            placeholder="Enter pagination token",
            advanced=True,
            default="",
        )

    class Output(BlockSchema):
        list_ids: list[str] = SchemaField(description="List of list IDs")
        next_token: str = SchemaField(description="Next token for pagination")

        data: list[dict] = SchemaField(description="List membership data")
        included: dict = SchemaField(
            description="Additional data requested via expansions"
        )
        meta: dict = SchemaField(description="Metadata about pagination")
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="46e6429c-a62f-11ef-81c0-2b55bc7823ba",
            description="This block retrieves all Lists that a specified user is a member of.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterGetListMembershipsBlock.Input,
            output_schema=TwitterGetListMembershipsBlock.Output,
            test_input={
                "user_id": "123456789",
                "max_results": 1,
                "pagination_token": None,
                "credentials": TEST_CREDENTIALS_INPUT,
                "expansions": None,
                "list_fields": None,
                "user_fields": None,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("list_ids", ["84839422"]),
                ("data", [{"id": "84839422"}]),
            ],
            test_mock={
                "get_list_memberships": lambda *args, **kwargs: (
                    [{"id": "84839422"}],
                    {},
                    {},
                    ["84839422"],
                    None,
                )
            },
        )

    @staticmethod
    def get_list_memberships(
        credentials: TwitterCredentials,
        user_id: str,
        max_results: int,
        pagination_token: str | None,
        expansions: ListExpansionsFilter | None,
        user_fields: TweetUserFieldsFilter | None,
        list_fields: ListFieldsFilter | None,
    ):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            params = {
                "id": user_id,
                "max_results": max_results,
                "pagination_token": (
                    None if pagination_token == "" else pagination_token
                ),
                "user_auth": False,
            }

            params = (
                ListExpansionsBuilder(params)
                .add_expansions(expansions)
                .add_user_fields(user_fields)
                .add_list_fields(list_fields)
                .build()
            )

            response = cast(Response, client.get_list_memberships(**params))

            meta = {}
            next_token = None
            list_ids = []

            if response.meta:
                meta = response.meta
                next_token = meta.get("next_token")

            included = IncludesSerializer.serialize(response.includes)
            data = ResponseDataSerializer.serialize_list(response.data)

            if response.data:
                list_ids = [str(lst.id) for lst in response.data]
                return data, included, meta, list_ids, next_token

            raise Exception("List memberships not found")

        except tweepy.TweepyException:
            raise
        except Exception:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            data, included, meta, list_ids, next_token = self.get_list_memberships(
                credentials,
                input_data.user_id,
                input_data.max_results,
                input_data.pagination_token,
                input_data.expansions,
                input_data.user_fields,
                input_data.list_fields,
            )

            if list_ids:
                yield "list_ids", list_ids
            if next_token:
                yield "next_token", next_token
            if data:
                yield "data", data
            if included:
                yield "included", included
            if meta:
                yield "meta", meta

        except Exception as e:
            yield "error", handle_tweepy_exception(e)
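Editor's note, not part of the diff above: the list blocks in this change all follow the same pagination pattern (pass a `pagination_token`, read `meta["next_token"]` back). A minimal sketch of that loop against a plain tweepy v4 `Client` is shown below; `BEARER_TOKEN` and `LIST_ID` are placeholders, and this is an illustration of the pattern, not code from the repository.

# Illustrative pagination sketch (assumes tweepy v4; BEARER_TOKEN and LIST_ID are placeholders)
import tweepy

client = tweepy.Client(bearer_token="BEARER_TOKEN")

pagination_token = None
while True:
    # Same call the block's get_list_members helper builds via UserExpansionsBuilder
    response = client.get_list_members(
        id="LIST_ID",
        max_results=10,
        pagination_token=pagination_token,
        user_auth=False,
    )
    for user in response.data or []:
        print(user.id, user.username)
    # next_token is absent on the last page, which ends the loop
    pagination_token = (response.meta or {}).get("next_token")
    if not pagination_token:
        break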
@@ -0,0 +1,215 @@
from typing import cast

import tweepy
from tweepy.client import Response

from backend.blocks.twitter._auth import (
    TEST_CREDENTIALS,
    TEST_CREDENTIALS_INPUT,
    TwitterCredentials,
    TwitterCredentialsField,
    TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import TweetExpansionsBuilder
from backend.blocks.twitter._serializer import (
    IncludesSerializer,
    ResponseDataSerializer,
)
from backend.blocks.twitter._types import (
    ExpansionFilter,
    TweetExpansionInputs,
    TweetFieldsFilter,
    TweetMediaFieldsFilter,
    TweetPlaceFieldsFilter,
    TweetPollFieldsFilter,
    TweetUserFieldsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField


class TwitterGetListTweetsBlock(Block):
    """
    Gets tweets from a specified Twitter list
    """

    class Input(TweetExpansionInputs):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["tweet.read", "offline.access"]
        )

        list_id: str = SchemaField(
            description="The ID of the List whose Tweets you would like to retrieve",
            placeholder="Enter list ID",
            required=True,
        )

        max_results: int = SchemaField(
            description="Maximum number of results per page (1-100)",
            placeholder="Enter max results",
            default=10,
            advanced=True,
        )

        pagination_token: str | None = SchemaField(
            description="Token for paginating through results",
            placeholder="Enter pagination token",
            default="",
            advanced=True,
        )

    class Output(BlockSchema):
        # Common outputs
        tweet_ids: list[str] = SchemaField(description="List of tweet IDs")
        texts: list[str] = SchemaField(description="List of tweet texts")
        next_token: str = SchemaField(description="Token for next page of results")

        # Complete outputs
        data: list[dict] = SchemaField(description="Complete list tweets data")
        included: dict = SchemaField(
            description="Additional data requested via expansions"
        )
        meta: dict = SchemaField(
            description="Response metadata including pagination tokens"
        )
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="6657edb0-a62f-11ef-8c10-0326d832467d",
            description="This block retrieves tweets from a specified Twitter list.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterGetListTweetsBlock.Input,
            output_schema=TwitterGetListTweetsBlock.Output,
            test_input={
                "list_id": "84839422",
                "max_results": 1,
                "pagination_token": None,
                "credentials": TEST_CREDENTIALS_INPUT,
                "expansions": None,
                "media_fields": None,
                "place_fields": None,
                "poll_fields": None,
                "tweet_fields": None,
                "user_fields": None,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("tweet_ids", ["1234567890"]),
                ("texts", ["Test tweet"]),
                ("data", [{"id": "1234567890", "text": "Test tweet"}]),
            ],
            test_mock={
                "get_list_tweets": lambda *args, **kwargs: (
                    [{"id": "1234567890", "text": "Test tweet"}],
                    {},
                    {},
                    ["1234567890"],
                    ["Test tweet"],
                    None,
                )
            },
        )

    @staticmethod
    def get_list_tweets(
        credentials: TwitterCredentials,
        list_id: str,
        max_results: int,
        pagination_token: str | None,
        expansions: ExpansionFilter | None,
        media_fields: TweetMediaFieldsFilter | None,
        place_fields: TweetPlaceFieldsFilter | None,
        poll_fields: TweetPollFieldsFilter | None,
        tweet_fields: TweetFieldsFilter | None,
        user_fields: TweetUserFieldsFilter | None,
    ):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            params = {
                "id": list_id,
                "max_results": max_results,
                "pagination_token": (
                    None if pagination_token == "" else pagination_token
                ),
                "user_auth": False,
            }

            params = (
                TweetExpansionsBuilder(params)
                .add_expansions(expansions)
                .add_media_fields(media_fields)
                .add_place_fields(place_fields)
                .add_poll_fields(poll_fields)
                .add_tweet_fields(tweet_fields)
                .add_user_fields(user_fields)
                .build()
            )

            response = cast(Response, client.get_list_tweets(**params))

            meta = {}
            tweet_ids = []
            texts = []
            next_token = None

            if response.meta:
                meta = response.meta
                next_token = meta.get("next_token")

            included = IncludesSerializer.serialize(response.includes)
            data = ResponseDataSerializer.serialize_list(response.data)

            if response.data:
                tweet_ids = [str(item.id) for item in response.data]
                texts = [item.text for item in response.data]

                return data, included, meta, tweet_ids, texts, next_token

            raise Exception("No tweets found in this list")

        except tweepy.TweepyException:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            list_data, included, meta, tweet_ids, texts, next_token = (
                self.get_list_tweets(
                    credentials,
                    input_data.list_id,
                    input_data.max_results,
                    input_data.pagination_token,
                    input_data.expansions,
                    input_data.media_fields,
                    input_data.place_fields,
                    input_data.poll_fields,
                    input_data.tweet_fields,
                    input_data.user_fields,
                )
            )

            if tweet_ids:
                yield "tweet_ids", tweet_ids
            if texts:
                yield "texts", texts
            if next_token:
                yield "next_token", next_token
            if list_data:
                yield "data", list_data
            if included:
                yield "included", included
            if meta:
                yield "meta", meta

        except Exception as e:
            yield "error", handle_tweepy_exception(e)
@@ -0,0 +1,275 @@
from typing import cast

import tweepy
from tweepy.client import Response

from backend.blocks.twitter._auth import (
    TEST_CREDENTIALS,
    TEST_CREDENTIALS_INPUT,
    TwitterCredentials,
    TwitterCredentialsField,
    TwitterCredentialsInput,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField


class TwitterDeleteListBlock(Block):
    """
    Deletes a Twitter List owned by the authenticated user
    """

    class Input(BlockSchema):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["list.write", "offline.access"]
        )

        list_id: str = SchemaField(
            description="The ID of the List to be deleted",
            placeholder="Enter list ID",
            required=True,
        )

    class Output(BlockSchema):
        success: bool = SchemaField(description="Whether the deletion was successful")
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="843c6892-a62f-11ef-a5c8-b71239a78d3b",
            description="This block deletes a specified Twitter List owned by the authenticated user.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterDeleteListBlock.Input,
            output_schema=TwitterDeleteListBlock.Output,
            test_input={"list_id": "1234567890", "credentials": TEST_CREDENTIALS_INPUT},
            test_credentials=TEST_CREDENTIALS,
            test_output=[("success", True)],
            test_mock={"delete_list": lambda *args, **kwargs: True},
        )

    @staticmethod
    def delete_list(credentials: TwitterCredentials, list_id: str):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            client.delete_list(id=list_id, user_auth=False)
            return True

        except tweepy.TweepyException:
            raise
        except Exception:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            success = self.delete_list(credentials, input_data.list_id)
            yield "success", success

        except Exception as e:
            yield "error", handle_tweepy_exception(e)


class TwitterUpdateListBlock(Block):
    """
    Updates a Twitter List owned by the authenticated user
    """

    class Input(BlockSchema):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["list.write", "offline.access"]
        )

        list_id: str = SchemaField(
            description="The ID of the List to be updated",
            placeholder="Enter list ID",
            advanced=False,
        )

        name: str = SchemaField(
            description="New name for the List",
            placeholder="Enter list name",
            default="",
            advanced=False,
        )

        description: str = SchemaField(
            description="New description for the List",
            placeholder="Enter list description",
            default="",
            advanced=False,
        )

    class Output(BlockSchema):
        success: bool = SchemaField(description="Whether the update was successful")
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="7d12630a-a62f-11ef-90c9-8f5a996612c3",
            description="This block updates a specified Twitter List owned by the authenticated user.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterUpdateListBlock.Input,
            output_schema=TwitterUpdateListBlock.Output,
            test_input={
                "list_id": "1234567890",
                "name": "Updated List Name",
                "description": "Updated List Description",
                "private": True,
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[("success", True)],
            test_mock={"update_list": lambda *args, **kwargs: True},
        )

    @staticmethod
    def update_list(
        credentials: TwitterCredentials, list_id: str, name: str, description: str
    ):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            client.update_list(
                id=list_id,
                name=None if name == "" else name,
                description=None if description == "" else description,
                user_auth=False,
            )
            return True

        except tweepy.TweepyException:
            raise
        except Exception:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            success = self.update_list(
                credentials,
                input_data.list_id,
                input_data.name,
                input_data.description,
            )
            yield "success", success

        except Exception as e:
            yield "error", handle_tweepy_exception(e)


class TwitterCreateListBlock(Block):
    """
    Creates a Twitter List owned by the authenticated user
    """

    class Input(BlockSchema):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["list.write", "offline.access"]
        )

        name: str = SchemaField(
            description="The name of the List to be created",
            placeholder="Enter list name",
            advanced=False,
            default="",
        )

        description: str = SchemaField(
            description="Description of the List",
            placeholder="Enter list description",
            advanced=False,
            default="",
        )

        private: bool = SchemaField(
            description="Whether the List should be private",
            advanced=False,
            default=False,
        )

    class Output(BlockSchema):
        url: str = SchemaField(description="URL of the created list")
        list_id: str = SchemaField(description="ID of the created list")
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="724148ba-a62f-11ef-89ba-5349b813ef5f",
            description="This block creates a new Twitter List for the authenticated user.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterCreateListBlock.Input,
            output_schema=TwitterCreateListBlock.Output,
            test_input={
                "name": "New List Name",
                "description": "New List Description",
                "private": True,
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("list_id", "1234567890"),
                ("url", "https://twitter.com/i/lists/1234567890"),
            ],
            test_mock={"create_list": lambda *args, **kwargs: ("1234567890")},
        )

    @staticmethod
    def create_list(
        credentials: TwitterCredentials, name: str, description: str, private: bool
    ):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            response = cast(
                Response,
                client.create_list(
                    name=None if name == "" else name,
                    description=None if description == "" else description,
                    private=private,
                    user_auth=False,
                ),
            )

            list_id = str(response.data["id"])

            return list_id

        except tweepy.TweepyException:
            raise
        except Exception:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            list_id = self.create_list(
                credentials, input_data.name, input_data.description, input_data.private
            )
            yield "list_id", list_id
            yield "url", f"https://twitter.com/i/lists/{list_id}"

        except Exception as e:
            yield "error", handle_tweepy_exception(e)
@@ -0,0 +1,283 @@
from typing import cast

import tweepy
from tweepy.client import Response

from backend.blocks.twitter._auth import (
    TEST_CREDENTIALS,
    TEST_CREDENTIALS_INPUT,
    TwitterCredentials,
    TwitterCredentialsField,
    TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import ListExpansionsBuilder
from backend.blocks.twitter._serializer import (
    IncludesSerializer,
    ResponseDataSerializer,
)
from backend.blocks.twitter._types import (
    ListExpansionInputs,
    ListExpansionsFilter,
    ListFieldsFilter,
    TweetUserFieldsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField


class TwitterUnpinListBlock(Block):
    """
    Enables the authenticated user to unpin a List.
    """

    class Input(BlockSchema):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["list.write", "users.read", "tweet.read", "offline.access"]
        )

        list_id: str = SchemaField(
            description="The ID of the List to unpin",
            placeholder="Enter list ID",
            required=True,
        )

    class Output(BlockSchema):
        success: bool = SchemaField(description="Whether the unpin was successful")
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="a099c034-a62f-11ef-9622-47d0ceb73555",
            description="This block allows the authenticated user to unpin a specified List.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterUnpinListBlock.Input,
            output_schema=TwitterUnpinListBlock.Output,
            test_input={"list_id": "123456789", "credentials": TEST_CREDENTIALS_INPUT},
            test_credentials=TEST_CREDENTIALS,
            test_output=[("success", True)],
            test_mock={"unpin_list": lambda *args, **kwargs: True},
        )

    @staticmethod
    def unpin_list(credentials: TwitterCredentials, list_id: str):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            client.unpin_list(list_id=list_id, user_auth=False)

            return True

        except tweepy.TweepyException:
            raise
        except Exception:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            success = self.unpin_list(credentials, input_data.list_id)
            yield "success", success

        except Exception as e:
            yield "error", handle_tweepy_exception(e)


class TwitterPinListBlock(Block):
    """
    Enables the authenticated user to pin a List.
    """

    class Input(BlockSchema):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["list.write", "users.read", "tweet.read", "offline.access"]
        )

        list_id: str = SchemaField(
            description="The ID of the List to pin",
            placeholder="Enter list ID",
            required=True,
        )

    class Output(BlockSchema):
        success: bool = SchemaField(description="Whether the pin was successful")
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="8ec16e48-a62f-11ef-9f35-f3d6de43a802",
            description="This block allows the authenticated user to pin a specified List.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterPinListBlock.Input,
            output_schema=TwitterPinListBlock.Output,
            test_input={"list_id": "123456789", "credentials": TEST_CREDENTIALS_INPUT},
            test_credentials=TEST_CREDENTIALS,
            test_output=[("success", True)],
            test_mock={"pin_list": lambda *args, **kwargs: True},
        )

    @staticmethod
    def pin_list(credentials: TwitterCredentials, list_id: str):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            client.pin_list(list_id=list_id, user_auth=False)

            return True

        except tweepy.TweepyException:
            raise
        except Exception:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            success = self.pin_list(credentials, input_data.list_id)
            yield "success", success

        except Exception as e:
            yield "error", handle_tweepy_exception(e)


class TwitterGetPinnedListsBlock(Block):
    """
    Returns the Lists pinned by the authenticated user.
    """

    class Input(ListExpansionInputs):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["lists.read", "users.read", "offline.access"]
        )

    class Output(BlockSchema):
        list_ids: list[str] = SchemaField(description="List IDs of the pinned lists")
        list_names: list[str] = SchemaField(
            description="List names of the pinned lists"
        )

        data: list[dict] = SchemaField(
            description="Response data containing pinned lists"
        )
        included: dict = SchemaField(
            description="Additional data requested via expansions"
        )
        meta: dict = SchemaField(description="Metadata about the response")
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="97e03aae-a62f-11ef-bc53-5b89cb02888f",
            description="This block returns the Lists pinned by the authenticated user.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterGetPinnedListsBlock.Input,
            output_schema=TwitterGetPinnedListsBlock.Output,
            test_input={
                "expansions": None,
                "list_fields": None,
                "user_fields": None,
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("list_ids", ["84839422"]),
                ("list_names", ["Twitter List"]),
                ("data", [{"id": "84839422", "name": "Twitter List"}]),
            ],
            test_mock={
                "get_pinned_lists": lambda *args, **kwargs: (
                    [{"id": "84839422", "name": "Twitter List"}],
                    {},
                    {},
                    ["84839422"],
                    ["Twitter List"],
                )
            },
        )

    @staticmethod
    def get_pinned_lists(
        credentials: TwitterCredentials,
        expansions: ListExpansionsFilter | None,
        user_fields: TweetUserFieldsFilter | None,
        list_fields: ListFieldsFilter | None,
    ):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            params = {"user_auth": False}

            params = (
                ListExpansionsBuilder(params)
                .add_expansions(expansions)
                .add_user_fields(user_fields)
                .add_list_fields(list_fields)
                .build()
            )

            response = cast(Response, client.get_pinned_lists(**params))

            meta = {}
            list_ids = []
            list_names = []

            if response.meta:
                meta = response.meta

            included = IncludesSerializer.serialize(response.includes)
            data = ResponseDataSerializer.serialize_list(response.data)

            if response.data:
                list_ids = [str(item.id) for item in response.data]
                list_names = [item.name for item in response.data]
                return data, included, meta, list_ids, list_names

            raise Exception("Lists not found")

        except tweepy.TweepyException:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            list_data, included, meta, list_ids, list_names = self.get_pinned_lists(
                credentials,
                input_data.expansions,
                input_data.user_fields,
                input_data.list_fields,
            )

            if list_ids:
                yield "list_ids", list_ids
            if list_names:
                yield "list_names", list_names
            if list_data:
                yield "data", list_data
            if included:
                yield "included", included
            if meta:
                yield "meta", meta

        except Exception as e:
            yield "error", handle_tweepy_exception(e)
@@ -0,0 +1,195 @@
from typing import cast

import tweepy
from tweepy.client import Response

from backend.blocks.twitter._auth import (
    TEST_CREDENTIALS,
    TEST_CREDENTIALS_INPUT,
    TwitterCredentials,
    TwitterCredentialsField,
    TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import SpaceExpansionsBuilder
from backend.blocks.twitter._serializer import (
    IncludesSerializer,
    ResponseDataSerializer,
)
from backend.blocks.twitter._types import (
    SpaceExpansionInputs,
    SpaceExpansionsFilter,
    SpaceFieldsFilter,
    SpaceStatesFilter,
    TweetUserFieldsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField


class TwitterSearchSpacesBlock(Block):
    """
    Returns live or scheduled Spaces matching specified search terms [for a week only]
    """

    class Input(SpaceExpansionInputs):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["spaces.read", "users.read", "tweet.read", "offline.access"]
        )

        query: str = SchemaField(
            description="Search term to find in Space titles",
            placeholder="Enter search query",
        )

        max_results: int = SchemaField(
            description="Maximum number of results to return (1-100)",
            placeholder="Enter max results",
            default=10,
            advanced=True,
        )

        state: SpaceStatesFilter = SchemaField(
            description="Type of Spaces to return (live, scheduled, or all)",
            placeholder="Enter state filter",
            default=SpaceStatesFilter.all,
        )

    class Output(BlockSchema):
        # Common outputs that user commonly uses
        ids: list[str] = SchemaField(description="List of space IDs")
        titles: list[str] = SchemaField(description="List of space titles")
        host_ids: list = SchemaField(description="List of host IDs")
        next_token: str = SchemaField(description="Next token for pagination")

        # Complete outputs for advanced use
        data: list[dict] = SchemaField(description="Complete space data")
        includes: dict = SchemaField(
            description="Additional data requested via expansions"
        )
        meta: dict = SchemaField(description="Metadata including pagination info")

        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="aaefdd48-a62f-11ef-a73c-3f44df63e276",
            description="This block searches for Twitter Spaces based on specified terms.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterSearchSpacesBlock.Input,
            output_schema=TwitterSearchSpacesBlock.Output,
            test_input={
                "query": "tech",
                "max_results": 1,
                "state": "live",
                "credentials": TEST_CREDENTIALS_INPUT,
                "expansions": None,
                "space_fields": None,
                "user_fields": None,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("ids", ["1234"]),
                ("titles", ["Tech Talk"]),
                ("host_ids", ["5678"]),
                ("data", [{"id": "1234", "title": "Tech Talk", "host_ids": ["5678"]}]),
            ],
            test_mock={
                "search_spaces": lambda *args, **kwargs: (
                    [{"id": "1234", "title": "Tech Talk", "host_ids": ["5678"]}],
                    {},
                    {},
                    ["1234"],
                    ["Tech Talk"],
                    ["5678"],
                    None,
                )
            },
        )

    @staticmethod
    def search_spaces(
        credentials: TwitterCredentials,
        query: str,
        max_results: int,
        state: SpaceStatesFilter,
        expansions: SpaceExpansionsFilter | None,
        space_fields: SpaceFieldsFilter | None,
        user_fields: TweetUserFieldsFilter | None,
    ):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            params = {"query": query, "max_results": max_results, "state": state.value}

            params = (
                SpaceExpansionsBuilder(params)
                .add_expansions(expansions)
                .add_space_fields(space_fields)
                .add_user_fields(user_fields)
                .build()
            )

            response = cast(Response, client.search_spaces(**params))

            meta = {}
            next_token = ""
            if response.meta:
                meta = response.meta
                if "next_token" in meta:
                    next_token = meta["next_token"]

            included = IncludesSerializer.serialize(response.includes)
            data = ResponseDataSerializer.serialize_list(response.data)

            if response.data:
                ids = [str(space["id"]) for space in response.data]
                titles = [space["title"] for space in data]
                host_ids = [space["host_ids"] for space in data]

                return data, included, meta, ids, titles, host_ids, next_token

            raise Exception("Spaces not found")

        except tweepy.TweepyException:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            data, included, meta, ids, titles, host_ids, next_token = (
                self.search_spaces(
                    credentials,
                    input_data.query,
                    input_data.max_results,
                    input_data.state,
                    input_data.expansions,
                    input_data.space_fields,
                    input_data.user_fields,
                )
            )

            if ids:
                yield "ids", ids
            if titles:
                yield "titles", titles
            if host_ids:
                yield "host_ids", host_ids
            if next_token:
                yield "next_token", next_token
            if data:
                yield "data", data
            if included:
                yield "includes", included
            if meta:
                yield "meta", meta

        except Exception as e:
            yield "error", handle_tweepy_exception(e)
@@ -0,0 +1,629 @@
from typing import cast

import tweepy
from tweepy.client import Response

from backend.blocks.twitter._auth import (
    TEST_CREDENTIALS,
    TEST_CREDENTIALS_INPUT,
    TwitterCredentials,
    TwitterCredentialsField,
    TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import (
    SpaceExpansionsBuilder,
    TweetExpansionsBuilder,
    UserExpansionsBuilder,
)
from backend.blocks.twitter._serializer import (
    IncludesSerializer,
    ResponseDataSerializer,
)
from backend.blocks.twitter._types import (
    ExpansionFilter,
    SpaceExpansionInputs,
    SpaceExpansionsFilter,
    SpaceFieldsFilter,
    TweetExpansionInputs,
    TweetFieldsFilter,
    TweetMediaFieldsFilter,
    TweetPlaceFieldsFilter,
    TweetPollFieldsFilter,
    TweetUserFieldsFilter,
    UserExpansionInputs,
    UserExpansionsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField


class TwitterGetSpacesBlock(Block):
    """
    Gets information about multiple Twitter Spaces specified by Space IDs or creator user IDs
    """

    class Input(SpaceExpansionInputs):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["spaces.read", "users.read", "offline.access"]
        )

        space_ids: list[str] = SchemaField(
            description="List of Space IDs to lookup (up to 100)",
            placeholder="Enter Space IDs",
            default=[],
            advanced=False,
        )

        user_ids: list[str] = SchemaField(
            description="List of user IDs to lookup their Spaces (up to 100)",
            placeholder="Enter user IDs",
            default=[],
            advanced=False,
        )

    class Output(BlockSchema):
        # Common outputs
        ids: list[str] = SchemaField(description="List of space IDs")
        titles: list[str] = SchemaField(description="List of space titles")

        # Complete outputs for advanced use
        data: list[dict] = SchemaField(description="Complete space data")
        includes: dict = SchemaField(
            description="Additional data requested via expansions"
        )
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="d75bd7d8-a62f-11ef-b0d8-c7a9496f617f",
            description="This block retrieves information about multiple Twitter Spaces.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterGetSpacesBlock.Input,
            output_schema=TwitterGetSpacesBlock.Output,
            test_input={
                "space_ids": ["1DXxyRYNejbKM"],
                "user_ids": [],
                "credentials": TEST_CREDENTIALS_INPUT,
                "expansions": None,
                "space_fields": None,
                "user_fields": None,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("ids", ["1DXxyRYNejbKM"]),
                ("titles", ["Test Space"]),
                (
                    "data",
                    [
                        {
                            "id": "1DXxyRYNejbKM",
                            "title": "Test Space",
                            "host_id": "1234567",
                        }
                    ],
                ),
            ],
            test_mock={
                "get_spaces": lambda *args, **kwargs: (
                    [
                        {
                            "id": "1DXxyRYNejbKM",
                            "title": "Test Space",
                            "host_id": "1234567",
                        }
                    ],
                    {},
                    ["1DXxyRYNejbKM"],
                    ["Test Space"],
                )
            },
        )

    @staticmethod
    def get_spaces(
        credentials: TwitterCredentials,
        space_ids: list[str],
        user_ids: list[str],
        expansions: SpaceExpansionsFilter | None,
        space_fields: SpaceFieldsFilter | None,
        user_fields: TweetUserFieldsFilter | None,
    ):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            params = {
                "ids": None if space_ids == [] else space_ids,
                "user_ids": None if user_ids == [] else user_ids,
            }

            params = (
                SpaceExpansionsBuilder(params)
                .add_expansions(expansions)
                .add_space_fields(space_fields)
                .add_user_fields(user_fields)
                .build()
            )

            response = cast(Response, client.get_spaces(**params))

            ids = []
            titles = []

            included = IncludesSerializer.serialize(response.includes)

            if response.data:
                data = ResponseDataSerializer.serialize_list(response.data)
                ids = [space["id"] for space in data]
                titles = [space["title"] for space in data]

                return data, included, ids, titles

            raise Exception("No spaces found")

        except tweepy.TweepyException:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            data, included, ids, titles = self.get_spaces(
                credentials,
                input_data.space_ids,
                input_data.user_ids,
                input_data.expansions,
                input_data.space_fields,
                input_data.user_fields,
            )

            if ids:
                yield "ids", ids
            if titles:
                yield "titles", titles

            if data:
                yield "data", data
            if included:
                yield "includes", included

        except Exception as e:
            yield "error", handle_tweepy_exception(e)


class TwitterGetSpaceByIdBlock(Block):
    """
    Gets information about a single Twitter Space specified by Space ID
    """

    class Input(SpaceExpansionInputs):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["spaces.read", "users.read", "offline.access"]
        )

        space_id: str = SchemaField(
            description="Space ID to lookup",
            placeholder="Enter Space ID",
            required=True,
        )

    class Output(BlockSchema):
        # Common outputs
        id: str = SchemaField(description="Space ID")
        title: str = SchemaField(description="Space title")
        host_ids: list[str] = SchemaField(description="Host ID")

        # Complete outputs for advanced use
        data: dict = SchemaField(description="Complete space data")
        includes: dict = SchemaField(
            description="Additional data requested via expansions"
        )
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="c79700de-a62f-11ef-ab20-fb32bf9d5a9d",
            description="This block retrieves information about a single Twitter Space.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterGetSpaceByIdBlock.Input,
            output_schema=TwitterGetSpaceByIdBlock.Output,
            test_input={
                "space_id": "1DXxyRYNejbKM",
                "credentials": TEST_CREDENTIALS_INPUT,
                "expansions": None,
                "space_fields": None,
                "user_fields": None,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("id", "1DXxyRYNejbKM"),
                ("title", "Test Space"),
                ("host_ids", ["1234567"]),
                (
                    "data",
                    {
                        "id": "1DXxyRYNejbKM",
                        "title": "Test Space",
                        "host_ids": ["1234567"],
                    },
                ),
            ],
            test_mock={
                "get_space": lambda *args, **kwargs: (
                    {
                        "id": "1DXxyRYNejbKM",
                        "title": "Test Space",
                        "host_ids": ["1234567"],
                    },
                    {},
                )
            },
        )

    @staticmethod
    def get_space(
        credentials: TwitterCredentials,
        space_id: str,
        expansions: SpaceExpansionsFilter | None,
        space_fields: SpaceFieldsFilter | None,
        user_fields: TweetUserFieldsFilter | None,
    ):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            params = {
                "id": space_id,
            }

            params = (
                SpaceExpansionsBuilder(params)
                .add_expansions(expansions)
                .add_space_fields(space_fields)
                .add_user_fields(user_fields)
                .build()
            )

            response = cast(Response, client.get_space(**params))

            includes = {}
            if response.includes:
                for key, value in response.includes.items():
                    if isinstance(value, list):
                        includes[key] = [
                            item.data if hasattr(item, "data") else item
                            for item in value
                        ]
                    else:
                        includes[key] = value.data if hasattr(value, "data") else value

            data = {}
            if response.data:
                for key, value in response.data.items():
                    if isinstance(value, list):
                        data[key] = [
                            item.data if hasattr(item, "data") else item
                            for item in value
                        ]
                    else:
                        data[key] = value.data if hasattr(value, "data") else value

                return data, includes

            raise Exception("Space not found")

        except tweepy.TweepyException:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            space_data, includes = self.get_space(
                credentials,
                input_data.space_id,
                input_data.expansions,
                input_data.space_fields,
                input_data.user_fields,
            )

            # Common outputs
            if space_data:
                yield "id", space_data.get("id")
                yield "title", space_data.get("title")
                yield "host_ids", space_data.get("host_ids")

            if space_data:
                yield "data", space_data
            if includes:
                yield "includes", includes

        except Exception as e:
            yield "error", handle_tweepy_exception(e)


# Not tested yet, might have some problem
class TwitterGetSpaceBuyersBlock(Block):
    """
    Gets list of users who purchased a ticket to the requested Space
    """

    class Input(UserExpansionInputs):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["spaces.read", "users.read", "offline.access"]
        )

        space_id: str = SchemaField(
            description="Space ID to lookup buyers for",
            placeholder="Enter Space ID",
            required=True,
        )

    class Output(BlockSchema):
        # Common outputs
        buyer_ids: list[str] = SchemaField(description="List of buyer IDs")
        usernames: list[str] = SchemaField(description="List of buyer usernames")

        # Complete outputs for advanced use
        data: list[dict] = SchemaField(description="Complete space buyers data")
        includes: dict = SchemaField(
            description="Additional data requested via expansions"
        )
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="c1c121a8-a62f-11ef-8b0e-d7b85f96a46f",
            description="This block retrieves a list of users who purchased tickets to a Twitter Space.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterGetSpaceBuyersBlock.Input,
            output_schema=TwitterGetSpaceBuyersBlock.Output,
            test_input={
                "space_id": "1DXxyRYNejbKM",
                "credentials": TEST_CREDENTIALS_INPUT,
                "expansions": None,
                "user_fields": None,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("buyer_ids", ["2244994945"]),
                ("usernames", ["testuser"]),
                (
                    "data",
                    [{"id": "2244994945", "username": "testuser", "name": "Test User"}],
                ),
            ],
            test_mock={
                "get_space_buyers": lambda *args, **kwargs: (
                    [{"id": "2244994945", "username": "testuser", "name": "Test User"}],
                    {},
                    ["2244994945"],
                    ["testuser"],
                )
            },
        )

    @staticmethod
    def get_space_buyers(
        credentials: TwitterCredentials,
        space_id: str,
        expansions: UserExpansionsFilter | None,
        user_fields: TweetUserFieldsFilter | None,
    ):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            params = {
                "id": space_id,
            }

            params = (
                UserExpansionsBuilder(params)
                .add_expansions(expansions)
                .add_user_fields(user_fields)
                .build()
            )

            response = cast(Response, client.get_space_buyers(**params))

            included = IncludesSerializer.serialize(response.includes)

            if response.data:
                data = ResponseDataSerializer.serialize_list(response.data)
                buyer_ids = [buyer["id"] for buyer in data]
                usernames = [buyer["username"] for buyer in data]

                return data, included, buyer_ids, usernames

            raise Exception("No buyers found for this Space")

        except tweepy.TweepyException:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            buyers_data, included, buyer_ids, usernames = self.get_space_buyers(
                credentials,
                input_data.space_id,
                input_data.expansions,
                input_data.user_fields,
            )

            if buyer_ids:
                yield "buyer_ids", buyer_ids
            if usernames:
                yield "usernames", usernames

            if buyers_data:
                yield "data", buyers_data
            if included:
                yield "includes", included

        except Exception as e:
            yield "error", handle_tweepy_exception(e)


class TwitterGetSpaceTweetsBlock(Block):
    """
    Gets list of Tweets shared in the requested Space
    """

    class Input(TweetExpansionInputs):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["spaces.read", "users.read", "offline.access"]
        )

        space_id: str = SchemaField(
            description="Space ID to lookup tweets for",
            placeholder="Enter Space ID",
            required=True,
        )

    class Output(BlockSchema):
        # Common outputs
        tweet_ids: list[str] = SchemaField(description="List of tweet IDs")
        texts: list[str] = SchemaField(description="List of tweet texts")

        # Complete outputs for advanced use
        data: list[dict] = SchemaField(description="Complete space tweets data")
        includes: dict = SchemaField(
            description="Additional data requested via expansions"
        )
        meta: dict = SchemaField(description="Response metadata")
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="b69731e6-a62f-11ef-b2d4-1bf14dd6aee4",
            description="This block retrieves tweets shared in a Twitter Space.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterGetSpaceTweetsBlock.Input,
            output_schema=TwitterGetSpaceTweetsBlock.Output,
            test_input={
                "space_id": "1DXxyRYNejbKM",
                "credentials": TEST_CREDENTIALS_INPUT,
                "expansions": None,
                "media_fields": None,
                "place_fields": None,
                "poll_fields": None,
                "tweet_fields": None,
                "user_fields": None,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("tweet_ids", ["1234567890"]),
                ("texts", ["Test tweet"]),
                ("data", [{"id": "1234567890", "text": "Test tweet"}]),
            ],
            test_mock={
                "get_space_tweets": lambda *args, **kwargs: (
                    [{"id": "1234567890", "text": "Test tweet"}],  # data
                    {},
                    ["1234567890"],
                    ["Test tweet"],
                    {},
                )
            },
        )

    @staticmethod
    def get_space_tweets(
        credentials: TwitterCredentials,
        space_id: str,
        expansions: ExpansionFilter | None,
        media_fields: TweetMediaFieldsFilter | None,
        place_fields: TweetPlaceFieldsFilter | None,
        poll_fields: TweetPollFieldsFilter | None,
        tweet_fields: TweetFieldsFilter | None,
        user_fields: TweetUserFieldsFilter | None,
    ):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            params = {
                "id": space_id,
            }

            params = (
                TweetExpansionsBuilder(params)
                .add_expansions(expansions)
                .add_media_fields(media_fields)
                .add_place_fields(place_fields)
                .add_poll_fields(poll_fields)
                .add_tweet_fields(tweet_fields)
                .add_user_fields(user_fields)
                .build()
            )

            response = cast(Response, client.get_space_tweets(**params))

            included = IncludesSerializer.serialize(response.includes)

            if response.data:
                data = ResponseDataSerializer.serialize_list(response.data)
                tweet_ids = [str(tweet["id"]) for tweet in data]
                texts = [tweet["text"] for tweet in data]

                meta = response.meta or {}

                return data, included, tweet_ids, texts, meta

            raise Exception("No tweets found for this Space")

        except tweepy.TweepyException:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            tweets_data, included, tweet_ids, texts, meta = self.get_space_tweets(
                credentials,
                input_data.space_id,
                input_data.expansions,
                input_data.media_fields,
                input_data.place_fields,
                input_data.poll_fields,
                input_data.tweet_fields,
                input_data.user_fields,
            )

            if tweet_ids:
                yield "tweet_ids", tweet_ids
            if texts:
                yield "texts", texts

            if tweets_data:
                yield "data", tweets_data
            if included:
                yield "includes", included
            if meta:
                yield "meta", meta

        except Exception as e:
            yield "error", handle_tweepy_exception(e)
@@ -0,0 +1,20 @@
import tweepy


def handle_tweepy_exception(e: Exception) -> str:
    if isinstance(e, tweepy.BadRequest):
        return f"Bad Request (400): {str(e)}"
    elif isinstance(e, tweepy.Unauthorized):
        return f"Unauthorized (401): {str(e)}"
    elif isinstance(e, tweepy.Forbidden):
        return f"Forbidden (403): {str(e)}"
    elif isinstance(e, tweepy.NotFound):
        return f"Not Found (404): {str(e)}"
    elif isinstance(e, tweepy.TooManyRequests):
        return f"Too Many Requests (429): {str(e)}"
    elif isinstance(e, tweepy.TwitterServerError):
        return f"Twitter Server Error (5xx): {str(e)}"
    elif isinstance(e, tweepy.TweepyException):
        return f"Tweepy Error: {str(e)}"
    else:
        return f"Unexpected error: {str(e)}"
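Editor's note, not part of the diff above: every block in this change funnels failures through handle_tweepy_exception and yields the result on its "error" output. A minimal usage sketch is shown below; it assumes the repository's backend package is importable and that tweepy is installed, and BEARER_TOKEN is a placeholder.

# Illustrative sketch of the error-mapping helper (assumes this repo's backend package is on the path)
import tweepy

from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception

client = tweepy.Client(bearer_token="BEARER_TOKEN")
try:
    # A deliberately bogus tweet ID so the API call fails and tweepy raises
    client.get_tweet(id="0")
except Exception as e:
    # Prints a human-readable message such as "Bad Request (400): ..." or "Unauthorized (401): ..."
    print(handle_tweepy_exception(e))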
@@ -0,0 +1,372 @@
from typing import cast

import tweepy
from tweepy.client import Response

from backend.blocks.twitter._auth import (
    TEST_CREDENTIALS,
    TEST_CREDENTIALS_INPUT,
    TwitterCredentials,
    TwitterCredentialsField,
    TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import TweetExpansionsBuilder
from backend.blocks.twitter._serializer import (
    IncludesSerializer,
    ResponseDataSerializer,
)
from backend.blocks.twitter._types import (
    ExpansionFilter,
    TweetExpansionInputs,
    TweetFieldsFilter,
    TweetMediaFieldsFilter,
    TweetPlaceFieldsFilter,
    TweetPollFieldsFilter,
    TweetUserFieldsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField


class TwitterBookmarkTweetBlock(Block):
    """
    Bookmark a tweet on Twitter
    """

    class Input(BlockSchema):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["tweet.read", "bookmark.write", "users.read", "offline.access"]
        )

        tweet_id: str = SchemaField(
            description="ID of the tweet to bookmark",
            placeholder="Enter tweet ID",
        )

    class Output(BlockSchema):
        success: bool = SchemaField(description="Whether the bookmark was successful")
        error: str = SchemaField(description="Error message if the bookmark failed")

    def __init__(self):
        super().__init__(
            id="f33d67be-a62f-11ef-a797-ff83ec29ee8e",
            description="This block bookmarks a tweet on Twitter.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterBookmarkTweetBlock.Input,
            output_schema=TwitterBookmarkTweetBlock.Output,
            test_input={
                "tweet_id": "1234567890",
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("success", True),
            ],
            test_mock={"bookmark_tweet": lambda *args, **kwargs: True},
        )

    @staticmethod
    def bookmark_tweet(
        credentials: TwitterCredentials,
        tweet_id: str,
    ):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            client.bookmark(tweet_id)

            return True

        except tweepy.TweepyException:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            success = self.bookmark_tweet(credentials, input_data.tweet_id)
            yield "success", success
        except Exception as e:
            yield "error", handle_tweepy_exception(e)


class TwitterGetBookmarkedTweetsBlock(Block):
    """
    Get All your bookmarked tweets from Twitter
    """

    class Input(TweetExpansionInputs):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["tweet.read", "bookmark.read", "users.read", "offline.access"]
        )

        max_results: int = SchemaField(
            description="Maximum number of results to return (1-100)",
            placeholder="Enter max results",
            default=10,
            advanced=True,
        )

        pagination_token: str | None = SchemaField(
            description="Token for pagination",
            placeholder="Enter pagination token",
            default="",
            advanced=True,
        )

    class Output(BlockSchema):
        # Common Outputs that user commonly uses
        id: list[str] = SchemaField(description="All Tweet IDs")
        text: list[str] = SchemaField(description="All Tweet texts")
        userId: list[str] = SchemaField(description="IDs of the tweet authors")
        userName: list[str] = SchemaField(description="Usernames of the tweet authors")

        # Complete Outputs for advanced use
        data: list[dict] = SchemaField(description="Complete Tweet data")
        included: dict = SchemaField(
            description="Additional data that you have requested (Optional) via Expansions field"
        )
        meta: dict = SchemaField(
            description="Provides metadata such as pagination info (next_token) or result counts"
        )
        next_token: str = SchemaField(description="Next token for pagination")

        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="ed26783e-a62f-11ef-9a21-c77c57dd8a1f",
            description="This block retrieves bookmarked tweets from Twitter.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterGetBookmarkedTweetsBlock.Input,
            output_schema=TwitterGetBookmarkedTweetsBlock.Output,
            test_input={
                "max_results": 2,
                "pagination_token": None,
                "expansions": None,
                "media_fields": None,
                "place_fields": None,
                "poll_fields": None,
                "tweet_fields": None,
                "user_fields": None,
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("id", ["1234567890"]),
                ("text", ["Test tweet"]),
                ("userId", ["12345"]),
                ("userName", ["testuser"]),
                ("data", [{"id": "1234567890", "text": "Test tweet"}]),
            ],
            test_mock={
                "get_bookmarked_tweets": lambda *args, **kwargs: (
                    ["1234567890"],
                    ["Test tweet"],
                    ["12345"],
                    ["testuser"],
                    [{"id": "1234567890", "text": "Test tweet"}],
                    {},
                    {},
                    None,
                )
            },
        )

    @staticmethod
    def get_bookmarked_tweets(
        credentials: TwitterCredentials,
        max_results: int,
        pagination_token: str | None,
        expansions: ExpansionFilter | None,
        media_fields: TweetMediaFieldsFilter | None,
        place_fields: TweetPlaceFieldsFilter | None,
        poll_fields: TweetPollFieldsFilter | None,
        tweet_fields: TweetFieldsFilter | None,
        user_fields: TweetUserFieldsFilter | None,
    ):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            params = {
                "max_results": max_results,
                "pagination_token": (
                    None if pagination_token == "" else pagination_token
                ),
            }

            params = (
                TweetExpansionsBuilder(params)
                .add_expansions(expansions)
                .add_media_fields(media_fields)
                .add_place_fields(place_fields)
                .add_poll_fields(poll_fields)
                .add_tweet_fields(tweet_fields)
                .add_user_fields(user_fields)
                .build()
            )

            response = cast(
                Response,
                client.get_bookmarks(**params),
            )

            meta = {}
            tweet_ids = []
            tweet_texts = []
            user_ids = []
            user_names = []
            next_token = None

            if response.meta:
                meta = response.meta
                next_token = meta.get("next_token")

            included = IncludesSerializer.serialize(response.includes)
            data = ResponseDataSerializer.serialize_list(response.data)

            if response.data:
                tweet_ids = [str(tweet.id) for tweet in response.data]
                tweet_texts = [tweet.text for tweet in response.data]

                if "users" in included:
                    for user in included["users"]:
                        user_ids.append(str(user["id"]))
                        user_names.append(user["username"])

                return (
                    tweet_ids,
                    tweet_texts,
                    user_ids,
                    user_names,
                    data,
                    included,
                    meta,
                    next_token,
                )

            raise Exception("No bookmarked tweets found")

        except tweepy.TweepyException:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            ids, texts, user_ids, user_names, data, included, meta, next_token = (
                self.get_bookmarked_tweets(
                    credentials,
                    input_data.max_results,
                    input_data.pagination_token,
                    input_data.expansions,
                    input_data.media_fields,
                    input_data.place_fields,
                    input_data.poll_fields,
                    input_data.tweet_fields,
                    input_data.user_fields,
                )
            )
            if ids:
                yield "id", ids
            if texts:
                yield "text", texts
            if user_ids:
                yield "userId", user_ids
            if user_names:
                yield "userName", user_names
            if data:
                yield "data", data
            if included:
                yield "included", included
            if meta:
                yield "meta", meta
            if next_token:
                yield "next_token", next_token
        except Exception as e:
            yield "error", handle_tweepy_exception(e)


class TwitterRemoveBookmarkTweetBlock(Block):
    """
    Remove a bookmark for a tweet on Twitter
    """

    class Input(BlockSchema):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["tweet.read", "bookmark.write", "users.read", "offline.access"]
        )

        tweet_id: str = SchemaField(
            description="ID of the tweet to remove bookmark from",
            placeholder="Enter tweet ID",
        )

    class Output(BlockSchema):
        success: bool = SchemaField(
            description="Whether the bookmark was successfully removed"
        )
        error: str = SchemaField(
            description="Error message if the bookmark removal failed"
        )

    def __init__(self):
        super().__init__(
            id="e4100684-a62f-11ef-9be9-770cb41a2616",
            description="This block removes a bookmark from a tweet on Twitter.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterRemoveBookmarkTweetBlock.Input,
            output_schema=TwitterRemoveBookmarkTweetBlock.Output,
            test_input={
                "tweet_id": "1234567890",
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("success", True),
            ],
            test_mock={"remove_bookmark_tweet": lambda *args, **kwargs: True},
        )

    @staticmethod
    def remove_bookmark_tweet(
        credentials: TwitterCredentials,
        tweet_id: str,
    ):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            client.remove_bookmark(tweet_id)

            return True

        except tweepy.TweepyException:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            success = self.remove_bookmark_tweet(credentials, input_data.tweet_id)
            yield "success", success
        except Exception as e:
            yield "error", handle_tweepy_exception(e)
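As a rough usage sketch, a block's run() method is a generator of (output_name, value) pairs, so something like the following could drive it outside the platform's test harness; how input_data and credentials are constructed is omitted and assumed here:

block = TwitterBookmarkTweetBlock()
# Each yielded pair is either a declared output, e.g. ("success", True), or ("error", message).
for name, value in block.run(input_data, credentials=credentials):
    print(name, value)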
154
autogpt_platform/backend/backend/blocks/twitter/tweets/hide.py
Normal file
@@ -0,0 +1,154 @@
import tweepy

from backend.blocks.twitter._auth import (
    TEST_CREDENTIALS,
    TEST_CREDENTIALS_INPUT,
    TwitterCredentials,
    TwitterCredentialsField,
    TwitterCredentialsInput,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField


class TwitterHideReplyBlock(Block):
    """
    Hides a reply of one of your tweets
    """

    class Input(BlockSchema):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["tweet.read", "tweet.moderate.write", "users.read", "offline.access"]
        )

        tweet_id: str = SchemaField(
            description="ID of the tweet reply to hide",
            placeholder="Enter tweet ID",
        )

    class Output(BlockSchema):
        success: bool = SchemaField(description="Whether the operation was successful")
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="07d58b3e-a630-11ef-a030-93701d1a465e",
            description="This block hides a reply to a tweet.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterHideReplyBlock.Input,
            output_schema=TwitterHideReplyBlock.Output,
            test_input={
                "tweet_id": "1234567890",
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("success", True),
            ],
            test_mock={"hide_reply": lambda *args, **kwargs: True},
        )

    @staticmethod
    def hide_reply(
        credentials: TwitterCredentials,
        tweet_id: str,
    ):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            client.hide_reply(id=tweet_id, user_auth=False)

            return True

        except tweepy.TweepyException:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            success = self.hide_reply(
                credentials,
                input_data.tweet_id,
            )
            yield "success", success
        except Exception as e:
            yield "error", handle_tweepy_exception(e)


class TwitterUnhideReplyBlock(Block):
    """
    Unhides a reply to a tweet
    """

    class Input(BlockSchema):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["tweet.read", "tweet.moderate.write", "users.read", "offline.access"]
        )

        tweet_id: str = SchemaField(
            description="ID of the tweet reply to unhide",
            placeholder="Enter tweet ID",
        )

    class Output(BlockSchema):
        success: bool = SchemaField(description="Whether the operation was successful")
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="fcf9e4e4-a62f-11ef-9d85-57d3d06b616a",
            description="This block unhides a reply to a tweet.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterUnhideReplyBlock.Input,
            output_schema=TwitterUnhideReplyBlock.Output,
            test_input={
                "tweet_id": "1234567890",
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("success", True),
            ],
            test_mock={"unhide_reply": lambda *args, **kwargs: True},
        )

    @staticmethod
    def unhide_reply(
        credentials: TwitterCredentials,
        tweet_id: str,
    ):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            client.unhide_reply(id=tweet_id, user_auth=False)

            return True

        except tweepy.TweepyException:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            success = self.unhide_reply(
                credentials,
                input_data.tweet_id,
            )
            yield "success", success
        except Exception as e:
            yield "error", handle_tweepy_exception(e)
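The static helpers can also be exercised directly, mirroring what test_mock stubs out; a hypothetical check, with credentials setup not shown:

# Returns True on success; Tweepy errors propagate and are mapped by handle_tweepy_exception in run().
assert TwitterHideReplyBlock.hide_reply(credentials, tweet_id="1234567890") is True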
576
autogpt_platform/backend/backend/blocks/twitter/tweets/like.py
Normal file
@@ -0,0 +1,576 @@
|
||||
from typing import cast
|
||||
|
||||
import tweepy
|
||||
from tweepy.client import Response
|
||||
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
)
|
||||
from backend.blocks.twitter._builders import (
|
||||
TweetExpansionsBuilder,
|
||||
UserExpansionsBuilder,
|
||||
)
|
||||
from backend.blocks.twitter._serializer import (
|
||||
IncludesSerializer,
|
||||
ResponseDataSerializer,
|
||||
)
|
||||
from backend.blocks.twitter._types import (
|
||||
ExpansionFilter,
|
||||
TweetExpansionInputs,
|
||||
TweetFieldsFilter,
|
||||
TweetMediaFieldsFilter,
|
||||
TweetPlaceFieldsFilter,
|
||||
TweetPollFieldsFilter,
|
||||
TweetUserFieldsFilter,
|
||||
UserExpansionInputs,
|
||||
UserExpansionsFilter,
|
||||
)
|
||||
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
|
||||
|
||||
class TwitterLikeTweetBlock(Block):
|
||||
"""
|
||||
Likes a tweet
|
||||
"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["tweet.read", "like.write", "users.read", "offline.access"]
|
||||
)
|
||||
|
||||
tweet_id: str = SchemaField(
|
||||
description="ID of the tweet to like",
|
||||
placeholder="Enter tweet ID",
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
success: bool = SchemaField(description="Whether the operation was successful")
|
||||
error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="4d0b4c5c-a630-11ef-8e08-1b14c507b347",
|
||||
description="This block likes a tweet.",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterLikeTweetBlock.Input,
|
||||
output_schema=TwitterLikeTweetBlock.Output,
|
||||
test_input={
|
||||
"tweet_id": "1234567890",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("success", True),
|
||||
],
|
||||
test_mock={"like_tweet": lambda *args, **kwargs: True},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def like_tweet(
|
||||
credentials: TwitterCredentials,
|
||||
tweet_id: str,
|
||||
):
|
||||
try:
|
||||
client = tweepy.Client(
|
||||
bearer_token=credentials.access_token.get_secret_value()
|
||||
)
|
||||
|
||||
client.like(tweet_id=tweet_id, user_auth=False)
|
||||
|
||||
return True
|
||||
|
||||
except tweepy.TweepyException:
|
||||
raise
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: TwitterCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
success = self.like_tweet(
|
||||
credentials,
|
||||
input_data.tweet_id,
|
||||
)
|
||||
yield "success", success
|
||||
except Exception as e:
|
||||
yield "error", handle_tweepy_exception(e)
|
||||
|
||||
|
||||
class TwitterGetLikingUsersBlock(Block):
|
||||
"""
|
||||
Gets information about users who liked a one of your tweet
|
||||
"""
|
||||
|
||||
class Input(UserExpansionInputs):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["tweet.read", "users.read", "like.read", "offline.access"]
|
||||
)
|
||||
|
||||
tweet_id: str = SchemaField(
|
||||
description="ID of the tweet to get liking users for",
|
||||
placeholder="Enter tweet ID",
|
||||
)
|
||||
max_results: int = SchemaField(
|
||||
description="Maximum number of results to return (1-100)",
|
||||
placeholder="Enter max results",
|
||||
default=10,
|
||||
advanced=True,
|
||||
)
|
||||
pagination_token: str | None = SchemaField(
|
||||
description="Token for getting next/previous page of results",
|
||||
placeholder="Enter pagination token",
|
||||
default="",
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
# Common Outputs that user commonly uses
|
||||
id: list[str] = SchemaField(description="All User IDs who liked the tweet")
|
||||
username: list[str] = SchemaField(
|
||||
description="All User usernames who liked the tweet"
|
||||
)
|
||||
next_token: str = SchemaField(description="Next token for pagination")
|
||||
|
||||
# Complete Outputs for advanced use
|
||||
data: list[dict] = SchemaField(description="Complete Tweet data")
|
||||
included: dict = SchemaField(
|
||||
description="Additional data that you have requested (Optional) via Expansions field"
|
||||
)
|
||||
meta: dict = SchemaField(
|
||||
description="Provides metadata such as pagination info (next_token) or result counts"
|
||||
)
|
||||
|
||||
# error
|
||||
error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="34275000-a630-11ef-b01e-5f00d9077c08",
|
||||
description="This block gets information about users who liked a tweet.",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetLikingUsersBlock.Input,
|
||||
output_schema=TwitterGetLikingUsersBlock.Output,
|
||||
test_input={
|
||||
"tweet_id": "1234567890",
|
||||
"max_results": 1,
|
||||
"pagination_token": None,
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"expansions": None,
|
||||
"tweet_fields": None,
|
||||
"user_fields": None,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("id", ["1234567890"]),
|
||||
("username", ["testuser"]),
|
||||
("data", [{"id": "1234567890", "username": "testuser"}]),
|
||||
],
|
||||
test_mock={
|
||||
"get_liking_users": lambda *args, **kwargs: (
|
||||
["1234567890"],
|
||||
["testuser"],
|
||||
[{"id": "1234567890", "username": "testuser"}],
|
||||
{},
|
||||
{},
|
||||
None,
|
||||
)
|
||||
},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_liking_users(
|
||||
credentials: TwitterCredentials,
|
||||
tweet_id: str,
|
||||
max_results: int,
|
||||
pagination_token: str | None,
|
||||
expansions: UserExpansionsFilter | None,
|
||||
tweet_fields: TweetFieldsFilter | None,
|
||||
user_fields: TweetUserFieldsFilter | None,
|
||||
):
|
||||
try:
|
||||
client = tweepy.Client(
|
||||
bearer_token=credentials.access_token.get_secret_value()
|
||||
)
|
||||
|
||||
params = {
|
||||
"id": tweet_id,
|
||||
"max_results": max_results,
|
||||
"pagination_token": (
|
||||
None if pagination_token == "" else pagination_token
|
||||
),
|
||||
"user_auth": False,
|
||||
}
|
||||
|
||||
params = (
|
||||
UserExpansionsBuilder(params)
|
||||
.add_expansions(expansions)
|
||||
.add_tweet_fields(tweet_fields)
|
||||
.add_user_fields(user_fields)
|
||||
.build()
|
||||
)
|
||||
|
||||
response = cast(Response, client.get_liking_users(**params))
|
||||
|
||||
if not response.data and not response.meta:
|
||||
raise Exception("No liking users found")
|
||||
|
||||
meta = {}
|
||||
user_ids = []
|
||||
usernames = []
|
||||
next_token = None
|
||||
|
||||
if response.meta:
|
||||
meta = response.meta
|
||||
next_token = meta.get("next_token")
|
||||
|
||||
included = IncludesSerializer.serialize(response.includes)
|
||||
data = ResponseDataSerializer.serialize_list(response.data)
|
||||
|
||||
if response.data:
|
||||
user_ids = [str(user.id) for user in response.data]
|
||||
usernames = [user.username for user in response.data]
|
||||
|
||||
return user_ids, usernames, data, included, meta, next_token
|
||||
|
||||
raise Exception("No liking users found")
|
||||
|
||||
except tweepy.TweepyException:
|
||||
raise
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: TwitterCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
ids, usernames, data, included, meta, next_token = self.get_liking_users(
|
||||
credentials,
|
||||
input_data.tweet_id,
|
||||
input_data.max_results,
|
||||
input_data.pagination_token,
|
||||
input_data.expansions,
|
||||
input_data.tweet_fields,
|
||||
input_data.user_fields,
|
||||
)
|
||||
if ids:
|
||||
yield "id", ids
|
||||
if usernames:
|
||||
yield "username", usernames
|
||||
if next_token:
|
||||
yield "next_token", next_token
|
||||
if data:
|
||||
yield "data", data
|
||||
if included:
|
||||
yield "included", included
|
||||
if meta:
|
||||
yield "meta", meta
|
||||
except Exception as e:
|
||||
yield "error", handle_tweepy_exception(e)
|
||||
|
||||
|
||||
class TwitterGetLikedTweetsBlock(Block):
|
||||
"""
|
||||
Gets information about tweets liked by you
|
||||
"""
|
||||
|
||||
class Input(TweetExpansionInputs):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["tweet.read", "users.read", "like.read", "offline.access"]
|
||||
)
|
||||
|
||||
user_id: str = SchemaField(
|
||||
description="ID of the user to get liked tweets for",
|
||||
placeholder="Enter user ID",
|
||||
)
|
||||
max_results: int = SchemaField(
|
||||
description="Maximum number of results to return (5-100)",
|
||||
placeholder="100",
|
||||
default=10,
|
||||
advanced=True,
|
||||
)
|
||||
pagination_token: str | None = SchemaField(
|
||||
description="Token for getting next/previous page of results",
|
||||
placeholder="Enter pagination token",
|
||||
default="",
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
# Common Outputs that user commonly uses
|
||||
ids: list[str] = SchemaField(description="All Tweet IDs")
|
||||
texts: list[str] = SchemaField(description="All Tweet texts")
|
||||
userIds: list[str] = SchemaField(
|
||||
description="List of user ids that authored the tweets"
|
||||
)
|
||||
userNames: list[str] = SchemaField(
|
||||
description="List of user names that authored the tweets"
|
||||
)
|
||||
next_token: str = SchemaField(description="Next token for pagination")
|
||||
|
||||
# Complete Outputs for advanced use
|
||||
data: list[dict] = SchemaField(description="Complete Tweet data")
|
||||
included: dict = SchemaField(
|
||||
description="Additional data that you have requested (Optional) via Expansions field"
|
||||
)
|
||||
meta: dict = SchemaField(
|
||||
description="Provides metadata such as pagination info (next_token) or result counts"
|
||||
)
|
||||
|
||||
# error
|
||||
error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="292e7c78-a630-11ef-9f40-df5dffaca106",
|
||||
description="This block gets information about tweets liked by a user.",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetLikedTweetsBlock.Input,
|
||||
output_schema=TwitterGetLikedTweetsBlock.Output,
|
||||
test_input={
|
||||
"user_id": "1234567890",
|
||||
"max_results": 2,
|
||||
"pagination_token": None,
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"expansions": None,
|
||||
"media_fields": None,
|
||||
"place_fields": None,
|
||||
"poll_fields": None,
|
||||
"tweet_fields": None,
|
||||
"user_fields": None,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("ids", ["12345", "67890"]),
|
||||
("texts", ["Tweet 1", "Tweet 2"]),
|
||||
("userIds", ["67890", "67891"]),
|
||||
("userNames", ["testuser1", "testuser2"]),
|
||||
(
|
||||
"data",
|
||||
[
|
||||
{"id": "12345", "text": "Tweet 1"},
|
||||
{"id": "67890", "text": "Tweet 2"},
|
||||
],
|
||||
),
|
||||
],
|
||||
test_mock={
|
||||
"get_liked_tweets": lambda *args, **kwargs: (
|
||||
["12345", "67890"],
|
||||
["Tweet 1", "Tweet 2"],
|
||||
["67890", "67891"],
|
||||
["testuser1", "testuser2"],
|
||||
[
|
||||
{"id": "12345", "text": "Tweet 1"},
|
||||
{"id": "67890", "text": "Tweet 2"},
|
||||
],
|
||||
{},
|
||||
{},
|
||||
None,
|
||||
)
|
||||
},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_liked_tweets(
|
||||
credentials: TwitterCredentials,
|
||||
user_id: str,
|
||||
max_results: int,
|
||||
pagination_token: str | None,
|
||||
expansions: ExpansionFilter | None,
|
||||
media_fields: TweetMediaFieldsFilter | None,
|
||||
place_fields: TweetPlaceFieldsFilter | None,
|
||||
poll_fields: TweetPollFieldsFilter | None,
|
||||
tweet_fields: TweetFieldsFilter | None,
|
||||
user_fields: TweetUserFieldsFilter | None,
|
||||
):
|
||||
try:
|
||||
client = tweepy.Client(
|
||||
bearer_token=credentials.access_token.get_secret_value()
|
||||
)
|
||||
|
||||
params = {
|
||||
"id": user_id,
|
||||
"max_results": max_results,
|
||||
"pagination_token": (
|
||||
None if pagination_token == "" else pagination_token
|
||||
),
|
||||
"user_auth": False,
|
||||
}
|
||||
|
||||
params = (
|
||||
TweetExpansionsBuilder(params)
|
||||
.add_expansions(expansions)
|
||||
.add_media_fields(media_fields)
|
||||
.add_place_fields(place_fields)
|
||||
.add_poll_fields(poll_fields)
|
||||
.add_tweet_fields(tweet_fields)
|
||||
.add_user_fields(user_fields)
|
||||
.build()
|
||||
)
|
||||
|
||||
response = cast(Response, client.get_liked_tweets(**params))
|
||||
|
||||
if not response.data and not response.meta:
|
||||
raise Exception("No liked tweets found")
|
||||
|
||||
meta = {}
|
||||
tweet_ids = []
|
||||
tweet_texts = []
|
||||
user_ids = []
|
||||
user_names = []
|
||||
next_token = None
|
||||
|
||||
if response.meta:
|
||||
meta = response.meta
|
||||
next_token = meta.get("next_token")
|
||||
|
||||
included = IncludesSerializer.serialize(response.includes)
|
||||
data = ResponseDataSerializer.serialize_list(response.data)
|
||||
|
||||
if response.data:
|
||||
tweet_ids = [str(tweet.id) for tweet in response.data]
|
||||
tweet_texts = [tweet.text for tweet in response.data]
|
||||
|
||||
if "users" in response.includes:
|
||||
user_ids = [str(user["id"]) for user in response.includes["users"]]
|
||||
user_names = [
|
||||
user["username"] for user in response.includes["users"]
|
||||
]
|
||||
|
||||
return (
|
||||
tweet_ids,
|
||||
tweet_texts,
|
||||
user_ids,
|
||||
user_names,
|
||||
data,
|
||||
included,
|
||||
meta,
|
||||
next_token,
|
||||
)
|
||||
|
||||
raise Exception("No liked tweets found")
|
||||
|
||||
except tweepy.TweepyException:
|
||||
raise
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: TwitterCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
ids, texts, user_ids, user_names, data, included, meta, next_token = (
|
||||
self.get_liked_tweets(
|
||||
credentials,
|
||||
input_data.user_id,
|
||||
input_data.max_results,
|
||||
input_data.pagination_token,
|
||||
input_data.expansions,
|
||||
input_data.media_fields,
|
||||
input_data.place_fields,
|
||||
input_data.poll_fields,
|
||||
input_data.tweet_fields,
|
||||
input_data.user_fields,
|
||||
)
|
||||
)
|
||||
if ids:
|
||||
yield "ids", ids
|
||||
if texts:
|
||||
yield "texts", texts
|
||||
if user_ids:
|
||||
yield "userIds", user_ids
|
||||
if user_names:
|
||||
yield "userNames", user_names
|
||||
if next_token:
|
||||
yield "next_token", next_token
|
||||
if data:
|
||||
yield "data", data
|
||||
if included:
|
||||
yield "included", included
|
||||
if meta:
|
||||
yield "meta", meta
|
||||
except Exception as e:
|
||||
yield "error", handle_tweepy_exception(e)
|
||||
|
||||
|
||||
class TwitterUnlikeTweetBlock(Block):
|
||||
"""
|
||||
Unlikes a tweet that was previously liked
|
||||
"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["tweet.read", "like.write", "users.read", "offline.access"]
|
||||
)
|
||||
|
||||
tweet_id: str = SchemaField(
|
||||
description="ID of the tweet to unlike",
|
||||
placeholder="Enter tweet ID",
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
success: bool = SchemaField(description="Whether the operation was successful")
|
||||
error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="1ed5eab8-a630-11ef-8e21-cbbbc80cbb85",
|
||||
description="This block unlikes a tweet.",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterUnlikeTweetBlock.Input,
|
||||
output_schema=TwitterUnlikeTweetBlock.Output,
|
||||
test_input={
|
||||
"tweet_id": "1234567890",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("success", True),
|
||||
],
|
||||
test_mock={"unlike_tweet": lambda *args, **kwargs: True},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def unlike_tweet(
|
||||
credentials: TwitterCredentials,
|
||||
tweet_id: str,
|
||||
):
|
||||
try:
|
||||
client = tweepy.Client(
|
||||
bearer_token=credentials.access_token.get_secret_value()
|
||||
)
|
||||
|
||||
client.unlike(tweet_id=tweet_id, user_auth=False)
|
||||
|
||||
return True
|
||||
|
||||
except tweepy.TweepyException:
|
||||
raise
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: TwitterCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
success = self.unlike_tweet(
|
||||
credentials,
|
||||
input_data.tweet_id,
|
||||
)
|
||||
yield "success", success
|
||||
except Exception as e:
|
||||
yield "error", handle_tweepy_exception(e)
|
||||
557
autogpt_platform/backend/backend/blocks/twitter/tweets/manage.py
Normal file
@@ -0,0 +1,557 @@
|
||||
from datetime import datetime
|
||||
from typing import cast
|
||||
|
||||
import tweepy
|
||||
from tweepy.client import Response
|
||||
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
)
|
||||
from backend.blocks.twitter._builders import (
|
||||
TweetDurationBuilder,
|
||||
TweetExpansionsBuilder,
|
||||
TweetPostBuilder,
|
||||
TweetSearchBuilder,
|
||||
)
|
||||
from backend.blocks.twitter._serializer import (
|
||||
IncludesSerializer,
|
||||
ResponseDataSerializer,
|
||||
)
|
||||
from backend.blocks.twitter._types import (
|
||||
ExpansionFilter,
|
||||
TweetExpansionInputs,
|
||||
TweetFieldsFilter,
|
||||
TweetMediaFieldsFilter,
|
||||
TweetPlaceFieldsFilter,
|
||||
TweetPollFieldsFilter,
|
||||
TweetReplySettingsFilter,
|
||||
TweetTimeWindowInputs,
|
||||
TweetUserFieldsFilter,
|
||||
)
|
||||
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
|
||||
|
||||
class TwitterPostTweetBlock(Block):
|
||||
"""
|
||||
Create a tweet on Twitter with the option to include one additional element such as a media, quote, or deep link.
|
||||
"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["tweet.read", "tweet.write", "users.read", "offline.access"]
|
||||
)
|
||||
|
||||
tweet_text: str = SchemaField(
|
||||
description="Text of the tweet to post [It's Optional if you want to add media, quote, or deep link]",
|
||||
placeholder="Enter your tweet",
|
||||
advanced=False,
|
||||
default="",
|
||||
)
|
||||
|
||||
media_ids: list = SchemaField(
|
||||
description="List of media IDs to attach to the tweet, [ex - 1455952740635586573]",
|
||||
placeholder="Enter media IDs",
|
||||
default=[],
|
||||
)
|
||||
|
||||
media_tagged_user_ids: list = SchemaField(
|
||||
description="List of user IDs to tag in media, [ex - 1455952740635586573]",
|
||||
placeholder="Enter media tagged user IDs",
|
||||
default=[],
|
||||
)
|
||||
|
||||
direct_message_deep_link: str = SchemaField(
|
||||
description="Link directly to a Direct Message conversation with an account [ex - https://twitter.com/messages/compose?recipient_id={your_id}]",
|
||||
placeholder="Enter direct message deep link",
|
||||
default="",
|
||||
)
|
||||
|
||||
poll_options: list = SchemaField(
|
||||
description="List of poll options",
|
||||
placeholder="Enter poll options",
|
||||
default=[],
|
||||
)
|
||||
|
||||
poll_duration_minutes: int = SchemaField(
|
||||
description="Duration of the poll in minutes",
|
||||
placeholder="Enter poll duration in minutes",
|
||||
default=0,
|
||||
)
|
||||
|
||||
for_super_followers_only: bool = SchemaField(
|
||||
description="Tweet exclusively for Super Followers",
|
||||
placeholder="Enter for super followers only",
|
||||
default=False,
|
||||
)
|
||||
|
||||
place_id: str = SchemaField(
|
||||
description="Adds optional location information to a tweet if geo settings are enabled in your profile.",
|
||||
placeholder="Enter place ID",
|
||||
default="",
|
||||
)
|
||||
|
||||
quote_tweet_id: str = SchemaField(
|
||||
description="Link to the Tweet being quoted, [ex- 1455953449422516226]",
|
||||
advanced=True,
|
||||
placeholder="Enter quote tweet ID",
|
||||
default="",
|
||||
)
|
||||
|
||||
exclude_reply_user_ids: list = SchemaField(
|
||||
description="User IDs to exclude from reply Tweet thread. [ex - 6253282] ",
|
||||
placeholder="Enter user IDs to exclude",
|
||||
advanced=True,
|
||||
default=[],
|
||||
)
|
||||
|
||||
in_reply_to_tweet_id: str = SchemaField(
|
||||
description="Tweet ID being replied to. Please note that in_reply_to_tweet_id needs to be in the request if exclude_reply_user_ids is present",
|
||||
default="",
|
||||
placeholder="Enter in reply to tweet ID",
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
reply_settings: TweetReplySettingsFilter = SchemaField(
|
||||
description="Who can reply to the Tweet (mentionedUsers or following)",
|
||||
placeholder="Enter reply settings",
|
||||
advanced=True,
|
||||
default=TweetReplySettingsFilter(All_Users=True),
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
tweet_id: str = SchemaField(description="ID of the created tweet")
|
||||
tweet_url: str = SchemaField(description="URL to the tweet")
|
||||
error: str = SchemaField(
|
||||
description="Error message if the tweet posting failed"
|
||||
)
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="7bb0048a-a630-11ef-aeb8-abc0dadb9b12",
|
||||
description="This block posts a tweet on Twitter.",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterPostTweetBlock.Input,
|
||||
output_schema=TwitterPostTweetBlock.Output,
|
||||
test_input={
|
||||
"tweet_text": "This is a test tweet.",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"direct_message_deep_link": "",
|
||||
"for_super_followers_only": False,
|
||||
"place_id": "",
|
||||
"media_ids": [],
|
||||
"media_tagged_user_ids": [],
|
||||
"quote_tweet_id": "",
|
||||
"exclude_reply_user_ids": [],
|
||||
"in_reply_to_tweet_id": "",
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("tweet_id", "1234567890"),
|
||||
("tweet_url", "https://twitter.com/user/status/1234567890"),
|
||||
],
|
||||
test_mock={
|
||||
"post_tweet": lambda *args, **kwargs: (
|
||||
"1234567890",
|
||||
"https://twitter.com/user/status/1234567890",
|
||||
)
|
||||
},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def post_tweet(
|
||||
credentials: TwitterCredentials,
|
||||
input_txt: str,
|
||||
media_ids: list,
|
||||
media_tagged_user_ids: list,
|
||||
direct_message_deep_link: str,
|
||||
for_super_followers_only: bool,
|
||||
place_id: str,
|
||||
poll_options: list,
|
||||
poll_duration_minutes: int,
|
||||
quote_tweet_id: str,
|
||||
exclude_reply_user_ids: list,
|
||||
in_reply_to_tweet_id: str,
|
||||
reply_settings: TweetReplySettingsFilter,
|
||||
):
|
||||
try:
|
||||
client = tweepy.Client(
|
||||
bearer_token=credentials.access_token.get_secret_value()
|
||||
)
|
||||
|
||||
params = (
|
||||
TweetPostBuilder()
|
||||
.add_text(input_txt)
|
||||
.add_media(media_ids, media_tagged_user_ids)
|
||||
.add_deep_link(direct_message_deep_link)
|
||||
.add_super_followers(for_super_followers_only)
|
||||
.add_poll_options(poll_options)
|
||||
.add_poll_duration(poll_duration_minutes)
|
||||
.add_place(place_id)
|
||||
.add_quote(quote_tweet_id)
|
||||
.add_reply_settings(
|
||||
exclude_reply_user_ids, in_reply_to_tweet_id, reply_settings
|
||||
)
|
||||
.build()
|
||||
)
|
||||
|
||||
tweet = cast(Response, client.create_tweet(**params))
|
||||
|
||||
if not tweet.data:
|
||||
raise Exception("Failed to create tweet")
|
||||
|
||||
tweet_id = tweet.data["id"]
|
||||
tweet_url = f"https://twitter.com/user/status/{tweet_id}"
|
||||
return str(tweet_id), tweet_url
|
||||
|
||||
except tweepy.TweepyException:
|
||||
raise
|
||||
except Exception:
|
||||
raise
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: TwitterCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
tweet_id, tweet_url = self.post_tweet(
|
||||
credentials,
|
||||
input_data.tweet_text,
|
||||
input_data.media_ids,
|
||||
input_data.media_tagged_user_ids,
|
||||
input_data.direct_message_deep_link,
|
||||
input_data.for_super_followers_only,
|
||||
input_data.place_id,
|
||||
input_data.poll_options,
|
||||
input_data.poll_duration_minutes,
|
||||
input_data.quote_tweet_id,
|
||||
input_data.exclude_reply_user_ids,
|
||||
input_data.in_reply_to_tweet_id,
|
||||
input_data.reply_settings,
|
||||
)
|
||||
yield "tweet_id", tweet_id
|
||||
yield "tweet_url", tweet_url
|
||||
|
||||
except Exception as e:
|
||||
yield "error", handle_tweepy_exception(e)
|
||||
|
||||
|
||||
class TwitterDeleteTweetBlock(Block):
|
||||
"""
|
||||
Deletes a tweet on Twitter using twitter Id
|
||||
"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["tweet.read", "tweet.write", "users.read", "offline.access"]
|
||||
)
|
||||
|
||||
tweet_id: str = SchemaField(
|
||||
description="ID of the tweet to delete",
|
||||
placeholder="Enter tweet ID",
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
success: bool = SchemaField(
|
||||
description="Whether the tweet was successfully deleted"
|
||||
)
|
||||
error: str = SchemaField(
|
||||
description="Error message if the tweet deletion failed"
|
||||
)
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="761babf0-a630-11ef-a03d-abceb082f58f",
|
||||
description="This block deletes a tweet on Twitter.",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterDeleteTweetBlock.Input,
|
||||
output_schema=TwitterDeleteTweetBlock.Output,
|
||||
test_input={
|
||||
"tweet_id": "1234567890",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[("success", True)],
|
||||
test_mock={"delete_tweet": lambda *args, **kwargs: True},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def delete_tweet(credentials: TwitterCredentials, tweet_id: str):
|
||||
try:
|
||||
client = tweepy.Client(
|
||||
bearer_token=credentials.access_token.get_secret_value()
|
||||
)
|
||||
client.delete_tweet(id=tweet_id, user_auth=False)
|
||||
return True
|
||||
except tweepy.TweepyException:
|
||||
raise
|
||||
except Exception:
|
||||
raise
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: TwitterCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
success = self.delete_tweet(
|
||||
credentials,
|
||||
input_data.tweet_id,
|
||||
)
|
||||
yield "success", success
|
||||
|
||||
except Exception as e:
|
||||
yield "error", handle_tweepy_exception(e)
|
||||
|
||||
|
||||
class TwitterSearchRecentTweetsBlock(Block):
|
||||
"""
|
||||
Searches all public Tweets in Twitter history
|
||||
"""
|
||||
|
||||
class Input(TweetExpansionInputs, TweetTimeWindowInputs):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["tweet.read", "users.read", "offline.access"]
|
||||
)
|
||||
|
||||
query: str = SchemaField(
|
||||
description="Search query (up to 1024 characters)",
|
||||
placeholder="Enter search query",
|
||||
)
|
||||
|
||||
max_results: int = SchemaField(
|
||||
description="Maximum number of results per page (10-500)",
|
||||
placeholder="Enter max results",
|
||||
default=10,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
pagination: str | None = SchemaField(
|
||||
description="Token for pagination",
|
||||
default="",
|
||||
placeholder="Enter pagination token",
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
# Common Outputs that user commonly uses
|
||||
tweet_ids: list[str] = SchemaField(description="All Tweet IDs")
|
||||
tweet_texts: list[str] = SchemaField(description="All Tweet texts")
|
||||
next_token: str = SchemaField(description="Next token for pagination")
|
||||
|
||||
# Complete Outputs for advanced use
|
||||
data: list[dict] = SchemaField(description="Complete Tweet data")
|
||||
included: dict = SchemaField(
|
||||
description="Additional data that you have requested (Optional) via Expansions field"
|
||||
)
|
||||
meta: dict = SchemaField(
|
||||
description="Provides metadata such as pagination info (next_token) or result counts"
|
||||
)
|
||||
|
||||
# error
|
||||
error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="53e5cf8e-a630-11ef-ba85-df6d666fa5d5",
|
||||
description="This block searches all public Tweets in Twitter history.",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterSearchRecentTweetsBlock.Input,
|
||||
output_schema=TwitterSearchRecentTweetsBlock.Output,
|
||||
test_input={
|
||||
"query": "from:twitterapi #twitterapi",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"max_results": 2,
|
||||
"start_time": "2024-12-14T18:30:00.000Z",
|
||||
"end_time": "2024-12-17T18:30:00.000Z",
|
||||
"since_id": None,
|
||||
"until_id": None,
|
||||
"sort_order": None,
|
||||
"pagination": None,
|
||||
"expansions": None,
|
||||
"media_fields": None,
|
||||
"place_fields": None,
|
||||
"poll_fields": None,
|
||||
"tweet_fields": None,
|
||||
"user_fields": None,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("tweet_ids", ["1373001119480344583", "1372627771717869568"]),
|
||||
(
|
||||
"tweet_texts",
|
||||
[
|
||||
"Looking to get started with the Twitter API but new to APIs in general?",
|
||||
"Thanks to everyone who joined and made today a great session!",
|
||||
],
|
||||
),
|
||||
(
|
||||
"data",
|
||||
[
|
||||
{
|
||||
"id": "1373001119480344583",
|
||||
"text": "Looking to get started with the Twitter API but new to APIs in general?",
|
||||
},
|
||||
{
|
||||
"id": "1372627771717869568",
|
||||
"text": "Thanks to everyone who joined and made today a great session!",
|
||||
},
|
||||
],
|
||||
),
|
||||
],
|
||||
test_mock={
|
||||
"search_tweets": lambda *args, **kwargs: (
|
||||
["1373001119480344583", "1372627771717869568"],
|
||||
[
|
||||
"Looking to get started with the Twitter API but new to APIs in general?",
|
||||
"Thanks to everyone who joined and made today a great session!",
|
||||
],
|
||||
[
|
||||
{
|
||||
"id": "1373001119480344583",
|
||||
"text": "Looking to get started with the Twitter API but new to APIs in general?",
|
||||
},
|
||||
{
|
||||
"id": "1372627771717869568",
|
||||
"text": "Thanks to everyone who joined and made today a great session!",
|
||||
},
|
||||
],
|
||||
{},
|
||||
{},
|
||||
None,
|
||||
)
|
||||
},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def search_tweets(
|
||||
credentials: TwitterCredentials,
|
||||
query: str,
|
||||
max_results: int,
|
||||
start_time: datetime | None,
|
||||
end_time: datetime | None,
|
||||
since_id: str | None,
|
||||
until_id: str | None,
|
||||
sort_order: str | None,
|
||||
pagination: str | None,
|
||||
expansions: ExpansionFilter | None,
|
||||
media_fields: TweetMediaFieldsFilter | None,
|
||||
place_fields: TweetPlaceFieldsFilter | None,
|
||||
poll_fields: TweetPollFieldsFilter | None,
|
||||
tweet_fields: TweetFieldsFilter | None,
|
||||
user_fields: TweetUserFieldsFilter | None,
|
||||
):
|
||||
try:
|
||||
client = tweepy.Client(
|
||||
bearer_token=credentials.access_token.get_secret_value()
|
||||
)
|
||||
|
||||
# Building common params
|
||||
params = (
|
||||
TweetSearchBuilder()
|
||||
.add_query(query)
|
||||
.add_pagination(max_results, pagination)
|
||||
.build()
|
||||
)
|
||||
|
||||
# Adding expansions to params If required by the user
|
||||
params = (
|
||||
TweetExpansionsBuilder(params)
|
||||
.add_expansions(expansions)
|
||||
.add_media_fields(media_fields)
|
||||
.add_place_fields(place_fields)
|
||||
.add_poll_fields(poll_fields)
|
||||
.add_tweet_fields(tweet_fields)
|
||||
.add_user_fields(user_fields)
|
||||
.build()
|
||||
)
|
||||
|
||||
# Adding time window to params If required by the user
|
||||
params = (
|
||||
TweetDurationBuilder(params)
|
||||
.add_start_time(start_time)
|
||||
.add_end_time(end_time)
|
||||
.add_since_id(since_id)
|
||||
.add_until_id(until_id)
|
||||
.add_sort_order(sort_order)
|
||||
.build()
|
||||
)
|
||||
|
||||
response = cast(Response, client.search_recent_tweets(**params))
|
||||
|
||||
if not response.data and not response.meta:
|
||||
raise Exception("No tweets found")
|
||||
|
||||
meta = {}
|
||||
tweet_ids = []
|
||||
tweet_texts = []
|
||||
next_token = None
|
||||
|
||||
if response.meta:
|
||||
meta = response.meta
|
||||
next_token = meta.get("next_token")
|
||||
|
||||
included = IncludesSerializer.serialize(response.includes)
|
||||
data = ResponseDataSerializer.serialize_list(response.data)
|
||||
|
||||
if response.data:
|
||||
tweet_ids = [str(tweet.id) for tweet in response.data]
|
||||
tweet_texts = [tweet.text for tweet in response.data]
|
||||
|
||||
return tweet_ids, tweet_texts, data, included, meta, next_token
|
||||
|
||||
raise Exception("No tweets found")
|
||||
|
||||
except tweepy.TweepyException:
|
||||
raise
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: TwitterCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
ids, texts, data, included, meta, next_token = self.search_tweets(
|
||||
credentials,
|
||||
input_data.query,
|
||||
input_data.max_results,
|
||||
input_data.start_time,
|
||||
input_data.end_time,
|
||||
input_data.since_id,
|
||||
input_data.until_id,
|
||||
input_data.sort_order,
|
||||
input_data.pagination,
|
||||
input_data.expansions,
|
||||
input_data.media_fields,
|
||||
input_data.place_fields,
|
||||
input_data.poll_fields,
|
||||
input_data.tweet_fields,
|
||||
input_data.user_fields,
|
||||
)
|
||||
if ids:
|
||||
yield "tweet_ids", ids
|
||||
if texts:
|
||||
yield "tweet_texts", texts
|
||||
if next_token:
|
||||
yield "next_token", next_token
|
||||
if data:
|
||||
yield "data", data
|
||||
if included:
|
||||
yield "included", included
|
||||
if meta:
|
||||
yield "meta", meta
|
||||
|
||||
except Exception as e:
|
||||
yield "error", handle_tweepy_exception(e)
|
||||
224
autogpt_platform/backend/backend/blocks/twitter/tweets/quote.py
Normal file
@@ -0,0 +1,224 @@
|
||||
from typing import cast
|
||||
|
||||
import tweepy
|
||||
from tweepy.client import Response
|
||||
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
)
|
||||
from backend.blocks.twitter._builders import TweetExpansionsBuilder
|
||||
from backend.blocks.twitter._serializer import (
|
||||
IncludesSerializer,
|
||||
ResponseDataSerializer,
|
||||
)
|
||||
from backend.blocks.twitter._types import (
|
||||
ExpansionFilter,
|
||||
TweetExcludesFilter,
|
||||
TweetExpansionInputs,
|
||||
TweetFieldsFilter,
|
||||
TweetMediaFieldsFilter,
|
||||
TweetPlaceFieldsFilter,
|
||||
TweetPollFieldsFilter,
|
||||
TweetUserFieldsFilter,
|
||||
)
|
||||
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
|
||||
|
||||
class TwitterGetQuoteTweetsBlock(Block):
|
||||
"""
|
||||
Gets quote tweets for a specified tweet ID
|
||||
"""
|
||||
|
||||
class Input(TweetExpansionInputs):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["tweet.read", "users.read", "offline.access"]
|
||||
)
|
||||
|
||||
tweet_id: str = SchemaField(
|
||||
description="ID of the tweet to get quotes for",
|
||||
placeholder="Enter tweet ID",
|
||||
)
|
||||
|
||||
max_results: int = SchemaField(
|
||||
description="Number of results to return (max 100)",
|
||||
default=10,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
exclude: TweetExcludesFilter | None = SchemaField(
|
||||
description="Types of tweets to exclude",
|
||||
advanced=True,
|
||||
default=None
|
||||
)
|
||||
|
||||
pagination_token: str | None = SchemaField(
|
||||
description="Token for pagination",
|
||||
advanced=True,
|
||||
default="",
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
# Common Outputs that user commonly uses
|
||||
ids: list = SchemaField(description="All Tweet IDs ")
|
||||
texts: list = SchemaField(description="All Tweet texts")
|
||||
next_token: str = SchemaField(description="Next token for pagination")
|
||||
|
||||
# Complete Outputs for advanced use
|
||||
data: list[dict] = SchemaField(description="Complete Tweet data")
|
||||
included: dict = SchemaField(
|
||||
description="Additional data that you have requested (Optional) via Expansions field"
|
||||
)
|
||||
meta: dict = SchemaField(
|
||||
description="Provides metadata such as pagination info (next_token) or result counts"
|
||||
)
|
||||
|
||||
# error
|
||||
error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="9fbdd208-a630-11ef-9b97-ab7a3a695ca3",
|
||||
description="This block gets quote tweets for a specific tweet.",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetQuoteTweetsBlock.Input,
|
||||
output_schema=TwitterGetQuoteTweetsBlock.Output,
|
||||
test_input={
|
||||
"tweet_id": "1234567890",
|
||||
"max_results": 2,
|
||||
"pagination_token": None,
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("ids", ["12345", "67890"]),
|
||||
("texts", ["Tweet 1", "Tweet 2"]),
|
||||
(
|
||||
"data",
|
||||
[
|
||||
{"id": "12345", "text": "Tweet 1"},
|
||||
{"id": "67890", "text": "Tweet 2"},
|
||||
],
|
||||
),
|
||||
],
|
||||
test_mock={
|
||||
"get_quote_tweets": lambda *args, **kwargs: (
|
||||
["12345", "67890"],
|
||||
["Tweet 1", "Tweet 2"],
|
||||
[
|
||||
{"id": "12345", "text": "Tweet 1"},
|
||||
{"id": "67890", "text": "Tweet 2"},
|
||||
],
|
||||
{},
|
||||
{},
|
||||
None,
|
||||
)
|
||||
},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_quote_tweets(
|
||||
credentials: TwitterCredentials,
|
||||
tweet_id: str,
|
||||
max_results: int,
|
||||
exclude: TweetExcludesFilter | None,
|
||||
pagination_token: str | None,
|
||||
expansions: ExpansionFilter | None,
|
||||
media_fields: TweetMediaFieldsFilter | None,
|
||||
place_fields: TweetPlaceFieldsFilter | None,
|
||||
poll_fields: TweetPollFieldsFilter | None,
|
||||
tweet_fields: TweetFieldsFilter | None,
|
||||
user_fields: TweetUserFieldsFilter | None,
|
||||
):
|
||||
try:
|
||||
client = tweepy.Client(
|
||||
bearer_token=credentials.access_token.get_secret_value()
|
||||
)
|
||||
|
||||
params = {
|
||||
"id": tweet_id,
|
||||
"max_results": max_results,
|
||||
"pagination_token": (
|
||||
None if pagination_token == "" else pagination_token
|
||||
),
|
||||
"exclude": None if exclude == TweetExcludesFilter() else exclude,
|
||||
"user_auth": False,
|
||||
}
|
||||
|
||||
params = (
|
||||
TweetExpansionsBuilder(params)
|
||||
.add_expansions(expansions)
|
||||
.add_media_fields(media_fields)
|
||||
.add_place_fields(place_fields)
|
||||
.add_poll_fields(poll_fields)
|
||||
.add_tweet_fields(tweet_fields)
|
||||
.add_user_fields(user_fields)
|
||||
.build()
|
||||
)
|
||||
|
||||
response = cast(Response, client.get_quote_tweets(**params))
|
||||
|
||||
meta = {}
|
||||
tweet_ids = []
|
||||
tweet_texts = []
|
||||
next_token = None
|
||||
|
||||
if response.meta:
|
||||
meta = response.meta
|
||||
next_token = meta.get("next_token")
|
||||
|
||||
included = IncludesSerializer.serialize(response.includes)
|
||||
data = ResponseDataSerializer.serialize_list(response.data)
|
||||
|
||||
if response.data:
|
||||
tweet_ids = [str(tweet.id) for tweet in response.data]
|
||||
tweet_texts = [tweet.text for tweet in response.data]
|
||||
|
||||
return tweet_ids, tweet_texts, data, included, meta, next_token
|
||||
|
||||
raise Exception("No quote tweets found")
|
||||
|
||||
except tweepy.TweepyException:
|
||||
raise
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: TwitterCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
ids, texts, data, included, meta, next_token = self.get_quote_tweets(
|
||||
credentials,
|
||||
input_data.tweet_id,
|
||||
input_data.max_results,
|
||||
input_data.exclude,
|
||||
input_data.pagination_token,
|
||||
input_data.expansions,
|
||||
input_data.media_fields,
|
||||
input_data.place_fields,
|
||||
input_data.poll_fields,
|
||||
input_data.tweet_fields,
|
||||
input_data.user_fields,
|
||||
)
|
||||
if ids:
|
||||
yield "ids", ids
|
||||
if texts:
|
||||
yield "texts", texts
|
||||
if next_token:
|
||||
yield "next_token", next_token
|
||||
if data:
|
||||
yield "data", data
|
||||
if included:
|
||||
yield "included", included
|
||||
if meta:
|
||||
yield "meta", meta
|
||||
|
||||
except Exception as e:
|
||||
yield "error", handle_tweepy_exception(e)
|
||||
@@ -0,0 +1,363 @@
from typing import cast

import tweepy
from tweepy.client import Response

from backend.blocks.twitter._auth import (
    TEST_CREDENTIALS,
    TEST_CREDENTIALS_INPUT,
    TwitterCredentials,
    TwitterCredentialsField,
    TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import UserExpansionsBuilder
from backend.blocks.twitter._serializer import (
    IncludesSerializer,
    ResponseDataSerializer,
)
from backend.blocks.twitter._types import (
    TweetFieldsFilter,
    TweetUserFieldsFilter,
    UserExpansionInputs,
    UserExpansionsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField


class TwitterRetweetBlock(Block):
    """
    Retweets a tweet on Twitter
    """

    class Input(BlockSchema):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["tweet.read", "tweet.write", "users.read", "offline.access"]
        )

        tweet_id: str = SchemaField(
            description="ID of the tweet to retweet",
            placeholder="Enter tweet ID",
        )

    class Output(BlockSchema):
        success: bool = SchemaField(description="Whether the retweet was successful")
        error: str = SchemaField(description="Error message if the retweet failed")

    def __init__(self):
        super().__init__(
            id="bd7b8d3a-a630-11ef-be96-6f4aa4c3c4f4",
            description="This block retweets a tweet on Twitter.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterRetweetBlock.Input,
            output_schema=TwitterRetweetBlock.Output,
            test_input={
                "tweet_id": "1234567890",
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("success", True),
            ],
            test_mock={"retweet": lambda *args, **kwargs: True},
        )

    @staticmethod
    def retweet(
        credentials: TwitterCredentials,
        tweet_id: str,
    ):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            client.retweet(
                tweet_id=tweet_id,
                user_auth=False,
            )

            return True

        except tweepy.TweepyException:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            success = self.retweet(
                credentials,
                input_data.tweet_id,
            )
            yield "success", success
        except Exception as e:
            yield "error", handle_tweepy_exception(e)


class TwitterRemoveRetweetBlock(Block):
    """
    Removes a retweet on Twitter
    """

    class Input(BlockSchema):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["tweet.read", "tweet.write", "users.read", "offline.access"]
        )

        tweet_id: str = SchemaField(
            description="ID of the tweet to remove retweet",
            placeholder="Enter tweet ID",
        )

    class Output(BlockSchema):
        success: bool = SchemaField(
            description="Whether the retweet was successfully removed"
        )
        error: str = SchemaField(description="Error message if the removal failed")

    def __init__(self):
        super().__init__(
            id="b6e663f0-a630-11ef-a7f0-8b9b0c542ff8",
            description="This block removes a retweet on Twitter.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterRemoveRetweetBlock.Input,
            output_schema=TwitterRemoveRetweetBlock.Output,
            test_input={
                "tweet_id": "1234567890",
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("success", True),
            ],
            test_mock={"remove_retweet": lambda *args, **kwargs: True},
        )

    @staticmethod
    def remove_retweet(
        credentials: TwitterCredentials,
        tweet_id: str,
    ):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            client.unretweet(
                source_tweet_id=tweet_id,
                user_auth=False,
            )

            return True

        except tweepy.TweepyException:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            success = self.remove_retweet(
                credentials,
                input_data.tweet_id,
            )
            yield "success", success
        except Exception as e:
            yield "error", handle_tweepy_exception(e)


class TwitterGetRetweetersBlock(Block):
    """
    Gets information about who has retweeted a tweet
    """

    class Input(UserExpansionInputs):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["tweet.read", "users.read", "offline.access"]
        )

        tweet_id: str = SchemaField(
            description="ID of the tweet to get retweeters for",
            placeholder="Enter tweet ID",
        )

        max_results: int = SchemaField(
            description="Maximum number of results per page (1-100)",
            default=10,
            placeholder="Enter max results",
            advanced=True,
        )

        pagination_token: str | None = SchemaField(
            description="Token for pagination",
            placeholder="Enter pagination token",
            default="",
        )

    class Output(BlockSchema):
        # Common Outputs that user commonly uses
        ids: list = SchemaField(description="List of user ids who retweeted")
        names: list = SchemaField(description="List of user names who retweeted")
        usernames: list = SchemaField(
            description="List of user usernames who retweeted"
        )
        next_token: str = SchemaField(description="Token for next page of results")

        # Complete Outputs for advanced use
        data: list[dict] = SchemaField(description="Complete Tweet data")
        included: dict = SchemaField(
            description="Additional data that you have requested (Optional) via Expansions field"
        )
        meta: dict = SchemaField(
            description="Provides metadata such as pagination info (next_token) or result counts"
        )

        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="ad7aa6fa-a630-11ef-a6b0-e7ca640aa030",
            description="This block gets information about who has retweeted a tweet.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterGetRetweetersBlock.Input,
            output_schema=TwitterGetRetweetersBlock.Output,
            test_input={
                "tweet_id": "1234567890",
                "credentials": TEST_CREDENTIALS_INPUT,
                "max_results": 1,
                "pagination_token": "",
                "expansions": None,
                "media_fields": None,
                "place_fields": None,
                "poll_fields": None,
                "tweet_fields": None,
                "user_fields": None,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("ids", ["12345"]),
                ("names", ["Test User"]),
                ("usernames", ["testuser"]),
                (
                    "data",
                    [{"id": "12345", "name": "Test User", "username": "testuser"}],
                ),
            ],
            test_mock={
                "get_retweeters": lambda *args, **kwargs: (
                    [{"id": "12345", "name": "Test User", "username": "testuser"}],
                    {},
                    {},
                    ["12345"],
                    ["Test User"],
                    ["testuser"],
                    None,
                )
            },
        )

    @staticmethod
    def get_retweeters(
        credentials: TwitterCredentials,
        tweet_id: str,
        max_results: int,
        pagination_token: str | None,
        expansions: UserExpansionsFilter | None,
        tweet_fields: TweetFieldsFilter | None,
        user_fields: TweetUserFieldsFilter | None,
    ):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            params = {
                "id": tweet_id,
                "max_results": max_results,
                "pagination_token": (
                    None if pagination_token == "" else pagination_token
                ),
                "user_auth": False,
            }

            params = (
                UserExpansionsBuilder(params)
                .add_expansions(expansions)
                .add_tweet_fields(tweet_fields)
                .add_user_fields(user_fields)
                .build()
            )

            response = cast(Response, client.get_retweeters(**params))

            meta = {}
            ids = []
            names = []
            usernames = []
            next_token = None

            if response.meta:
                meta = response.meta
                next_token = meta.get("next_token")

            included = IncludesSerializer.serialize(response.includes)
            data = ResponseDataSerializer.serialize_list(response.data)

            if response.data:
                ids = [str(user.id) for user in response.data]
                names = [user.name for user in response.data]
                usernames = [user.username for user in response.data]
                return data, included, meta, ids, names, usernames, next_token

            raise Exception("No retweeters found")

        except tweepy.TweepyException:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            data, included, meta, ids, names, usernames, next_token = (
                self.get_retweeters(
                    credentials,
                    input_data.tweet_id,
                    input_data.max_results,
                    input_data.pagination_token,
                    input_data.expansions,
                    input_data.tweet_fields,
                    input_data.user_fields,
                )
            )

            if ids:
                yield "ids", ids
            if names:
                yield "names", names
            if usernames:
                yield "usernames", usernames
            if next_token:
                yield "next_token", next_token
            if data:
                yield "data", data
            if included:
                yield "included", included
            if meta:
                yield "meta", meta

        except Exception as e:
            yield "error", handle_tweepy_exception(e)
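Note (editor's sketch, not part of this diff): the two write helpers above map onto tweepy's retweet/unretweet calls, which take differently named arguments for the same tweet (tweet_id vs. source_tweet_id). A minimal toggle with a placeholder bearer token; the function name is illustrative.

import tweepy

def set_retweet_state(bearer_token: str, tweet_id: str, retweeted: bool) -> bool:
    # retweet() takes tweet_id, while unretweet() takes source_tweet_id, matching
    # TwitterRetweetBlock.retweet and TwitterRemoveRetweetBlock.remove_retweet above.
    client = tweepy.Client(bearer_token=bearer_token)
    if retweeted:
        client.retweet(tweet_id=tweet_id, user_auth=False)
    else:
        client.unretweet(source_tweet_id=tweet_id, user_auth=False)
    return True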
@@ -0,0 +1,757 @@
|
||||
from datetime import datetime
|
||||
from typing import cast
|
||||
|
||||
import tweepy
|
||||
from tweepy.client import Response
|
||||
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
)
|
||||
from backend.blocks.twitter._builders import (
|
||||
TweetDurationBuilder,
|
||||
TweetExpansionsBuilder,
|
||||
)
|
||||
from backend.blocks.twitter._serializer import (
|
||||
IncludesSerializer,
|
||||
ResponseDataSerializer,
|
||||
)
|
||||
from backend.blocks.twitter._types import (
|
||||
ExpansionFilter,
|
||||
TweetExpansionInputs,
|
||||
TweetFieldsFilter,
|
||||
TweetMediaFieldsFilter,
|
||||
TweetPlaceFieldsFilter,
|
||||
TweetPollFieldsFilter,
|
||||
TweetTimeWindowInputs,
|
||||
TweetUserFieldsFilter,
|
||||
)
|
||||
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
|
||||
|
||||
class TwitterGetUserMentionsBlock(Block):
|
||||
"""
|
||||
Returns Tweets where a single user is mentioned, just put that user id
|
||||
"""
|
||||
|
||||
class Input(TweetExpansionInputs, TweetTimeWindowInputs):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["tweet.read", "users.read", "offline.access"]
|
||||
)
|
||||
|
||||
user_id: str = SchemaField(
|
||||
description="Unique identifier of the user for whom to return Tweets mentioning the user",
|
||||
placeholder="Enter user ID",
|
||||
)
|
||||
|
||||
max_results: int = SchemaField(
|
||||
description="Number of tweets to retrieve (5-100)",
|
||||
default=10,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
pagination_token: str | None = SchemaField(
|
||||
description="Token for pagination", default="", advanced=True
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
# Common Outputs that user commonly uses
|
||||
ids: list[str] = SchemaField(description="List of Tweet IDs")
|
||||
texts: list[str] = SchemaField(description="All Tweet texts")
|
||||
|
||||
userIds: list[str] = SchemaField(
|
||||
description="List of user ids that mentioned the user"
|
||||
)
|
||||
userNames: list[str] = SchemaField(
|
||||
description="List of user names that mentioned the user"
|
||||
)
|
||||
next_token: str = SchemaField(description="Next token for pagination")
|
||||
|
||||
# Complete Outputs for advanced use
|
||||
data: list[dict] = SchemaField(description="Complete Tweet data")
|
||||
included: dict = SchemaField(
|
||||
description="Additional data that you have requested (Optional) via Expansions field"
|
||||
)
|
||||
meta: dict = SchemaField(
|
||||
description="Provides metadata such as pagination info (next_token) or result counts"
|
||||
)
|
||||
|
||||
# error
|
||||
error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="e01c890c-a630-11ef-9e20-37da24888bd0",
|
||||
description="This block retrieves Tweets mentioning a specific user.",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetUserMentionsBlock.Input,
|
||||
output_schema=TwitterGetUserMentionsBlock.Output,
|
||||
test_input={
|
||||
"user_id": "12345",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"max_results": 2,
|
||||
"start_time": "2024-12-14T18:30:00.000Z",
|
||||
"end_time": "2024-12-17T18:30:00.000Z",
|
||||
"since_id": "",
|
||||
"until_id": "",
|
||||
"sort_order": None,
|
||||
"pagination_token": None,
|
||||
"expansions": None,
|
||||
"media_fields": None,
|
||||
"place_fields": None,
|
||||
"poll_fields": None,
|
||||
"tweet_fields": None,
|
||||
"user_fields": None,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("ids", ["1373001119480344583", "1372627771717869568"]),
|
||||
("texts", ["Test mention 1", "Test mention 2"]),
|
||||
("userIds", ["67890", "67891"]),
|
||||
("userNames", ["testuser1", "testuser2"]),
|
||||
(
|
||||
"data",
|
||||
[
|
||||
{"id": "1373001119480344583", "text": "Test mention 1"},
|
||||
{"id": "1372627771717869568", "text": "Test mention 2"},
|
||||
],
|
||||
),
|
||||
],
|
||||
test_mock={
|
||||
"get_mentions": lambda *args, **kwargs: (
|
||||
["1373001119480344583", "1372627771717869568"],
|
||||
["Test mention 1", "Test mention 2"],
|
||||
["67890", "67891"],
|
||||
["testuser1", "testuser2"],
|
||||
[
|
||||
{"id": "1373001119480344583", "text": "Test mention 1"},
|
||||
{"id": "1372627771717869568", "text": "Test mention 2"},
|
||||
],
|
||||
{},
|
||||
{},
|
||||
None,
|
||||
)
|
||||
},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_mentions(
|
||||
credentials: TwitterCredentials,
|
||||
user_id: str,
|
||||
max_results: int,
|
||||
start_time: datetime | None,
|
||||
end_time: datetime | None,
|
||||
since_id: str | None,
|
||||
until_id: str | None,
|
||||
sort_order: str | None,
|
||||
pagination: str | None,
|
||||
expansions: ExpansionFilter | None,
|
||||
media_fields: TweetMediaFieldsFilter | None,
|
||||
place_fields: TweetPlaceFieldsFilter | None,
|
||||
poll_fields: TweetPollFieldsFilter | None,
|
||||
tweet_fields: TweetFieldsFilter | None,
|
||||
user_fields: TweetUserFieldsFilter | None,
|
||||
):
|
||||
try:
|
||||
client = tweepy.Client(
|
||||
bearer_token=credentials.access_token.get_secret_value()
|
||||
)
|
||||
|
||||
params = {
|
||||
"id": user_id,
|
||||
"max_results": max_results,
|
||||
"pagination_token": None if pagination == "" else pagination,
|
||||
"user_auth": False,
|
||||
}
|
||||
|
||||
# Adding expansions to params If required by the user
|
||||
params = (
|
||||
TweetExpansionsBuilder(params)
|
||||
.add_expansions(expansions)
|
||||
.add_media_fields(media_fields)
|
||||
.add_place_fields(place_fields)
|
||||
.add_poll_fields(poll_fields)
|
||||
.add_tweet_fields(tweet_fields)
|
||||
.add_user_fields(user_fields)
|
||||
.build()
|
||||
)
|
||||
|
||||
# Adding time window to params If required by the user
|
||||
params = (
|
||||
TweetDurationBuilder(params)
|
||||
.add_start_time(start_time)
|
||||
.add_end_time(end_time)
|
||||
.add_since_id(since_id)
|
||||
.add_until_id(until_id)
|
||||
.add_sort_order(sort_order)
|
||||
.build()
|
||||
)
|
||||
|
||||
response = cast(
|
||||
Response,
|
||||
client.get_users_mentions(**params),
|
||||
)
|
||||
|
||||
if not response.data and not response.meta:
|
||||
raise Exception("No tweets found")
|
||||
|
||||
included = IncludesSerializer.serialize(response.includes)
|
||||
data = ResponseDataSerializer.serialize_list(response.data)
|
||||
meta = response.meta or {}
|
||||
next_token = meta.get("next_token", "")
|
||||
|
||||
tweet_ids = []
|
||||
tweet_texts = []
|
||||
user_ids = []
|
||||
user_names = []
|
||||
|
||||
if response.data:
|
||||
tweet_ids = [str(tweet.id) for tweet in response.data]
|
||||
tweet_texts = [tweet.text for tweet in response.data]
|
||||
|
||||
if "users" in included:
|
||||
user_ids = [str(user["id"]) for user in included["users"]]
|
||||
user_names = [user["username"] for user in included["users"]]
|
||||
|
||||
return (
|
||||
tweet_ids,
|
||||
tweet_texts,
|
||||
user_ids,
|
||||
user_names,
|
||||
data,
|
||||
included,
|
||||
meta,
|
||||
next_token,
|
||||
)
|
||||
|
||||
except tweepy.TweepyException:
|
||||
raise
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: TwitterCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
ids, texts, user_ids, user_names, data, included, meta, next_token = (
|
||||
self.get_mentions(
|
||||
credentials,
|
||||
input_data.user_id,
|
||||
input_data.max_results,
|
||||
input_data.start_time,
|
||||
input_data.end_time,
|
||||
input_data.since_id,
|
||||
input_data.until_id,
|
||||
input_data.sort_order,
|
||||
input_data.pagination_token,
|
||||
input_data.expansions,
|
||||
input_data.media_fields,
|
||||
input_data.place_fields,
|
||||
input_data.poll_fields,
|
||||
input_data.tweet_fields,
|
||||
input_data.user_fields,
|
||||
)
|
||||
)
|
||||
if ids:
|
||||
yield "ids", ids
|
||||
if texts:
|
||||
yield "texts", texts
|
||||
if user_ids:
|
||||
yield "userIds", user_ids
|
||||
if user_names:
|
||||
yield "userNames", user_names
|
||||
if next_token:
|
||||
yield "next_token", next_token
|
||||
if data:
|
||||
yield "data", data
|
||||
if included:
|
||||
yield "included", included
|
||||
if meta:
|
||||
yield "meta", meta
|
||||
|
||||
except Exception as e:
|
||||
yield "error", handle_tweepy_exception(e)
|
||||
|
||||
|
||||
class TwitterGetHomeTimelineBlock(Block):
|
||||
"""
|
||||
Returns a collection of the most recent Tweets and Retweets posted by you and users you follow
|
||||
"""
|
||||
|
||||
class Input(TweetExpansionInputs, TweetTimeWindowInputs):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["tweet.read", "users.read", "offline.access"]
|
||||
)
|
||||
|
||||
max_results: int = SchemaField(
|
||||
description="Number of tweets to retrieve (5-100)",
|
||||
default=10,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
pagination_token: str | None = SchemaField(
|
||||
description="Token for pagination", default="", advanced=True
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
# Common Outputs that user commonly uses
|
||||
ids: list[str] = SchemaField(description="List of Tweet IDs")
|
||||
texts: list[str] = SchemaField(description="All Tweet texts")
|
||||
|
||||
userIds: list[str] = SchemaField(
|
||||
description="List of user ids that authored the tweets"
|
||||
)
|
||||
userNames: list[str] = SchemaField(
|
||||
description="List of user names that authored the tweets"
|
||||
)
|
||||
next_token: str = SchemaField(description="Next token for pagination")
|
||||
|
||||
# Complete Outputs for advanced use
|
||||
data: list[dict] = SchemaField(description="Complete Tweet data")
|
||||
included: dict = SchemaField(
|
||||
description="Additional data that you have requested (Optional) via Expansions field"
|
||||
)
|
||||
meta: dict = SchemaField(
|
||||
description="Provides metadata such as pagination info (next_token) or result counts"
|
||||
)
|
||||
|
||||
# error
|
||||
error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="d222a070-a630-11ef-a18a-3f52f76c6962",
|
||||
description="This block retrieves the authenticated user's home timeline.",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetHomeTimelineBlock.Input,
|
||||
output_schema=TwitterGetHomeTimelineBlock.Output,
|
||||
test_input={
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"max_results": 2,
|
||||
"start_time": "2024-12-14T18:30:00.000Z",
|
||||
"end_time": "2024-12-17T18:30:00.000Z",
|
||||
"since_id": None,
|
||||
"until_id": None,
|
||||
"sort_order": None,
|
||||
"pagination_token": None,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("ids", ["1373001119480344583", "1372627771717869568"]),
|
||||
("texts", ["Test tweet 1", "Test tweet 2"]),
|
||||
("userIds", ["67890", "67891"]),
|
||||
("userNames", ["testuser1", "testuser2"]),
|
||||
(
|
||||
"data",
|
||||
[
|
||||
{"id": "1373001119480344583", "text": "Test tweet 1"},
|
||||
{"id": "1372627771717869568", "text": "Test tweet 2"},
|
||||
],
|
||||
),
|
||||
],
|
||||
test_mock={
|
||||
"get_timeline": lambda *args, **kwargs: (
|
||||
["1373001119480344583", "1372627771717869568"],
|
||||
["Test tweet 1", "Test tweet 2"],
|
||||
["67890", "67891"],
|
||||
["testuser1", "testuser2"],
|
||||
[
|
||||
{"id": "1373001119480344583", "text": "Test tweet 1"},
|
||||
{"id": "1372627771717869568", "text": "Test tweet 2"},
|
||||
],
|
||||
{},
|
||||
{},
|
||||
None,
|
||||
)
|
||||
},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_timeline(
|
||||
credentials: TwitterCredentials,
|
||||
max_results: int,
|
||||
start_time: datetime | None,
|
||||
end_time: datetime | None,
|
||||
since_id: str | None,
|
||||
until_id: str | None,
|
||||
sort_order: str | None,
|
||||
pagination: str | None,
|
||||
expansions: ExpansionFilter | None,
|
||||
media_fields: TweetMediaFieldsFilter | None,
|
||||
place_fields: TweetPlaceFieldsFilter | None,
|
||||
poll_fields: TweetPollFieldsFilter | None,
|
||||
tweet_fields: TweetFieldsFilter | None,
|
||||
user_fields: TweetUserFieldsFilter | None,
|
||||
):
|
||||
try:
|
||||
client = tweepy.Client(
|
||||
bearer_token=credentials.access_token.get_secret_value()
|
||||
)
|
||||
|
||||
params = {
|
||||
"max_results": max_results,
|
||||
"pagination_token": None if pagination == "" else pagination,
|
||||
"user_auth": False,
|
||||
}
|
||||
|
||||
# Adding expansions to params If required by the user
|
||||
params = (
|
||||
TweetExpansionsBuilder(params)
|
||||
.add_expansions(expansions)
|
||||
.add_media_fields(media_fields)
|
||||
.add_place_fields(place_fields)
|
||||
.add_poll_fields(poll_fields)
|
||||
.add_tweet_fields(tweet_fields)
|
||||
.add_user_fields(user_fields)
|
||||
.build()
|
||||
)
|
||||
|
||||
# Adding time window to params If required by the user
|
||||
params = (
|
||||
TweetDurationBuilder(params)
|
||||
.add_start_time(start_time)
|
||||
.add_end_time(end_time)
|
||||
.add_since_id(since_id)
|
||||
.add_until_id(until_id)
|
||||
.add_sort_order(sort_order)
|
||||
.build()
|
||||
)
|
||||
|
||||
response = cast(
|
||||
Response,
|
||||
client.get_home_timeline(**params),
|
||||
)
|
||||
|
||||
if not response.data and not response.meta:
|
||||
raise Exception("No tweets found")
|
||||
|
||||
included = IncludesSerializer.serialize(response.includes)
|
||||
data = ResponseDataSerializer.serialize_list(response.data)
|
||||
meta = response.meta or {}
|
||||
next_token = meta.get("next_token", "")
|
||||
|
||||
tweet_ids = []
|
||||
tweet_texts = []
|
||||
user_ids = []
|
||||
user_names = []
|
||||
|
||||
if response.data:
|
||||
tweet_ids = [str(tweet.id) for tweet in response.data]
|
||||
tweet_texts = [tweet.text for tweet in response.data]
|
||||
|
||||
if "users" in included:
|
||||
user_ids = [str(user["id"]) for user in included["users"]]
|
||||
user_names = [user["username"] for user in included["users"]]
|
||||
|
||||
return (
|
||||
tweet_ids,
|
||||
tweet_texts,
|
||||
user_ids,
|
||||
user_names,
|
||||
data,
|
||||
included,
|
||||
meta,
|
||||
next_token,
|
||||
)
|
||||
|
||||
except tweepy.TweepyException:
|
||||
raise
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: TwitterCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
ids, texts, user_ids, user_names, data, included, meta, next_token = (
|
||||
self.get_timeline(
|
||||
credentials,
|
||||
input_data.max_results,
|
||||
input_data.start_time,
|
||||
input_data.end_time,
|
||||
input_data.since_id,
|
||||
input_data.until_id,
|
||||
input_data.sort_order,
|
||||
input_data.pagination_token,
|
||||
input_data.expansions,
|
||||
input_data.media_fields,
|
||||
input_data.place_fields,
|
||||
input_data.poll_fields,
|
||||
input_data.tweet_fields,
|
||||
input_data.user_fields,
|
||||
)
|
||||
)
|
||||
if ids:
|
||||
yield "ids", ids
|
||||
if texts:
|
||||
yield "texts", texts
|
||||
if user_ids:
|
||||
yield "userIds", user_ids
|
||||
if user_names:
|
||||
yield "userNames", user_names
|
||||
if next_token:
|
||||
yield "next_token", next_token
|
||||
if data:
|
||||
yield "data", data
|
||||
if included:
|
||||
yield "included", included
|
||||
if meta:
|
||||
yield "meta", meta
|
||||
|
||||
except Exception as e:
|
||||
yield "error", handle_tweepy_exception(e)
|
||||
|
||||
|
||||
class TwitterGetUserTweetsBlock(Block):
|
||||
"""
|
||||
Returns Tweets composed by a single user, specified by the requested user ID
|
||||
"""
|
||||
|
||||
class Input(TweetExpansionInputs, TweetTimeWindowInputs):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["tweet.read", "users.read", "offline.access"]
|
||||
)
|
||||
|
||||
user_id: str = SchemaField(
|
||||
description="Unique identifier of the Twitter account (user ID) for whom to return results",
|
||||
placeholder="Enter user ID",
|
||||
)
|
||||
|
||||
max_results: int = SchemaField(
|
||||
description="Number of tweets to retrieve (5-100)",
|
||||
default=10,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
pagination_token: str | None = SchemaField(
|
||||
description="Token for pagination", default="", advanced=True
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
# Common Outputs that user commonly uses
|
||||
ids: list[str] = SchemaField(description="List of Tweet IDs")
|
||||
texts: list[str] = SchemaField(description="All Tweet texts")
|
||||
|
||||
userIds: list[str] = SchemaField(
|
||||
description="List of user ids that authored the tweets"
|
||||
)
|
||||
userNames: list[str] = SchemaField(
|
||||
description="List of user names that authored the tweets"
|
||||
)
|
||||
next_token: str = SchemaField(description="Next token for pagination")
|
||||
|
||||
# Complete Outputs for advanced use
|
||||
data: list[dict] = SchemaField(description="Complete Tweet data")
|
||||
included: dict = SchemaField(
|
||||
description="Additional data that you have requested (Optional) via Expansions field"
|
||||
)
|
||||
meta: dict = SchemaField(
|
||||
description="Provides metadata such as pagination info (next_token) or result counts"
|
||||
)
|
||||
|
||||
# error
|
||||
error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="c44c3ef2-a630-11ef-9ff7-eb7b5ea3a5cb",
|
||||
description="This block retrieves Tweets composed by a single user.",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetUserTweetsBlock.Input,
|
||||
output_schema=TwitterGetUserTweetsBlock.Output,
|
||||
test_input={
|
||||
"user_id": "12345",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"max_results": 2,
|
||||
"start_time": "2024-12-14T18:30:00.000Z",
|
||||
"end_time": "2024-12-17T18:30:00.000Z",
|
||||
"since_id": None,
|
||||
"until_id": None,
|
||||
"sort_order": None,
|
||||
"pagination_token": None,
|
||||
"expansions": None,
|
||||
"media_fields": None,
|
||||
"place_fields": None,
|
||||
"poll_fields": None,
|
||||
"tweet_fields": None,
|
||||
"user_fields": None,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("ids", ["1373001119480344583", "1372627771717869568"]),
|
||||
("texts", ["Test tweet 1", "Test tweet 2"]),
|
||||
("userIds", ["67890", "67891"]),
|
||||
("userNames", ["testuser1", "testuser2"]),
|
||||
(
|
||||
"data",
|
||||
[
|
||||
{"id": "1373001119480344583", "text": "Test tweet 1"},
|
||||
{"id": "1372627771717869568", "text": "Test tweet 2"},
|
||||
],
|
||||
),
|
||||
],
|
||||
test_mock={
|
||||
"get_user_tweets": lambda *args, **kwargs: (
|
||||
["1373001119480344583", "1372627771717869568"],
|
||||
["Test tweet 1", "Test tweet 2"],
|
||||
["67890", "67891"],
|
||||
["testuser1", "testuser2"],
|
||||
[
|
||||
{"id": "1373001119480344583", "text": "Test tweet 1"},
|
||||
{"id": "1372627771717869568", "text": "Test tweet 2"},
|
||||
],
|
||||
{},
|
||||
{},
|
||||
None,
|
||||
)
|
||||
},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_user_tweets(
|
||||
credentials: TwitterCredentials,
|
||||
user_id: str,
|
||||
max_results: int,
|
||||
start_time: datetime | None,
|
||||
end_time: datetime | None,
|
||||
since_id: str | None,
|
||||
until_id: str | None,
|
||||
sort_order: str | None,
|
||||
pagination: str | None,
|
||||
expansions: ExpansionFilter | None,
|
||||
media_fields: TweetMediaFieldsFilter | None,
|
||||
place_fields: TweetPlaceFieldsFilter | None,
|
||||
poll_fields: TweetPollFieldsFilter | None,
|
||||
tweet_fields: TweetFieldsFilter | None,
|
||||
user_fields: TweetUserFieldsFilter | None,
|
||||
):
|
||||
try:
|
||||
client = tweepy.Client(
|
||||
bearer_token=credentials.access_token.get_secret_value()
|
||||
)
|
||||
|
||||
params = {
|
||||
"id": user_id,
|
||||
"max_results": max_results,
|
||||
"pagination_token": None if pagination == "" else pagination,
|
||||
"user_auth": False,
|
||||
}
|
||||
|
||||
# Adding expansions to params If required by the user
|
||||
params = (
|
||||
TweetExpansionsBuilder(params)
|
||||
.add_expansions(expansions)
|
||||
.add_media_fields(media_fields)
|
||||
.add_place_fields(place_fields)
|
||||
.add_poll_fields(poll_fields)
|
||||
.add_tweet_fields(tweet_fields)
|
||||
.add_user_fields(user_fields)
|
||||
.build()
|
||||
)
|
||||
|
||||
# Adding time window to params If required by the user
|
||||
params = (
|
||||
TweetDurationBuilder(params)
|
||||
.add_start_time(start_time)
|
||||
.add_end_time(end_time)
|
||||
.add_since_id(since_id)
|
||||
.add_until_id(until_id)
|
||||
.add_sort_order(sort_order)
|
||||
.build()
|
||||
)
|
||||
|
||||
response = cast(
|
||||
Response,
|
||||
client.get_users_tweets(**params),
|
||||
)
|
||||
|
||||
if not response.data and not response.meta:
|
||||
raise Exception("No tweets found")
|
||||
|
||||
included = IncludesSerializer.serialize(response.includes)
|
||||
data = ResponseDataSerializer.serialize_list(response.data)
|
||||
meta = response.meta or {}
|
||||
next_token = meta.get("next_token", "")
|
||||
|
||||
tweet_ids = []
|
||||
tweet_texts = []
|
||||
user_ids = []
|
||||
user_names = []
|
||||
|
||||
if response.data:
|
||||
tweet_ids = [str(tweet.id) for tweet in response.data]
|
||||
tweet_texts = [tweet.text for tweet in response.data]
|
||||
|
||||
if "users" in included:
|
||||
user_ids = [str(user["id"]) for user in included["users"]]
|
||||
user_names = [user["username"] for user in included["users"]]
|
||||
|
||||
return (
|
||||
tweet_ids,
|
||||
tweet_texts,
|
||||
user_ids,
|
||||
user_names,
|
||||
data,
|
||||
included,
|
||||
meta,
|
||||
next_token,
|
||||
)
|
||||
|
||||
except tweepy.TweepyException:
|
||||
raise
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: TwitterCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
ids, texts, user_ids, user_names, data, included, meta, next_token = (
|
||||
self.get_user_tweets(
|
||||
credentials,
|
||||
input_data.user_id,
|
||||
input_data.max_results,
|
||||
input_data.start_time,
|
||||
input_data.end_time,
|
||||
input_data.since_id,
|
||||
input_data.until_id,
|
||||
input_data.sort_order,
|
||||
input_data.pagination_token,
|
||||
input_data.expansions,
|
||||
input_data.media_fields,
|
||||
input_data.place_fields,
|
||||
input_data.poll_fields,
|
||||
input_data.tweet_fields,
|
||||
input_data.user_fields,
|
||||
)
|
||||
)
|
||||
if ids:
|
||||
yield "ids", ids
|
||||
if texts:
|
||||
yield "texts", texts
|
||||
if user_ids:
|
||||
yield "userIds", user_ids
|
||||
if user_names:
|
||||
yield "userNames", user_names
|
||||
if next_token:
|
||||
yield "next_token", next_token
|
||||
if data:
|
||||
yield "data", data
|
||||
if included:
|
||||
yield "included", included
|
||||
if meta:
|
||||
yield "meta", meta
|
||||
|
||||
except Exception as e:
|
||||
yield "error", handle_tweepy_exception(e)
|
||||
@@ -0,0 +1,361 @@
|
||||
from typing import cast
|
||||
|
||||
import tweepy
|
||||
from tweepy.client import Response
|
||||
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
)
|
||||
from backend.blocks.twitter._builders import TweetExpansionsBuilder
|
||||
from backend.blocks.twitter._serializer import (
|
||||
IncludesSerializer,
|
||||
ResponseDataSerializer,
|
||||
)
|
||||
from backend.blocks.twitter._types import (
|
||||
ExpansionFilter,
|
||||
TweetExpansionInputs,
|
||||
TweetFieldsFilter,
|
||||
TweetMediaFieldsFilter,
|
||||
TweetPlaceFieldsFilter,
|
||||
TweetPollFieldsFilter,
|
||||
TweetUserFieldsFilter,
|
||||
)
|
||||
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
|
||||
|
||||
class TwitterGetTweetBlock(Block):
|
||||
"""
|
||||
Returns information about a single Tweet specified by the requested ID
|
||||
"""
|
||||
|
||||
class Input(TweetExpansionInputs):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["tweet.read", "users.read", "offline.access"]
|
||||
)
|
||||
|
||||
tweet_id: str = SchemaField(
|
||||
description="Unique identifier of the Tweet to request (ex: 1460323737035677698)",
|
||||
placeholder="Enter tweet ID",
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
# Common Outputs that user commonly uses
|
||||
id: str = SchemaField(description="Tweet ID")
|
||||
text: str = SchemaField(description="Tweet text")
|
||||
userId: str = SchemaField(description="ID of the tweet author")
|
||||
userName: str = SchemaField(description="Username of the tweet author")
|
||||
|
||||
# Complete Outputs for advanced use
|
||||
data: dict = SchemaField(description="Tweet data")
|
||||
included: dict = SchemaField(
|
||||
description="Additional data that you have requested (Optional) via Expansions field"
|
||||
)
|
||||
meta: dict = SchemaField(description="Metadata about the tweet")
|
||||
|
||||
error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="f5155c3a-a630-11ef-9cc1-a309988b4d92",
|
||||
description="This block retrieves information about a specific Tweet.",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetTweetBlock.Input,
|
||||
output_schema=TwitterGetTweetBlock.Output,
|
||||
test_input={
|
||||
"tweet_id": "1460323737035677698",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"expansions": None,
|
||||
"media_fields": None,
|
||||
"place_fields": None,
|
||||
"poll_fields": None,
|
||||
"tweet_fields": None,
|
||||
"user_fields": None,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("id", "1460323737035677698"),
|
||||
("text", "Test tweet content"),
|
||||
("userId", "12345"),
|
||||
("userName", "testuser"),
|
||||
("data", {"id": "1460323737035677698", "text": "Test tweet content"}),
|
||||
("included", {"users": [{"id": "12345", "username": "testuser"}]}),
|
||||
("meta", {"result_count": 1}),
|
||||
],
|
||||
test_mock={
|
||||
"get_tweet": lambda *args, **kwargs: (
|
||||
{"id": "1460323737035677698", "text": "Test tweet content"},
|
||||
{"users": [{"id": "12345", "username": "testuser"}]},
|
||||
{"result_count": 1},
|
||||
"12345",
|
||||
"testuser",
|
||||
)
|
||||
},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_tweet(
|
||||
credentials: TwitterCredentials,
|
||||
tweet_id: str,
|
||||
expansions: ExpansionFilter | None,
|
||||
media_fields: TweetMediaFieldsFilter | None,
|
||||
place_fields: TweetPlaceFieldsFilter | None,
|
||||
poll_fields: TweetPollFieldsFilter | None,
|
||||
tweet_fields: TweetFieldsFilter | None,
|
||||
user_fields: TweetUserFieldsFilter | None,
|
||||
):
|
||||
try:
|
||||
client = tweepy.Client(
|
||||
bearer_token=credentials.access_token.get_secret_value()
|
||||
)
|
||||
params = {"id": tweet_id, "user_auth": False}
|
||||
|
||||
# Adding expansions to params If required by the user
|
||||
params = (
|
||||
TweetExpansionsBuilder(params)
|
||||
.add_expansions(expansions)
|
||||
.add_media_fields(media_fields)
|
||||
.add_place_fields(place_fields)
|
||||
.add_poll_fields(poll_fields)
|
||||
.add_tweet_fields(tweet_fields)
|
||||
.add_user_fields(user_fields)
|
||||
.build()
|
||||
)
|
||||
|
||||
response = cast(Response, client.get_tweet(**params))
|
||||
|
||||
meta = {}
|
||||
user_id = ""
|
||||
user_name = ""
|
||||
|
||||
if response.meta:
|
||||
meta = response.meta
|
||||
|
||||
included = IncludesSerializer.serialize(response.includes)
|
||||
data = ResponseDataSerializer.serialize_dict(response.data)
|
||||
|
||||
if included and "users" in included:
|
||||
user_id = str(included["users"][0]["id"])
|
||||
user_name = included["users"][0]["username"]
|
||||
|
||||
if response.data:
|
||||
return data, included, meta, user_id, user_name
|
||||
|
||||
raise Exception("Tweet not found")
|
||||
|
||||
except tweepy.TweepyException:
|
||||
raise
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: TwitterCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
|
||||
tweet_data, included, meta, user_id, user_name = self.get_tweet(
|
||||
credentials,
|
||||
input_data.tweet_id,
|
||||
input_data.expansions,
|
||||
input_data.media_fields,
|
||||
input_data.place_fields,
|
||||
input_data.poll_fields,
|
||||
input_data.tweet_fields,
|
||||
input_data.user_fields,
|
||||
)
|
||||
|
||||
yield "id", str(tweet_data["id"])
|
||||
yield "text", tweet_data["text"]
|
||||
if user_id:
|
||||
yield "userId", user_id
|
||||
if user_name:
|
||||
yield "userName", user_name
|
||||
yield "data", tweet_data
|
||||
if included:
|
||||
yield "included", included
|
||||
if meta:
|
||||
yield "meta", meta
|
||||
|
||||
except Exception as e:
|
||||
yield "error", handle_tweepy_exception(e)
|
||||
|
||||
|
||||
class TwitterGetTweetsBlock(Block):
|
||||
"""
|
||||
Returns information about multiple Tweets specified by the requested IDs
|
||||
"""
|
||||
|
||||
class Input(TweetExpansionInputs):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["tweet.read", "users.read", "offline.access"]
|
||||
)
|
||||
|
||||
tweet_ids: list[str] = SchemaField(
|
||||
description="List of Tweet IDs to request (up to 100)",
|
||||
placeholder="Enter tweet IDs",
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
# Common Outputs that user commonly uses
|
||||
ids: list[str] = SchemaField(description="All Tweet IDs")
|
||||
texts: list[str] = SchemaField(description="All Tweet texts")
|
||||
userIds: list[str] = SchemaField(
|
||||
description="List of user ids that authored the tweets"
|
||||
)
|
||||
userNames: list[str] = SchemaField(
|
||||
description="List of user names that authored the tweets"
|
||||
)
|
||||
|
||||
# Complete Outputs for advanced use
|
||||
data: list[dict] = SchemaField(description="Complete Tweet data")
|
||||
included: dict = SchemaField(
|
||||
description="Additional data that you have requested (Optional) via Expansions field"
|
||||
)
|
||||
meta: dict = SchemaField(description="Metadata about the tweets")
|
||||
|
||||
error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="e7cc5420-a630-11ef-bfaf-13bdd8096a51",
|
||||
description="This block retrieves information about multiple Tweets.",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetTweetsBlock.Input,
|
||||
output_schema=TwitterGetTweetsBlock.Output,
|
||||
test_input={
|
||||
"tweet_ids": ["1460323737035677698"],
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"expansions": None,
|
||||
"media_fields": None,
|
||||
"place_fields": None,
|
||||
"poll_fields": None,
|
||||
"tweet_fields": None,
|
||||
"user_fields": None,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("ids", ["1460323737035677698"]),
|
||||
("texts", ["Test tweet content"]),
|
||||
("userIds", ["67890"]),
|
||||
("userNames", ["testuser1"]),
|
||||
("data", [{"id": "1460323737035677698", "text": "Test tweet content"}]),
|
||||
("included", {"users": [{"id": "67890", "username": "testuser1"}]}),
|
||||
("meta", {"result_count": 1}),
|
||||
],
|
||||
test_mock={
|
||||
"get_tweets": lambda *args, **kwargs: (
|
||||
["1460323737035677698"], # ids
|
||||
["Test tweet content"], # texts
|
||||
["67890"], # user_ids
|
||||
["testuser1"], # user_names
|
||||
[
|
||||
{"id": "1460323737035677698", "text": "Test tweet content"}
|
||||
], # data
|
||||
{"users": [{"id": "67890", "username": "testuser1"}]}, # included
|
||||
{"result_count": 1}, # meta
|
||||
)
|
||||
},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_tweets(
|
||||
credentials: TwitterCredentials,
|
||||
tweet_ids: list[str],
|
||||
expansions: ExpansionFilter | None,
|
||||
media_fields: TweetMediaFieldsFilter | None,
|
||||
place_fields: TweetPlaceFieldsFilter | None,
|
||||
poll_fields: TweetPollFieldsFilter | None,
|
||||
tweet_fields: TweetFieldsFilter | None,
|
||||
user_fields: TweetUserFieldsFilter | None,
|
||||
):
|
||||
try:
|
||||
client = tweepy.Client(
|
||||
bearer_token=credentials.access_token.get_secret_value()
|
||||
)
|
||||
params = {"ids": tweet_ids, "user_auth": False}
|
||||
|
||||
# Adding expansions to params If required by the user
|
||||
params = (
|
||||
TweetExpansionsBuilder(params)
|
||||
.add_expansions(expansions)
|
||||
.add_media_fields(media_fields)
|
||||
.add_place_fields(place_fields)
|
||||
.add_poll_fields(poll_fields)
|
||||
.add_tweet_fields(tweet_fields)
|
||||
.add_user_fields(user_fields)
|
||||
.build()
|
||||
)
|
||||
|
||||
response = cast(Response, client.get_tweets(**params))
|
||||
|
||||
if not response.data and not response.meta:
|
||||
raise Exception("No tweets found")
|
||||
|
||||
tweet_ids = []
|
||||
tweet_texts = []
|
||||
user_ids = []
|
||||
user_names = []
|
||||
meta = {}
|
||||
|
||||
included = IncludesSerializer.serialize(response.includes)
|
||||
data = ResponseDataSerializer.serialize_list(response.data)
|
||||
|
||||
if response.data:
|
||||
tweet_ids = [str(tweet.id) for tweet in response.data]
|
||||
tweet_texts = [tweet.text for tweet in response.data]
|
||||
|
||||
if included and "users" in included:
|
||||
for user in included["users"]:
|
||||
user_ids.append(str(user["id"]))
|
||||
user_names.append(user["username"])
|
||||
|
||||
if response.meta:
|
||||
meta = response.meta
|
||||
|
||||
return tweet_ids, tweet_texts, user_ids, user_names, data, included, meta
|
||||
|
||||
except tweepy.TweepyException:
|
||||
raise
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: TwitterCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
ids, texts, user_ids, user_names, data, included, meta = self.get_tweets(
|
||||
credentials,
|
||||
input_data.tweet_ids,
|
||||
input_data.expansions,
|
||||
input_data.media_fields,
|
||||
input_data.place_fields,
|
||||
input_data.poll_fields,
|
||||
input_data.tweet_fields,
|
||||
input_data.user_fields,
|
||||
)
|
||||
if ids:
|
||||
yield "ids", ids
|
||||
if texts:
|
||||
yield "texts", texts
|
||||
if user_ids:
|
||||
yield "userIds", user_ids
|
||||
if user_names:
|
||||
yield "userNames", user_names
|
||||
if data:
|
||||
yield "data", data
|
||||
if included:
|
||||
yield "included", included
|
||||
if meta:
|
||||
yield "meta", meta
|
||||
|
||||
except Exception as e:
|
||||
yield "error", handle_tweepy_exception(e)
|
||||
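Note (editor's sketch, not part of this diff): TwitterGetTweetBlock only emits userId/userName when the author is requested via expansions, because it reads them from included["users"]. A minimal illustration of that lookup with tweepy, assuming a placeholder bearer token and an illustrative helper name.

import tweepy

def get_tweet_author(bearer_token: str, tweet_id: str) -> tuple[str, str] | None:
    # Request the author via expansions and read it back from response.includes,
    # which the block exposes through its "included" output.
    client = tweepy.Client(bearer_token=bearer_token)
    response = client.get_tweet(
        id=tweet_id,
        expansions=["author_id"],
        user_fields=["username"],
        user_auth=False,
    )
    users = (response.includes or {}).get("users") or []
    if not users:
        return None
    return str(users[0].id), users[0].username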
305 autogpt_platform/backend/backend/blocks/twitter/users/blocks.py Normal file
@@ -0,0 +1,305 @@
|
||||
from typing import cast
|
||||
|
||||
import tweepy
|
||||
from tweepy.client import Response
|
||||
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
)
|
||||
from backend.blocks.twitter._builders import UserExpansionsBuilder
|
||||
from backend.blocks.twitter._serializer import IncludesSerializer
|
||||
from backend.blocks.twitter._types import (
|
||||
TweetFieldsFilter,
|
||||
TweetUserFieldsFilter,
|
||||
UserExpansionInputs,
|
||||
UserExpansionsFilter,
|
||||
)
|
||||
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
|
||||
|
||||
class TwitterUnblockUserBlock(Block):
|
||||
"""
|
||||
Unblock a specific user on Twitter
|
||||
"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["block.write", "users.read", "offline.access"]
|
||||
)
|
||||
|
||||
target_user_id: str = SchemaField(
|
||||
description="The user ID of the user that you would like to unblock",
|
||||
placeholder="Enter target user ID",
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
success: bool = SchemaField(description="Whether the unblock was successful")
|
||||
error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="0f1b6570-a631-11ef-a3ea-230cbe9650dd",
|
||||
description="This block unblocks a specific user on Twitter.",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterUnblockUserBlock.Input,
|
||||
output_schema=TwitterUnblockUserBlock.Output,
|
||||
test_input={
|
||||
"target_user_id": "12345",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("success", True),
|
||||
],
|
||||
test_mock={"unblock_user": lambda *args, **kwargs: True},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def unblock_user(credentials: TwitterCredentials, target_user_id: str):
|
||||
try:
|
||||
client = tweepy.Client(
|
||||
bearer_token=credentials.access_token.get_secret_value()
|
||||
)
|
||||
|
||||
client.unblock(target_user_id=target_user_id, user_auth=False)
|
||||
|
||||
return True
|
||||
|
||||
except tweepy.TweepyException:
|
||||
raise
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: TwitterCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
success = self.unblock_user(credentials, input_data.target_user_id)
|
||||
yield "success", success
|
||||
except Exception as e:
|
||||
yield "error", handle_tweepy_exception(e)
|
||||
|
||||
|
||||
class TwitterGetBlockedUsersBlock(Block):
|
||||
"""
|
||||
Get a list of users who are blocked by the authenticating user
|
||||
"""
|
||||
|
||||
class Input(UserExpansionInputs):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["users.read", "offline.access", "block.read"]
|
||||
)
|
||||
|
||||
max_results: int = SchemaField(
|
||||
description="Maximum number of results to return (1-1000, default 100)",
|
||||
placeholder="Enter max results",
|
||||
default=10,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
pagination_token: str | None = SchemaField(
|
||||
description="Token for retrieving next/previous page of results",
|
||||
placeholder="Enter pagination token",
|
||||
default="",
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
user_ids: list[str] = SchemaField(description="List of blocked user IDs")
|
||||
usernames_: list[str] = SchemaField(description="List of blocked usernames")
|
||||
included: dict = SchemaField(
|
||||
description="Additional data requested via expansions"
|
||||
)
|
||||
meta: dict = SchemaField(description="Metadata including pagination info")
|
||||
next_token: str = SchemaField(description="Next token for pagination")
|
||||
error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="05f409e8-a631-11ef-ae89-93de863ee30d",
|
||||
description="This block retrieves a list of users blocked by the authenticating user.",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetBlockedUsersBlock.Input,
|
||||
output_schema=TwitterGetBlockedUsersBlock.Output,
|
||||
test_input={
|
||||
"max_results": 10,
|
||||
"pagination_token": "",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"expansions": None,
|
||||
"tweet_fields": None,
|
||||
"user_fields": None,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("user_ids", ["12345", "67890"]),
|
||||
("usernames_", ["testuser1", "testuser2"]),
|
||||
],
|
||||
test_mock={
|
||||
"get_blocked_users": lambda *args, **kwargs: (
|
||||
{}, # included
|
||||
{}, # meta
|
||||
["12345", "67890"], # user_ids
|
||||
["testuser1", "testuser2"], # usernames
|
||||
None, # next_token
|
||||
)
|
||||
},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_blocked_users(
|
||||
credentials: TwitterCredentials,
|
||||
max_results: int,
|
||||
pagination_token: str | None,
|
||||
expansions: UserExpansionsFilter | None,
|
||||
tweet_fields: TweetFieldsFilter | None,
|
||||
user_fields: TweetUserFieldsFilter | None,
|
||||
):
|
||||
try:
|
||||
client = tweepy.Client(
|
||||
bearer_token=credentials.access_token.get_secret_value()
|
||||
)
|
||||
|
||||
params = {
|
||||
"max_results": max_results,
|
||||
"pagination_token": (
|
||||
None if pagination_token == "" else pagination_token
|
||||
),
|
||||
"user_auth": False,
|
||||
}
|
||||
|
||||
params = (
|
||||
UserExpansionsBuilder(params)
|
||||
.add_expansions(expansions)
|
||||
.add_tweet_fields(tweet_fields)
|
||||
.add_user_fields(user_fields)
|
||||
.build()
|
||||
)
|
||||
|
||||
response = cast(Response, client.get_blocked(**params))
|
||||
|
||||
meta = {}
|
||||
user_ids = []
|
||||
usernames = []
|
||||
next_token = None
|
||||
|
||||
included = IncludesSerializer.serialize(response.includes)
|
||||
|
||||
if response.data:
|
||||
for user in response.data:
|
||||
user_ids.append(str(user.id))
|
||||
usernames.append(user.username)
|
||||
|
||||
if response.meta:
|
||||
meta = response.meta
|
||||
if "next_token" in meta:
|
||||
next_token = meta["next_token"]
|
||||
|
||||
if user_ids and usernames:
|
||||
return included, meta, user_ids, usernames, next_token
|
||||
else:
|
||||
raise tweepy.TweepyException("No blocked users found")
|
||||
|
||||
except tweepy.TweepyException:
|
||||
raise
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: TwitterCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
included, meta, user_ids, usernames, next_token = self.get_blocked_users(
|
||||
credentials,
|
||||
input_data.max_results,
|
||||
input_data.pagination_token,
|
||||
input_data.expansions,
|
||||
input_data.tweet_fields,
|
||||
input_data.user_fields,
|
||||
)
|
||||
if user_ids:
|
||||
yield "user_ids", user_ids
|
||||
if usernames:
|
||||
yield "usernames_", usernames
|
||||
if included:
|
||||
yield "included", included
|
||||
if meta:
|
||||
yield "meta", meta
|
||||
if next_token:
|
||||
yield "next_token", next_token
|
||||
except Exception as e:
|
||||
yield "error", handle_tweepy_exception(e)
|
||||
|
||||
|
||||
class TwitterBlockUserBlock(Block):
|
||||
"""
|
||||
Block a specific user on Twitter
|
||||
"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["block.write", "users.read", "offline.access"]
|
||||
)
|
||||
|
||||
target_user_id: str = SchemaField(
|
||||
description="The user ID of the user that you would like to block",
|
||||
placeholder="Enter target user ID",
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
success: bool = SchemaField(description="Whether the block was successful")
|
||||
error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="fc258b94-a630-11ef-abc3-df050b75b816",
|
||||
description="This block blocks a specific user on Twitter.",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterBlockUserBlock.Input,
|
||||
output_schema=TwitterBlockUserBlock.Output,
|
||||
test_input={
|
||||
"target_user_id": "12345",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("success", True),
|
||||
],
|
||||
test_mock={"block_user": lambda *args, **kwargs: True},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def block_user(credentials: TwitterCredentials, target_user_id: str):
|
||||
try:
|
||||
client = tweepy.Client(
|
||||
bearer_token=credentials.access_token.get_secret_value()
|
||||
)
|
||||
|
||||
client.block(target_user_id=target_user_id, user_auth=False)
|
||||
|
||||
return True
|
||||
|
||||
except tweepy.TweepyException:
|
||||
raise
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: TwitterCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
success = self.block_user(credentials, input_data.target_user_id)
|
||||
yield "success", success
|
||||
except Exception as e:
|
||||
yield "error", handle_tweepy_exception(e)
|
||||
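Note (editor's sketch, not part of this diff): the block/unblock helpers in the file above are thin wrappers around tweepy's user-context calls. A minimal illustration, assuming a placeholder bearer token carrying the block.write scope; the function name is illustrative.

import tweepy

def set_block_state(bearer_token: str, target_user_id: str, blocked: bool) -> bool:
    # Mirrors TwitterBlockUserBlock.block_user and TwitterUnblockUserBlock.unblock_user.
    client = tweepy.Client(bearer_token=bearer_token)
    if blocked:
        client.block(target_user_id=target_user_id, user_auth=False)
    else:
        client.unblock(target_user_id=target_user_id, user_auth=False)
    return True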
506 autogpt_platform/backend/backend/blocks/twitter/users/follows.py Normal file
@@ -0,0 +1,506 @@
|
||||
from typing import cast

import tweepy
from tweepy.client import Response

from backend.blocks.twitter._auth import (
    TEST_CREDENTIALS,
    TEST_CREDENTIALS_INPUT,
    TwitterCredentials,
    TwitterCredentialsField,
    TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import UserExpansionsBuilder
from backend.blocks.twitter._serializer import (
    IncludesSerializer,
    ResponseDataSerializer,
)
from backend.blocks.twitter._types import (
    TweetFieldsFilter,
    TweetUserFieldsFilter,
    UserExpansionInputs,
    UserExpansionsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField


class TwitterUnfollowUserBlock(Block):
    """
    Allows a user to unfollow another user specified by target user ID
    """

    class Input(BlockSchema):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["users.read", "users.write", "follows.write", "offline.access"]
        )

        target_user_id: str = SchemaField(
            description="The user ID of the user that you would like to unfollow",
            placeholder="Enter target user ID",
        )

    class Output(BlockSchema):
        success: bool = SchemaField(
            description="Whether the unfollow action was successful"
        )
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="37e386a4-a631-11ef-b7bd-b78204b35fa4",
            description="This block unfollows a specified Twitter user.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterUnfollowUserBlock.Input,
            output_schema=TwitterUnfollowUserBlock.Output,
            test_input={
                "target_user_id": "12345",
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("success", True),
            ],
            test_mock={"unfollow_user": lambda *args, **kwargs: True},
        )

    @staticmethod
    def unfollow_user(credentials: TwitterCredentials, target_user_id: str):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            client.unfollow_user(target_user_id=target_user_id, user_auth=False)

            return True

        except tweepy.TweepyException:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            success = self.unfollow_user(credentials, input_data.target_user_id)
            yield "success", success

        except Exception as e:
            yield "error", handle_tweepy_exception(e)


class TwitterFollowUserBlock(Block):
    """
    Allows a user to follow another user specified by target user ID
    """

    class Input(BlockSchema):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["users.read", "users.write", "follows.write", "offline.access"]
        )

        target_user_id: str = SchemaField(
            description="The user ID of the user that you would like to follow",
            placeholder="Enter target user ID",
        )

    class Output(BlockSchema):
        success: bool = SchemaField(
            description="Whether the follow action was successful"
        )
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="1aae6a5e-a631-11ef-a090-435900c6d429",
            description="This block follows a specified Twitter user.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterFollowUserBlock.Input,
            output_schema=TwitterFollowUserBlock.Output,
            test_input={
                "target_user_id": "12345",
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[("success", True)],
            test_mock={"follow_user": lambda *args, **kwargs: True},
        )

    @staticmethod
    def follow_user(credentials: TwitterCredentials, target_user_id: str):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            client.follow_user(target_user_id=target_user_id, user_auth=False)

            return True

        except tweepy.TweepyException:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            success = self.follow_user(credentials, input_data.target_user_id)
            yield "success", success

        except Exception as e:
            yield "error", handle_tweepy_exception(e)


class TwitterGetFollowersBlock(Block):
    """
    Retrieves a list of followers for a specified Twitter user ID
    """

    class Input(UserExpansionInputs):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["users.read", "offline.access", "follows.read"]
        )

        target_user_id: str = SchemaField(
            description="The user ID whose followers you would like to retrieve",
            placeholder="Enter target user ID",
        )

        max_results: int = SchemaField(
            description="Maximum number of results to return (1-1000, default 100)",
            placeholder="Enter max results",
            default=10,
            advanced=True,
        )

        pagination_token: str | None = SchemaField(
            description="Token for retrieving next/previous page of results",
            placeholder="Enter pagination token",
            default="",
            advanced=True,
        )

    class Output(BlockSchema):
        ids: list[str] = SchemaField(description="List of follower user IDs")
        usernames: list[str] = SchemaField(description="List of follower usernames")
        next_token: str = SchemaField(description="Next token for pagination")

        data: list[dict] = SchemaField(description="Complete user data for followers")
        includes: dict = SchemaField(
            description="Additional data requested via expansions"
        )
        meta: dict = SchemaField(description="Metadata including pagination info")

        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="30f66410-a631-11ef-8fe7-d7f888b4f43c",
            description="This block retrieves followers of a specified Twitter user.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterGetFollowersBlock.Input,
            output_schema=TwitterGetFollowersBlock.Output,
            test_input={
                "target_user_id": "12345",
                "max_results": 1,
                "pagination_token": "",
                "expansions": None,
                "tweet_fields": None,
                "user_fields": None,
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("ids", ["1234567890"]),
                ("usernames", ["testuser"]),
                ("data", [{"id": "1234567890", "username": "testuser"}]),
            ],
            test_mock={
                "get_followers": lambda *args, **kwargs: (
                    ["1234567890"],
                    ["testuser"],
                    [{"id": "1234567890", "username": "testuser"}],
                    {},
                    {},
                    None,
                )
            },
        )

    @staticmethod
    def get_followers(
        credentials: TwitterCredentials,
        target_user_id: str,
        max_results: int,
        pagination_token: str | None,
        expansions: UserExpansionsFilter | None,
        tweet_fields: TweetFieldsFilter | None,
        user_fields: TweetUserFieldsFilter | None,
    ):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            params = {
                "id": target_user_id,
                "max_results": max_results,
                "pagination_token": (
                    None if pagination_token == "" else pagination_token
                ),
                "user_auth": False,
            }

            params = (
                UserExpansionsBuilder(params)
                .add_expansions(expansions)
                .add_tweet_fields(tweet_fields)
                .add_user_fields(user_fields)
                .build()
            )

            response = cast(Response, client.get_users_followers(**params))

            meta = {}
            follower_ids = []
            follower_usernames = []
            next_token = None

            if response.meta:
                meta = response.meta
                next_token = meta.get("next_token")

            included = IncludesSerializer.serialize(response.includes)
            data = ResponseDataSerializer.serialize_list(response.data)

            if response.data:
                follower_ids = [str(user.id) for user in response.data]
                follower_usernames = [user.username for user in response.data]

                return (
                    follower_ids,
                    follower_usernames,
                    data,
                    included,
                    meta,
                    next_token,
                )

            raise Exception("Followers not found")

        except tweepy.TweepyException:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            ids, usernames, data, includes, meta, next_token = self.get_followers(
                credentials,
                input_data.target_user_id,
                input_data.max_results,
                input_data.pagination_token,
                input_data.expansions,
                input_data.tweet_fields,
                input_data.user_fields,
            )
            if ids:
                yield "ids", ids
            if usernames:
                yield "usernames", usernames
            if next_token:
                yield "next_token", next_token
            if data:
                yield "data", data
            if includes:
                yield "includes", includes
            if meta:
                yield "meta", meta
        except Exception as e:
            yield "error", handle_tweepy_exception(e)


class TwitterGetFollowingBlock(Block):
    """
    Retrieves a list of users that a specified Twitter user ID is following
    """

    class Input(UserExpansionInputs):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["users.read", "offline.access", "follows.read"]
        )

        target_user_id: str = SchemaField(
            description="The user ID whose following you would like to retrieve",
            placeholder="Enter target user ID",
        )

        max_results: int = SchemaField(
            description="Maximum number of results to return (1-1000, default 100)",
            placeholder="Enter max results",
            default=10,
            advanced=True,
        )

        pagination_token: str | None = SchemaField(
            description="Token for retrieving next/previous page of results",
            placeholder="Enter pagination token",
            default="",
            advanced=True,
        )

    class Output(BlockSchema):
        ids: list[str] = SchemaField(description="List of following user IDs")
        usernames: list[str] = SchemaField(description="List of following usernames")
        next_token: str = SchemaField(description="Next token for pagination")

        data: list[dict] = SchemaField(description="Complete user data for following")
        includes: dict = SchemaField(
            description="Additional data requested via expansions"
        )
        meta: dict = SchemaField(description="Metadata including pagination info")

        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="264a399c-a631-11ef-a97d-bfde4ca91173",
            description="This block retrieves the users that a specified Twitter user is following.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterGetFollowingBlock.Input,
            output_schema=TwitterGetFollowingBlock.Output,
            test_input={
                "target_user_id": "12345",
                "max_results": 1,
                "pagination_token": None,
                "expansions": None,
                "tweet_fields": None,
                "user_fields": None,
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("ids", ["1234567890"]),
                ("usernames", ["testuser"]),
                ("data", [{"id": "1234567890", "username": "testuser"}]),
            ],
            test_mock={
                "get_following": lambda *args, **kwargs: (
                    ["1234567890"],
                    ["testuser"],
                    [{"id": "1234567890", "username": "testuser"}],
                    {},
                    {},
                    None,
                )
            },
        )

    @staticmethod
    def get_following(
        credentials: TwitterCredentials,
        target_user_id: str,
        max_results: int,
        pagination_token: str | None,
        expansions: UserExpansionsFilter | None,
        tweet_fields: TweetFieldsFilter | None,
        user_fields: TweetUserFieldsFilter | None,
    ):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            params = {
                "id": target_user_id,
                "max_results": max_results,
                "pagination_token": (
                    None if pagination_token == "" else pagination_token
                ),
                "user_auth": False,
            }

            params = (
                UserExpansionsBuilder(params)
                .add_expansions(expansions)
                .add_tweet_fields(tweet_fields)
                .add_user_fields(user_fields)
                .build()
            )

            response = cast(Response, client.get_users_following(**params))

            meta = {}
            following_ids = []
            following_usernames = []
            next_token = None

            if response.meta:
                meta = response.meta
                next_token = meta.get("next_token")

            included = IncludesSerializer.serialize(response.includes)
            data = ResponseDataSerializer.serialize_list(response.data)

            if response.data:
                following_ids = [str(user.id) for user in response.data]
                following_usernames = [user.username for user in response.data]

                return (
                    following_ids,
                    following_usernames,
                    data,
                    included,
                    meta,
                    next_token,
                )

            raise Exception("Following not found")

        except tweepy.TweepyException:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            ids, usernames, data, includes, meta, next_token = self.get_following(
                credentials,
                input_data.target_user_id,
                input_data.max_results,
                input_data.pagination_token,
                input_data.expansions,
                input_data.tweet_fields,
                input_data.user_fields,
            )
            if ids:
                yield "ids", ids
            if usernames:
                yield "usernames", usernames
            if next_token:
                yield "next_token", next_token
            if data:
                yield "data", data
            if includes:
                yield "includes", includes
            if meta:
                yield "meta", meta
        except Exception as e:
            yield "error", handle_tweepy_exception(e)
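The follower/following blocks above emit `next_token` precisely so that callers can page through accounts with more followers than `max_results`. A rough sketch of that loop, calling `TwitterGetFollowersBlock.get_followers` directly (assumes valid `TwitterCredentials`; expansions and field filters left as `None`, error handling elided):

    # Illustrative pagination over get_followers using the returned next_token.
    all_ids: list[str] = []
    token: str | None = ""
    while True:
        ids, _usernames, _data, _includes, _meta, token = (
            TwitterGetFollowersBlock.get_followers(
                credentials, "12345", 100, token, None, None, None
            )
        )
        all_ids.extend(ids)
        if not token:  # last page: no next_token is returned
            break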
327 autogpt_platform/backend/backend/blocks/twitter/users/mutes.py Normal file
@@ -0,0 +1,327 @@
from typing import cast

import tweepy
from tweepy.client import Response

from backend.blocks.twitter._auth import (
    TEST_CREDENTIALS,
    TEST_CREDENTIALS_INPUT,
    TwitterCredentials,
    TwitterCredentialsField,
    TwitterCredentialsInput,
)
from backend.blocks.twitter._builders import UserExpansionsBuilder
from backend.blocks.twitter._serializer import (
    IncludesSerializer,
    ResponseDataSerializer,
)
from backend.blocks.twitter._types import (
    TweetFieldsFilter,
    TweetUserFieldsFilter,
    UserExpansionInputs,
    UserExpansionsFilter,
)
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField


class TwitterUnmuteUserBlock(Block):
    """
    Allows a user to unmute another user specified by target user ID
    """

    class Input(BlockSchema):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["users.read", "users.write", "offline.access"]
        )

        target_user_id: str = SchemaField(
            description="The user ID of the user that you would like to unmute",
            placeholder="Enter target user ID",
        )

    class Output(BlockSchema):
        success: bool = SchemaField(
            description="Whether the unmute action was successful"
        )
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="40458504-a631-11ef-940b-eff92be55422",
            description="This block unmutes a specified Twitter user.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterUnmuteUserBlock.Input,
            output_schema=TwitterUnmuteUserBlock.Output,
            test_input={
                "target_user_id": "12345",
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("success", True),
            ],
            test_mock={"unmute_user": lambda *args, **kwargs: True},
        )

    @staticmethod
    def unmute_user(credentials: TwitterCredentials, target_user_id: str):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            client.unmute(target_user_id=target_user_id, user_auth=False)

            return True

        except tweepy.TweepyException:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            success = self.unmute_user(credentials, input_data.target_user_id)
            yield "success", success

        except Exception as e:
            yield "error", handle_tweepy_exception(e)


class TwitterGetMutedUsersBlock(Block):
    """
    Returns a list of users who are muted by the authenticating user
    """

    class Input(UserExpansionInputs):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["users.read", "offline.access"]
        )

        max_results: int = SchemaField(
            description="The maximum number of results to be returned per page (1-1000). Default is 100.",
            placeholder="Enter max results",
            default=10,
            advanced=True,
        )

        pagination_token: str | None = SchemaField(
            description="Token to request next/previous page of results",
            placeholder="Enter pagination token",
            default="",
            advanced=True,
        )

    class Output(BlockSchema):
        ids: list[str] = SchemaField(description="List of muted user IDs")
        usernames: list[str] = SchemaField(description="List of muted usernames")
        next_token: str = SchemaField(description="Next token for pagination")

        data: list[dict] = SchemaField(description="Complete user data for muted users")
        includes: dict = SchemaField(
            description="Additional data requested via expansions"
        )
        meta: dict = SchemaField(description="Metadata including pagination info")

        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="475024da-a631-11ef-9ccd-f724b8b03cda",
            description="This block gets a list of users muted by the authenticating user.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterGetMutedUsersBlock.Input,
            output_schema=TwitterGetMutedUsersBlock.Output,
            test_input={
                "max_results": 2,
                "pagination_token": "",
                "credentials": TEST_CREDENTIALS_INPUT,
                "expansions": None,
                "tweet_fields": None,
                "user_fields": None,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("ids", ["12345", "67890"]),
                ("usernames", ["testuser1", "testuser2"]),
                (
                    "data",
                    [
                        {"id": "12345", "username": "testuser1"},
                        {"id": "67890", "username": "testuser2"},
                    ],
                ),
            ],
            test_mock={
                "get_muted_users": lambda *args, **kwargs: (
                    ["12345", "67890"],
                    ["testuser1", "testuser2"],
                    [
                        {"id": "12345", "username": "testuser1"},
                        {"id": "67890", "username": "testuser2"},
                    ],
                    {},
                    {},
                    None,
                )
            },
        )

    @staticmethod
    def get_muted_users(
        credentials: TwitterCredentials,
        max_results: int,
        pagination_token: str | None,
        expansions: UserExpansionsFilter | None,
        tweet_fields: TweetFieldsFilter | None,
        user_fields: TweetUserFieldsFilter | None,
    ):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            params = {
                "max_results": max_results,
                "pagination_token": (
                    None if pagination_token == "" else pagination_token
                ),
                "user_auth": False,
            }

            params = (
                UserExpansionsBuilder(params)
                .add_expansions(expansions)
                .add_tweet_fields(tweet_fields)
                .add_user_fields(user_fields)
                .build()
            )

            response = cast(Response, client.get_muted(**params))

            meta = {}
            user_ids = []
            usernames = []
            next_token = None

            if response.meta:
                meta = response.meta
                next_token = meta.get("next_token")

            included = IncludesSerializer.serialize(response.includes)
            data = ResponseDataSerializer.serialize_list(response.data)

            if response.data:
                user_ids = [str(item.id) for item in response.data]
                usernames = [item.username for item in response.data]

                return user_ids, usernames, data, included, meta, next_token

            raise Exception("Muted users not found")

        except tweepy.TweepyException:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            ids, usernames, data, includes, meta, next_token = self.get_muted_users(
                credentials,
                input_data.max_results,
                input_data.pagination_token,
                input_data.expansions,
                input_data.tweet_fields,
                input_data.user_fields,
            )
            if ids:
                yield "ids", ids
            if usernames:
                yield "usernames", usernames
            if next_token:
                yield "next_token", next_token
            if data:
                yield "data", data
            if includes:
                yield "includes", includes
            if meta:
                yield "meta", meta
        except Exception as e:
            yield "error", handle_tweepy_exception(e)


class TwitterMuteUserBlock(Block):
    """
    Allows a user to mute another user specified by target user ID
    """

    class Input(BlockSchema):
        credentials: TwitterCredentialsInput = TwitterCredentialsField(
            ["users.read", "users.write", "offline.access"]
        )

        target_user_id: str = SchemaField(
            description="The user ID of the user that you would like to mute",
            placeholder="Enter target user ID",
        )

    class Output(BlockSchema):
        success: bool = SchemaField(
            description="Whether the mute action was successful"
        )
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="4d1919d0-a631-11ef-90ab-3b73af9ce8f1",
            description="This block mutes a specified Twitter user.",
            categories={BlockCategory.SOCIAL},
            input_schema=TwitterMuteUserBlock.Input,
            output_schema=TwitterMuteUserBlock.Output,
            test_input={
                "target_user_id": "12345",
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("success", True),
            ],
            test_mock={"mute_user": lambda *args, **kwargs: True},
        )

    @staticmethod
    def mute_user(credentials: TwitterCredentials, target_user_id: str):
        try:
            client = tweepy.Client(
                bearer_token=credentials.access_token.get_secret_value()
            )

            client.mute(target_user_id=target_user_id, user_auth=False)

            return True

        except tweepy.TweepyException:
            raise

    def run(
        self,
        input_data: Input,
        *,
        credentials: TwitterCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            success = self.mute_user(credentials, input_data.target_user_id)
            yield "success", success
        except Exception as e:
            yield "error", handle_tweepy_exception(e)
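`UserExpansionsBuilder` (from `_builders`, not shown in this diff) is what turns the optional expansion/field inputs into Tweepy keyword arguments. Its implementation isn't part of this hunk, so the sketch below only illustrates the assumed effect: non-`None` filters are merged into the base params and `None` filters are dropped. The names and behaviour here are assumptions, not the real `_builders` code.

    # Assumed effect of the builder chain, illustrative only.
    params = {"max_results": 10, "pagination_token": None, "user_auth": False}
    filters = {"expansions": None, "tweet.fields": ["created_at"], "user.fields": None}
    params |= {key: value for key, value in filters.items() if value is not None}
    # params now also carries {"tweet.fields": ["created_at"]}; the None filters were skipped.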
@@ -0,0 +1,359 @@
|
||||
from typing import cast
|
||||
|
||||
import tweepy
|
||||
from tweepy.client import Response
|
||||
|
||||
from backend.blocks.twitter._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TwitterCredentials,
|
||||
TwitterCredentialsField,
|
||||
TwitterCredentialsInput,
|
||||
)
|
||||
from backend.blocks.twitter._builders import UserExpansionsBuilder
|
||||
from backend.blocks.twitter._serializer import (
|
||||
IncludesSerializer,
|
||||
ResponseDataSerializer,
|
||||
)
|
||||
from backend.blocks.twitter._types import (
|
||||
TweetFieldsFilter,
|
||||
TweetUserFieldsFilter,
|
||||
UserExpansionInputs,
|
||||
UserExpansionsFilter,
|
||||
)
|
||||
from backend.blocks.twitter.tweepy_exceptions import handle_tweepy_exception
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
|
||||
|
||||
class TwitterGetUserBlock(Block):
|
||||
"""
|
||||
Gets information about a single Twitter user specified by ID or username
|
||||
"""
|
||||
|
||||
class Input(UserExpansionInputs):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["users.read", "offline.access"]
|
||||
)
|
||||
|
||||
user_id: str = SchemaField(
|
||||
description="The ID of the user to lookup",
|
||||
placeholder="Enter user ID",
|
||||
default="",
|
||||
advanced=False,
|
||||
)
|
||||
|
||||
username: str = SchemaField(
|
||||
description="The Twitter username (handle) of the user",
|
||||
placeholder="Enter username",
|
||||
default="",
|
||||
advanced=False,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
# Common outputs
|
||||
id: str = SchemaField(description="User ID")
|
||||
username_: str = SchemaField(description="User username")
|
||||
name_: str = SchemaField(description="User name")
|
||||
|
||||
# Complete outputs
|
||||
data: dict = SchemaField(description="Complete user data")
|
||||
included: dict = SchemaField(
|
||||
description="Additional data requested via expansions"
|
||||
)
|
||||
error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="5446db8e-a631-11ef-812a-cf315d373ee9",
|
||||
description="This block retrieves information about a specified Twitter user.",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetUserBlock.Input,
|
||||
output_schema=TwitterGetUserBlock.Output,
|
||||
test_input={
|
||||
"user_id": "",
|
||||
"username": "twitter",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"expansions": None,
|
||||
"tweet_fields": None,
|
||||
"user_fields": None,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("id", "783214"),
|
||||
("username_", "twitter"),
|
||||
("name_", "Twitter"),
|
||||
(
|
||||
"data",
|
||||
{
|
||||
"user": {
|
||||
"id": "783214",
|
||||
"username": "twitter",
|
||||
"name": "Twitter",
|
||||
}
|
||||
},
|
||||
),
|
||||
],
|
||||
test_mock={
|
||||
"get_user": lambda *args, **kwargs: (
|
||||
{
|
||||
"user": {
|
||||
"id": "783214",
|
||||
"username": "twitter",
|
||||
"name": "Twitter",
|
||||
}
|
||||
},
|
||||
{},
|
||||
"twitter",
|
||||
"783214",
|
||||
"Twitter",
|
||||
)
|
||||
},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_user(
|
||||
credentials: TwitterCredentials,
|
||||
user_id: str,
|
||||
username: str,
|
||||
expansions: UserExpansionsFilter | None,
|
||||
tweet_fields: TweetFieldsFilter | None,
|
||||
user_fields: TweetUserFieldsFilter | None,
|
||||
):
|
||||
try:
|
||||
client = tweepy.Client(
|
||||
bearer_token=credentials.access_token.get_secret_value()
|
||||
)
|
||||
|
||||
params = {
|
||||
"id": None if not user_id else user_id,
|
||||
"username": None if not username else username,
|
||||
"user_auth": False,
|
||||
}
|
||||
|
||||
params = (
|
||||
UserExpansionsBuilder(params)
|
||||
.add_expansions(expansions)
|
||||
.add_tweet_fields(tweet_fields)
|
||||
.add_user_fields(user_fields)
|
||||
.build()
|
||||
)
|
||||
|
||||
response = cast(Response, client.get_user(**params))
|
||||
|
||||
username = ""
|
||||
id = ""
|
||||
name = ""
|
||||
|
||||
included = IncludesSerializer.serialize(response.includes)
|
||||
data = ResponseDataSerializer.serialize_dict(response.data)
|
||||
|
||||
if response.data:
|
||||
username = response.data.username
|
||||
id = str(response.data.id)
|
||||
name = response.data.name
|
||||
|
||||
if username and id:
|
||||
return data, included, username, id, name
|
||||
else:
|
||||
raise tweepy.TweepyException("User not found")
|
||||
|
||||
except tweepy.TweepyException:
|
||||
raise
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: TwitterCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
data, included, username, id, name = self.get_user(
|
||||
credentials,
|
||||
input_data.user_id,
|
||||
input_data.username,
|
||||
input_data.expansions,
|
||||
input_data.tweet_fields,
|
||||
input_data.user_fields,
|
||||
)
|
||||
if id:
|
||||
yield "id", id
|
||||
if username:
|
||||
yield "username_", username
|
||||
if name:
|
||||
yield "name_", name
|
||||
if data:
|
||||
yield "data", data
|
||||
if included:
|
||||
yield "included", included
|
||||
except Exception as e:
|
||||
yield "error", handle_tweepy_exception(e)
|
||||
|
||||
|
||||
class TwitterGetUsersBlock(Block):
|
||||
"""
|
||||
Gets information about multiple Twitter users specified by IDs or usernames
|
||||
"""
|
||||
|
||||
class Input(UserExpansionInputs):
|
||||
credentials: TwitterCredentialsInput = TwitterCredentialsField(
|
||||
["users.read", "offline.access"]
|
||||
)
|
||||
|
||||
user_ids: list[str] = SchemaField(
|
||||
description="List of user IDs to lookup (max 100)",
|
||||
placeholder="Enter user IDs",
|
||||
default=[],
|
||||
advanced=False,
|
||||
)
|
||||
|
||||
usernames: list[str] = SchemaField(
|
||||
description="List of Twitter usernames/handles to lookup (max 100)",
|
||||
placeholder="Enter usernames",
|
||||
default=[],
|
||||
advanced=False,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
# Common outputs
|
||||
ids: list[str] = SchemaField(description="User IDs")
|
||||
usernames_: list[str] = SchemaField(description="User usernames")
|
||||
names_: list[str] = SchemaField(description="User names")
|
||||
|
||||
# Complete outputs
|
||||
data: list[dict] = SchemaField(description="Complete users data")
|
||||
included: dict = SchemaField(
|
||||
description="Additional data requested via expansions"
|
||||
)
|
||||
error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="5abc857c-a631-11ef-8cfc-f7b79354f7a1",
|
||||
description="This block retrieves information about multiple Twitter users.",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=TwitterGetUsersBlock.Input,
|
||||
output_schema=TwitterGetUsersBlock.Output,
|
||||
test_input={
|
||||
"user_ids": [],
|
||||
"usernames": ["twitter", "twitterdev"],
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"expansions": None,
|
||||
"tweet_fields": None,
|
||||
"user_fields": None,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
("ids", ["783214", "2244994945"]),
|
||||
("usernames_", ["twitter", "twitterdev"]),
|
||||
("names_", ["Twitter", "Twitter Dev"]),
|
||||
(
|
||||
"data",
|
||||
[
|
||||
{"id": "783214", "username": "twitter", "name": "Twitter"},
|
||||
{
|
||||
"id": "2244994945",
|
||||
"username": "twitterdev",
|
||||
"name": "Twitter Dev",
|
||||
},
|
||||
],
|
||||
),
|
||||
],
|
||||
test_mock={
|
||||
"get_users": lambda *args, **kwargs: (
|
||||
[
|
||||
{"id": "783214", "username": "twitter", "name": "Twitter"},
|
||||
{
|
||||
"id": "2244994945",
|
||||
"username": "twitterdev",
|
||||
"name": "Twitter Dev",
|
||||
},
|
||||
],
|
||||
{},
|
||||
["twitter", "twitterdev"],
|
||||
["783214", "2244994945"],
|
||||
["Twitter", "Twitter Dev"],
|
||||
)
|
||||
},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_users(
|
||||
credentials: TwitterCredentials,
|
||||
user_ids: list[str],
|
||||
usernames: list[str],
|
||||
expansions: UserExpansionsFilter | None,
|
||||
tweet_fields: TweetFieldsFilter | None,
|
||||
user_fields: TweetUserFieldsFilter | None,
|
||||
):
|
||||
try:
|
||||
client = tweepy.Client(
|
||||
bearer_token=credentials.access_token.get_secret_value()
|
||||
)
|
||||
|
||||
params = {
|
||||
"ids": None if not user_ids else user_ids,
|
||||
"usernames": None if not usernames else usernames,
|
||||
"user_auth": False,
|
||||
}
|
||||
|
||||
params = (
|
||||
UserExpansionsBuilder(params)
|
||||
.add_expansions(expansions)
|
||||
.add_tweet_fields(tweet_fields)
|
||||
.add_user_fields(user_fields)
|
||||
.build()
|
||||
)
|
||||
|
||||
response = cast(Response, client.get_users(**params))
|
||||
|
||||
usernames = []
|
||||
ids = []
|
||||
names = []
|
||||
|
||||
included = IncludesSerializer.serialize(response.includes)
|
||||
data = ResponseDataSerializer.serialize_list(response.data)
|
||||
|
||||
if response.data:
|
||||
for user in response.data:
|
||||
usernames.append(user.username)
|
||||
ids.append(str(user.id))
|
||||
names.append(user.name)
|
||||
|
||||
if usernames and ids:
|
||||
return data, included, usernames, ids, names
|
||||
else:
|
||||
raise tweepy.TweepyException("Users not found")
|
||||
|
||||
except tweepy.TweepyException:
|
||||
raise
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: TwitterCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
data, included, usernames, ids, names = self.get_users(
|
||||
credentials,
|
||||
input_data.user_ids,
|
||||
input_data.usernames,
|
||||
input_data.expansions,
|
||||
input_data.tweet_fields,
|
||||
input_data.user_fields,
|
||||
)
|
||||
if ids:
|
||||
yield "ids", ids
|
||||
if usernames:
|
||||
yield "usernames_", usernames
|
||||
if names:
|
||||
yield "names_", names
|
||||
if data:
|
||||
yield "data", data
|
||||
if included:
|
||||
yield "included", included
|
||||
except Exception as e:
|
||||
yield "error", handle_tweepy_exception(e)
|
||||
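The user-lookup blocks above map empty `user_id`/`username` strings to `None` before calling Tweepy, since `tweepy.Client.get_user` expects exactly one of `id` or `username`. A minimal sketch of that call pattern (bearer token is a placeholder; expansions and field filters omitted):

    # Illustrative lookup by username when no numeric user ID is provided.
    import tweepy

    client = tweepy.Client(bearer_token="<bearer token>")
    user_id, username = "", "twitter"
    response = client.get_user(
        id=user_id or None,
        username=username or None,
        user_auth=False,
    )
    if response.data:
        print(response.data.id, response.data.username, response.data.name)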
@@ -42,6 +42,7 @@ class BlockType(Enum):
|
||||
OUTPUT = "Output"
|
||||
NOTE = "Note"
|
||||
WEBHOOK = "Webhook"
|
||||
WEBHOOK_MANUAL = "Webhook (manual)"
|
||||
AGENT = "Agent"
|
||||
|
||||
|
||||
@@ -57,6 +58,7 @@ class BlockCategory(Enum):
|
||||
COMMUNICATION = "Block that interacts with communication platforms."
|
||||
DEVELOPER_TOOLS = "Developer tools such as GitHub blocks."
|
||||
DATA = "Block that interacts with structured data."
|
||||
HARDWARE = "Block that interacts with hardware."
|
||||
AGENT = "Block that interacts with other agents."
|
||||
CRM = "Block that interacts with CRM services."
|
||||
|
||||
@@ -65,7 +67,7 @@ class BlockCategory(Enum):
|
||||
|
||||
|
||||
class BlockSchema(BaseModel):
|
||||
cached_jsonschema: ClassVar[dict[str, Any]] = {}
|
||||
cached_jsonschema: ClassVar[dict[str, Any]]
|
||||
|
||||
@classmethod
|
||||
def jsonschema(cls) -> dict[str, Any]:
|
||||
@@ -90,6 +92,7 @@ class BlockSchema(BaseModel):
|
||||
}
|
||||
elif isinstance(obj, list):
|
||||
return [ref_to_dict(item) for item in obj]
|
||||
|
||||
return obj
|
||||
|
||||
cls.cached_jsonschema = cast(dict[str, Any], ref_to_dict(model))
|
||||
@@ -145,6 +148,10 @@ class BlockSchema(BaseModel):
|
||||
- A field that is called `credentials` MUST be a `CredentialsMetaInput`.
|
||||
"""
|
||||
super().__pydantic_init_subclass__(**kwargs)
|
||||
|
||||
# Reset cached JSON schema to prevent inheriting it from parent class
|
||||
cls.cached_jsonschema = {}
|
||||
|
||||
credentials_fields = [
|
||||
field_name
|
||||
for field_name, info in cls.model_fields.items()
|
||||
@@ -176,6 +183,11 @@ class BlockSchema(BaseModel):
|
||||
f"Field 'credentials' on {cls.__qualname__} "
|
||||
f"must be of type {CredentialsMetaInput.__name__}"
|
||||
)
|
||||
if credentials_field := cls.model_fields.get(CREDENTIALS_FIELD_NAME):
|
||||
credentials_input_type = cast(
|
||||
CredentialsMetaInput, credentials_field.annotation
|
||||
)
|
||||
credentials_input_type.validate_credentials_field_schema(cls)
|
||||
|
||||
|
||||
BlockSchemaInputType = TypeVar("BlockSchemaInputType", bound=BlockSchema)
|
||||
@@ -187,7 +199,12 @@ class EmptySchema(BlockSchema):
|
||||
|
||||
|
||||
# --8<-- [start:BlockWebhookConfig]
|
||||
class BlockWebhookConfig(BaseModel):
|
||||
class BlockManualWebhookConfig(BaseModel):
|
||||
"""
|
||||
Configuration model for webhook-triggered blocks on which
|
||||
the user has to manually set up the webhook at the provider.
|
||||
"""
|
||||
|
||||
provider: str
|
||||
"""The service provider that the webhook connects to"""
|
||||
|
||||
@@ -198,6 +215,27 @@ class BlockWebhookConfig(BaseModel):
|
||||
Only for use in the corresponding `WebhooksManager`.
|
||||
"""
|
||||
|
||||
event_filter_input: str = ""
|
||||
"""
|
||||
Name of the block's event filter input.
|
||||
Leave empty if the corresponding webhook doesn't have distinct event/payload types.
|
||||
"""
|
||||
|
||||
event_format: str = "{event}"
|
||||
"""
|
||||
Template string for the event(s) that a block instance subscribes to.
|
||||
Applied individually to each event selected in the event filter input.
|
||||
|
||||
Example: `"pull_request.{event}"` -> `"pull_request.opened"`
|
||||
"""
|
||||
|
||||
|
||||
class BlockWebhookConfig(BlockManualWebhookConfig):
|
||||
"""
|
||||
Configuration model for webhook-triggered blocks for which
|
||||
the webhook can be automatically set up through the provider's API.
|
||||
"""
|
||||
|
||||
resource_format: str
|
||||
"""
|
||||
Template string for the resource that a block instance subscribes to.
|
||||
@@ -207,17 +245,6 @@ class BlockWebhookConfig(BaseModel):
|
||||
|
||||
Only for use in the corresponding `WebhooksManager`.
|
||||
"""
|
||||
|
||||
event_filter_input: str
|
||||
"""Name of the block's event filter input."""
|
||||
|
||||
event_format: str = "{event}"
|
||||
"""
|
||||
Template string for the event(s) that a block instance subscribes to.
|
||||
Applied individually to each event selected in the event filter input.
|
||||
|
||||
Example: `"pull_request.{event}"` -> `"pull_request.opened"`
|
||||
"""
|
||||
# --8<-- [end:BlockWebhookConfig]
|
||||
|
||||
|
||||
@@ -237,7 +264,7 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
|
||||
disabled: bool = False,
|
||||
static_output: bool = False,
|
||||
block_type: BlockType = BlockType.STANDARD,
|
||||
webhook_config: Optional[BlockWebhookConfig] = None,
|
||||
webhook_config: Optional[BlockWebhookConfig | BlockManualWebhookConfig] = None,
|
||||
):
|
||||
"""
|
||||
Initialize the block with the given schema.
|
||||
@@ -268,27 +295,38 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
|
||||
self.contributors = contributors or set()
|
||||
self.disabled = disabled
|
||||
self.static_output = static_output
|
||||
self.block_type = block_type if not webhook_config else BlockType.WEBHOOK
|
||||
self.block_type = block_type
|
||||
self.webhook_config = webhook_config
|
||||
self.execution_stats = {}
|
||||
|
||||
if self.webhook_config:
|
||||
# Enforce shape of webhook event filter
|
||||
event_filter_field = self.input_schema.model_fields[
|
||||
self.webhook_config.event_filter_input
|
||||
]
|
||||
if not (
|
||||
isinstance(event_filter_field.annotation, type)
|
||||
and issubclass(event_filter_field.annotation, BaseModel)
|
||||
and all(
|
||||
field.annotation is bool
|
||||
for field in event_filter_field.annotation.model_fields.values()
|
||||
)
|
||||
):
|
||||
raise NotImplementedError(
|
||||
f"{self.name} has an invalid webhook event selector: "
|
||||
"field must be a BaseModel and all its fields must be boolean"
|
||||
)
|
||||
if isinstance(self.webhook_config, BlockWebhookConfig):
|
||||
# Enforce presence of credentials field on auto-setup webhook blocks
|
||||
if CREDENTIALS_FIELD_NAME not in self.input_schema.model_fields:
|
||||
raise TypeError(
|
||||
"credentials field is required on auto-setup webhook blocks"
|
||||
)
|
||||
self.block_type = BlockType.WEBHOOK
|
||||
else:
|
||||
self.block_type = BlockType.WEBHOOK_MANUAL
|
||||
|
||||
# Enforce shape of webhook event filter, if present
|
||||
if self.webhook_config.event_filter_input:
|
||||
event_filter_field = self.input_schema.model_fields[
|
||||
self.webhook_config.event_filter_input
|
||||
]
|
||||
if not (
|
||||
isinstance(event_filter_field.annotation, type)
|
||||
and issubclass(event_filter_field.annotation, BaseModel)
|
||||
and all(
|
||||
field.annotation is bool
|
||||
for field in event_filter_field.annotation.model_fields.values()
|
||||
)
|
||||
):
|
||||
raise NotImplementedError(
|
||||
f"{self.name} has an invalid webhook event selector: "
|
||||
"field must be a BaseModel and all its fields must be boolean"
|
||||
)
|
||||
|
||||
# Enforce presence of 'payload' input
|
||||
if "payload" not in self.input_schema.model_fields:
|
||||
|
||||
@@ -53,8 +53,8 @@ MODEL_COST: dict[LlmModel, int] = {
|
||||
LlmModel.LLAMA3_1_8B: 1,
|
||||
LlmModel.OLLAMA_LLAMA3_8B: 1,
|
||||
LlmModel.OLLAMA_LLAMA3_405B: 1,
|
||||
LlmModel.OLLAMA_DOLPHIN: 1,
|
||||
LlmModel.GEMINI_FLASH_1_5_8B: 1,
|
||||
LlmModel.GEMINI_FLASH_1_5_EXP: 1,
|
||||
LlmModel.GROK_BETA: 5,
|
||||
LlmModel.MISTRAL_NEMO: 1,
|
||||
LlmModel.COHERE_COMMAND_R_08_2024: 1,
|
||||
@@ -62,6 +62,14 @@ MODEL_COST: dict[LlmModel, int] = {
|
||||
LlmModel.EVA_QWEN_2_5_32B: 1,
|
||||
LlmModel.DEEPSEEK_CHAT: 2,
|
||||
LlmModel.PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE: 1,
|
||||
LlmModel.QWEN_QWQ_32B_PREVIEW: 2,
|
||||
LlmModel.NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B: 1,
|
||||
LlmModel.NOUSRESEARCH_HERMES_3_LLAMA_3_1_70B: 1,
|
||||
LlmModel.AMAZON_NOVA_LITE_V1: 1,
|
||||
LlmModel.AMAZON_NOVA_MICRO_V1: 1,
|
||||
LlmModel.AMAZON_NOVA_PRO_V1: 1,
|
||||
LlmModel.MICROSOFT_WIZARDLM_2_8X22B: 1,
|
||||
LlmModel.GRYPHE_MYTHOMAX_L2_13B: 1,
|
||||
}
|
||||
|
||||
for model in LlmModel:
|
||||
|
||||
@@ -10,6 +10,7 @@ class BlockCostType(str, Enum):
|
||||
RUN = "run" # cost X credits per run
|
||||
BYTE = "byte" # cost X credits per byte
|
||||
SECOND = "second" # cost X credits per second
|
||||
DOLLAR = "dollar" # cost X dollars per run
|
||||
|
||||
|
||||
class BlockCost(BaseModel):
|
||||
|
||||
@@ -2,9 +2,9 @@ from abc import ABC, abstractmethod
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from prisma import Json
|
||||
from prisma.enums import UserBlockCreditType
|
||||
from prisma.enums import CreditTransactionType
|
||||
from prisma.errors import UniqueViolationError
|
||||
from prisma.models import UserBlockCredit
|
||||
from prisma.models import CreditTransaction
|
||||
|
||||
from backend.data.block import Block, BlockInput, get_block
|
||||
from backend.data.block_cost_config import BLOCK_COSTS
|
||||
@@ -76,7 +76,7 @@ class UserCredit(UserCreditBase):
|
||||
else cur_month.replace(year=cur_month.year + 1, month=1)
|
||||
)
|
||||
|
||||
user_credit = await UserBlockCredit.prisma().group_by(
|
||||
user_credit = await CreditTransaction.prisma().group_by(
|
||||
by=["userId"],
|
||||
sum={"amount": True},
|
||||
where={
|
||||
@@ -93,10 +93,10 @@ class UserCredit(UserCreditBase):
|
||||
key = f"MONTHLY-CREDIT-TOP-UP-{cur_month}"
|
||||
|
||||
try:
|
||||
await UserBlockCredit.prisma().create(
|
||||
await CreditTransaction.prisma().create(
|
||||
data={
|
||||
"amount": self.num_user_credits_refill,
|
||||
"type": UserBlockCreditType.TOP_UP,
|
||||
"type": CreditTransactionType.TOP_UP,
|
||||
"userId": user_id,
|
||||
"transactionKey": key,
|
||||
"createdAt": self.time_now(),
|
||||
@@ -184,11 +184,11 @@ class UserCredit(UserCreditBase):
|
||||
if validate_balance and user_credit < cost:
|
||||
raise ValueError(f"Insufficient credit: {user_credit} < {cost}")
|
||||
|
||||
await UserBlockCredit.prisma().create(
|
||||
await CreditTransaction.prisma().create(
|
||||
data={
|
||||
"userId": user_id,
|
||||
"amount": -cost,
|
||||
"type": UserBlockCreditType.USAGE,
|
||||
"type": CreditTransactionType.USAGE,
|
||||
"blockId": block.id,
|
||||
"metadata": Json(
|
||||
{
|
||||
@@ -202,11 +202,11 @@ class UserCredit(UserCreditBase):
|
||||
return cost
|
||||
|
||||
async def top_up_credits(self, user_id: str, amount: int):
|
||||
await UserBlockCredit.prisma().create(
|
||||
await CreditTransaction.prisma().create(
|
||||
data={
|
||||
"userId": user_id,
|
||||
"amount": amount,
|
||||
"type": UserBlockCreditType.TOP_UP,
|
||||
"type": CreditTransactionType.TOP_UP,
|
||||
"createdAt": self.time_now(),
|
||||
}
|
||||
)
|
||||
|
||||
@@ -29,6 +29,13 @@ async def connect():
|
||||
if not prisma.is_connected():
|
||||
raise ConnectionError("Failed to connect to Prisma.")
|
||||
|
||||
# Connection acquired from a pool like Supabase somehow still possibly allows
|
||||
# the db client obtains a connection but still reject query connection afterward.
|
||||
try:
|
||||
await prisma.execute_raw("SELECT 1")
|
||||
except Exception as e:
|
||||
raise ConnectionError("Failed to connect to Prisma.") from e
|
||||
|
||||
|
||||
@conn_retry("Prisma", "Releasing connection")
|
||||
async def disconnect():
|
||||
|
||||
@@ -9,7 +9,6 @@ from prisma.models import (
|
||||
AgentNodeExecution,
|
||||
AgentNodeExecutionInputOutput,
|
||||
)
|
||||
from prisma.types import AgentGraphExecutionWhereInput
|
||||
from pydantic import BaseModel
|
||||
|
||||
from backend.data.block import BlockData, BlockInput, CompletedBlockOutput
|
||||
@@ -19,14 +18,14 @@ from backend.util import json, mock
|
||||
from backend.util.settings import Config
|
||||
|
||||
|
||||
class GraphExecution(BaseModel):
|
||||
class GraphExecutionEntry(BaseModel):
|
||||
user_id: str
|
||||
graph_exec_id: str
|
||||
graph_id: str
|
||||
start_node_execs: list["NodeExecution"]
|
||||
start_node_execs: list["NodeExecutionEntry"]
|
||||
|
||||
|
||||
class NodeExecution(BaseModel):
|
||||
class NodeExecutionEntry(BaseModel):
|
||||
user_id: str
|
||||
graph_exec_id: str
|
||||
graph_id: str
|
||||
@@ -325,34 +324,6 @@ async def update_execution_status(
|
||||
return ExecutionResult.from_db(res)
|
||||
|
||||
|
||||
async def get_graph_execution(
|
||||
graph_exec_id: str, user_id: str
|
||||
) -> AgentGraphExecution | None:
|
||||
"""
|
||||
Retrieve a specific graph execution by its ID.
|
||||
|
||||
Args:
|
||||
graph_exec_id (str): The ID of the graph execution to retrieve.
|
||||
user_id (str): The ID of the user to whom the graph (execution) belongs.
|
||||
|
||||
Returns:
|
||||
AgentGraphExecution | None: The graph execution if found, None otherwise.
|
||||
"""
|
||||
execution = await AgentGraphExecution.prisma().find_first(
|
||||
where={"id": graph_exec_id, "userId": user_id},
|
||||
include=GRAPH_EXECUTION_INCLUDE,
|
||||
)
|
||||
return execution
|
||||
|
||||
|
||||
async def list_executions(graph_id: str, graph_version: int | None = None) -> list[str]:
|
||||
where: AgentGraphExecutionWhereInput = {"agentGraphId": graph_id}
|
||||
if graph_version is not None:
|
||||
where["agentGraphVersion"] = graph_version
|
||||
executions = await AgentGraphExecution.prisma().find_many(where=where)
|
||||
return [execution.id for execution in executions]
|
||||
|
||||
|
||||
async def get_execution_results(graph_exec_id: str) -> list[ExecutionResult]:
|
||||
executions = await AgentNodeExecution.prisma().find_many(
|
||||
where={"agentGraphExecutionId": graph_exec_id},
|
||||
|
||||
@@ -84,6 +84,8 @@ class NodeModel(Node):
|
||||
raise ValueError(f"Block #{self.block_id} not found for node #{self.id}")
|
||||
if not block.webhook_config:
|
||||
raise TypeError("This method can't be used on non-webhook blocks")
|
||||
if not block.webhook_config.event_filter_input:
|
||||
return True
|
||||
event_filter = self.input_default.get(block.webhook_config.event_filter_input)
|
||||
if not event_filter:
|
||||
raise ValueError(f"Event filter is not configured on node #{self.id}")
|
||||
@@ -105,6 +107,8 @@ class GraphExecution(BaseDbModel):
|
||||
duration: float
|
||||
total_run_time: float
|
||||
status: ExecutionStatus
|
||||
graph_id: str
|
||||
graph_version: int
|
||||
|
||||
@staticmethod
|
||||
def from_db(execution: AgentGraphExecution):
|
||||
@@ -130,6 +134,8 @@ class GraphExecution(BaseDbModel):
|
||||
duration=duration,
|
||||
total_run_time=total_run_time,
|
||||
status=ExecutionStatus(execution.executionStatus),
|
||||
graph_id=execution.agentGraphId,
|
||||
graph_version=execution.agentGraphVersion,
|
||||
)
|
||||
|
||||
|
||||
@@ -139,7 +145,6 @@ class Graph(BaseDbModel):
|
||||
is_template: bool = False
|
||||
name: str
|
||||
description: str
|
||||
executions: list[GraphExecution] = []
|
||||
nodes: list[Node] = []
|
||||
links: list[Link] = []
|
||||
|
||||
@@ -265,11 +270,19 @@ class GraphModel(Graph):
|
||||
+ [sanitize(link.sink_name) for link in input_links.get(node.id, [])]
|
||||
)
|
||||
for name in block.input_schema.get_required_fields():
|
||||
if name not in provided_inputs and (
|
||||
for_run # Skip input completion validation, unless when executing.
|
||||
or block.block_type == BlockType.INPUT
|
||||
or block.block_type == BlockType.OUTPUT
|
||||
or block.block_type == BlockType.AGENT
|
||||
if (
|
||||
name not in provided_inputs
|
||||
and not (
|
||||
name == "payload"
|
||||
and block.block_type
|
||||
in (BlockType.WEBHOOK, BlockType.WEBHOOK_MANUAL)
|
||||
)
|
||||
and (
|
||||
for_run # Skip input completion validation, unless when executing.
|
||||
or block.block_type == BlockType.INPUT
|
||||
or block.block_type == BlockType.OUTPUT
|
||||
or block.block_type == BlockType.AGENT
|
||||
)
|
||||
):
|
||||
raise ValueError(
|
||||
f"Node {block.name} #{node.id} required input missing: `{name}`"
|
||||
@@ -289,7 +302,6 @@ class GraphModel(Graph):
|
||||
|
||||
# Validate dependencies between fields
|
||||
for field_name, field_info in input_schema.items():
|
||||
|
||||
# Apply input dependency validation only on run & field with depends_on
|
||||
json_schema_extra = field_info.json_schema_extra or {}
|
||||
dependencies = json_schema_extra.get("depends_on", [])
|
||||
@@ -356,12 +368,7 @@ class GraphModel(Graph):
|
||||
link.is_static = True # Each value block output should be static.
|
||||
|
||||
@staticmethod
|
||||
def from_db(graph: AgentGraph, hide_credentials: bool = False):
|
||||
executions = [
|
||||
GraphExecution.from_db(execution)
|
||||
for execution in graph.AgentGraphExecution or []
|
||||
]
|
||||
|
||||
def from_db(graph: AgentGraph, for_export: bool = False):
|
||||
return GraphModel(
|
||||
id=graph.id,
|
||||
user_id=graph.userId,
|
||||
@@ -370,9 +377,8 @@ class GraphModel(Graph):
|
||||
is_template=graph.isTemplate,
|
||||
name=graph.name or "",
|
||||
description=graph.description or "",
|
||||
executions=executions,
|
||||
nodes=[
|
||||
GraphModel._process_node(node, hide_credentials)
|
||||
NodeModel.from_db(GraphModel._process_node(node, for_export))
|
||||
for node in graph.AgentNodes or []
|
||||
],
|
||||
links=list(
|
||||
@@ -385,23 +391,29 @@ class GraphModel(Graph):
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _process_node(node: AgentNode, hide_credentials: bool) -> NodeModel:
|
||||
node_dict = {field: getattr(node, field) for field in node.model_fields}
|
||||
if hide_credentials and "constantInput" in node_dict:
|
||||
constant_input = json.loads(
|
||||
node_dict["constantInput"], target_type=dict[str, Any]
|
||||
)
|
||||
constant_input = GraphModel._hide_credentials_in_input(constant_input)
|
||||
node_dict["constantInput"] = json.dumps(constant_input)
|
||||
return NodeModel.from_db(AgentNode(**node_dict))
|
||||
def _process_node(node: AgentNode, for_export: bool) -> AgentNode:
|
||||
if for_export:
|
||||
# Remove credentials from node input
|
||||
if node.constantInput:
|
||||
constant_input = json.loads(
|
||||
node.constantInput, target_type=dict[str, Any]
|
||||
)
|
||||
constant_input = GraphModel._hide_node_input_credentials(constant_input)
|
||||
node.constantInput = json.dumps(constant_input)
|
||||
|
||||
# Remove webhook info
|
||||
node.webhookId = None
|
||||
node.Webhook = None
|
||||
|
||||
return node
|
||||
|
||||
@staticmethod
|
||||
def _hide_credentials_in_input(input_data: dict[str, Any]) -> dict[str, Any]:
|
||||
def _hide_node_input_credentials(input_data: dict[str, Any]) -> dict[str, Any]:
|
||||
sensitive_keys = ["credentials", "api_key", "password", "token", "secret"]
|
||||
result = {}
|
||||
for key, value in input_data.items():
|
||||
if isinstance(value, dict):
|
||||
result[key] = GraphModel._hide_credentials_in_input(value)
|
||||
result[key] = GraphModel._hide_node_input_credentials(value)
|
||||
elif isinstance(value, str) and any(
|
||||
sensitive_key in key.lower() for sensitive_key in sensitive_keys
|
||||
):
|
||||
@@ -440,7 +452,6 @@ async def set_node_webhook(node_id: str, webhook_id: str | None) -> NodeModel:
|
||||
|
||||
async def get_graphs(
|
||||
user_id: str,
|
||||
include_executions: bool = False,
|
||||
filter_by: Literal["active", "template"] | None = "active",
|
||||
) -> list[GraphModel]:
|
||||
"""
|
||||
@@ -448,33 +459,50 @@ async def get_graphs(
|
||||
Default behaviour is to get all currently active graphs.
|
||||
|
||||
Args:
|
||||
include_executions: Whether to include executions in the graph metadata.
|
||||
filter_by: An optional filter to either select templates or active graphs.
|
||||
user_id: The ID of the user that owns the graph.
|
||||
|
||||
Returns:
|
||||
list[GraphModel]: A list of objects representing the retrieved graphs.
|
||||
"""
|
||||
where_clause: AgentGraphWhereInput = {}
|
||||
where_clause: AgentGraphWhereInput = {"userId": user_id}
|
||||
|
||||
if filter_by == "active":
|
||||
where_clause["isActive"] = True
|
||||
elif filter_by == "template":
|
||||
where_clause["isTemplate"] = True
|
||||
|
||||
where_clause["userId"] = user_id
|
||||
|
||||
graph_include = AGENT_GRAPH_INCLUDE
|
||||
graph_include["AgentGraphExecution"] = include_executions
|
||||
|
||||
graphs = await AgentGraph.prisma().find_many(
|
||||
where=where_clause,
|
||||
distinct=["id"],
|
||||
order={"version": "desc"},
|
||||
include=graph_include,
|
||||
include=AGENT_GRAPH_INCLUDE,
|
||||
)
|
||||
|
||||
return [GraphModel.from_db(graph) for graph in graphs]
|
||||
graph_models = []
|
||||
for graph in graphs:
|
||||
try:
|
||||
graph_models.append(GraphModel.from_db(graph))
|
||||
except Exception as e:
|
||||
logger.error(f"Error processing graph {graph.id}: {e}")
|
||||
continue
|
||||
|
||||
return graph_models
|
||||
|
||||
|
||||
async def get_executions(user_id: str) -> list[GraphExecution]:
|
||||
executions = await AgentGraphExecution.prisma().find_many(
|
||||
where={"userId": user_id},
|
||||
order={"createdAt": "desc"},
|
||||
)
|
||||
return [GraphExecution.from_db(execution) for execution in executions]
|
||||
|
||||
|
||||
async def get_execution(user_id: str, execution_id: str) -> GraphExecution | None:
|
||||
execution = await AgentGraphExecution.prisma().find_first(
|
||||
where={"id": execution_id, "userId": user_id}
|
||||
)
|
||||
return GraphExecution.from_db(execution) if execution else None
|
||||
|
||||
|
||||
async def get_graph(
|
||||
@@ -482,7 +510,7 @@ async def get_graph(
|
||||
version: int | None = None,
|
||||
template: bool = False,
|
||||
user_id: str | None = None,
|
||||
hide_credentials: bool = False,
|
||||
for_export: bool = False,
|
||||
) -> GraphModel | None:
|
||||
"""
|
||||
Retrieves a graph from the DB.
|
||||
@@ -493,13 +521,13 @@ async def get_graph(
|
||||
"""
|
||||
where_clause: AgentGraphWhereInput = {
|
||||
"id": graph_id,
|
||||
"isTemplate": template,
|
||||
}
|
||||
if version is not None:
|
||||
where_clause["version"] = version
|
||||
elif not template:
|
||||
where_clause["isActive"] = True
|
||||
|
||||
# TODO: Fix hack workaround to get adding store agents to work
|
||||
if user_id is not None and not template:
|
||||
where_clause["userId"] = user_id
|
||||
|
||||
@@ -508,7 +536,7 @@ async def get_graph(
|
||||
include=AGENT_GRAPH_INCLUDE,
|
||||
order={"version": "desc"},
|
||||
)
|
||||
return GraphModel.from_db(graph, hide_credentials) if graph else None
|
||||
return GraphModel.from_db(graph, for_export) if graph else None
|
||||
|
||||
|
||||
async def set_graph_active_version(graph_id: str, version: int, user_id: str) -> None:
|
||||
|
||||
@@ -3,10 +3,12 @@ from typing import TYPE_CHECKING, AsyncGenerator, Optional

from prisma import Json
from prisma.models import IntegrationWebhook
from pydantic import Field
from pydantic import Field, computed_field

from backend.data.includes import INTEGRATION_WEBHOOK_INCLUDE
from backend.data.queue import AsyncRedisEventBus
from backend.integrations.providers import ProviderName
from backend.integrations.webhooks.utils import webhook_ingress_url

from .db import BaseDbModel

@@ -18,7 +20,7 @@ logger = logging.getLogger(__name__)

class Webhook(BaseDbModel):
    user_id: str
    provider: str
    provider: ProviderName
    credentials_id: str
    webhook_type: str
    resource: str
@@ -30,6 +32,11 @@ class Webhook(BaseDbModel):

    attached_nodes: Optional[list["NodeModel"]] = None

    @computed_field
    @property
    def url(self) -> str:
        return webhook_ingress_url(self.provider.value, self.id)

    @staticmethod
    def from_db(webhook: IntegrationWebhook):
        from .graph import NodeModel
@@ -37,7 +44,7 @@ class Webhook(BaseDbModel):
        return Webhook(
            id=webhook.id,
            user_id=webhook.userId,
            provider=webhook.provider,
            provider=ProviderName(webhook.provider),
            credentials_id=webhook.credentialsId,
            webhook_type=webhook.webhookType,
            resource=webhook.resource,
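
A minimal sketch of what the @computed_field change buys (illustrative only; it
relies on the get_webhook helper further down in this file, and the exact path
returned by webhook_ingress_url is not shown in this diff):

    async def show_webhook_url(webhook_id: str) -> None:
        webhook = await get_webhook(webhook_id)
        # `url` is derived from provider.value and id on access; because it is
        # a pydantic @computed_field, it also appears in serialized output such
        # as webhook.model_dump()["url"] without being stored on the DB row.
        print(webhook.url)
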
@@ -61,7 +68,7 @@ async def create_webhook(webhook: Webhook) -> Webhook:
        data={
            "id": webhook.id,
            "userId": webhook.user_id,
            "provider": webhook.provider,
            "provider": webhook.provider.value,
            "credentialsId": webhook.credentials_id,
            "webhookType": webhook.webhook_type,
            "resource": webhook.resource,
@@ -83,8 +90,10 @@ async def get_webhook(webhook_id: str) -> Webhook:
    return Webhook.from_db(webhook)


async def get_all_webhooks(credentials_id: str) -> list[Webhook]:
async def get_all_webhooks_by_creds(credentials_id: str) -> list[Webhook]:
    """⚠️ No `user_id` check: DO NOT USE without check in user-facing endpoints."""
    if not credentials_id:
        raise ValueError("credentials_id must not be empty")
    webhooks = await IntegrationWebhook.prisma().find_many(
        where={"credentialsId": credentials_id},
        include=INTEGRATION_WEBHOOK_INCLUDE,
@@ -92,7 +101,7 @@ async def get_all_webhooks(credentials_id: str) -> list[Webhook]:
    return [Webhook.from_db(webhook) for webhook in webhooks]


async def find_webhook(
async def find_webhook_by_credentials_and_props(
    credentials_id: str, webhook_type: str, resource: str, events: list[str]
) -> Webhook | None:
    """⚠️ No `user_id` check: DO NOT USE without check in user-facing endpoints."""
@@ -108,6 +117,22 @@ async def find_webhook(
    return Webhook.from_db(webhook) if webhook else None


async def find_webhook_by_graph_and_props(
    graph_id: str, provider: str, webhook_type: str, events: list[str]
) -> Webhook | None:
    """⚠️ No `user_id` check: DO NOT USE without check in user-facing endpoints."""
    webhook = await IntegrationWebhook.prisma().find_first(
        where={
            "provider": provider,
            "webhookType": webhook_type,
            "events": {"has_every": events},
            "AgentNodes": {"some": {"agentGraphId": graph_id}},
        },
        include=INTEGRATION_WEBHOOK_INCLUDE,
    )
    return Webhook.from_db(webhook) if webhook else None


async def update_webhook_config(webhook_id: str, updated_config: dict) -> Webhook:
    """⚠️ No `user_id` check: DO NOT USE without check in user-facing endpoints."""
    _updated_webhook = await IntegrationWebhook.prisma().update(
@@ -144,25 +169,28 @@ class WebhookEventBus(AsyncRedisEventBus[WebhookEvent]):
    def event_bus_name(self) -> str:
        return "webhooks"

    async def publish(self, event: WebhookEvent):
        await self.publish_event(event, f"{event.webhook_id}/{event.event_type}")

    async def listen(
        self, webhook_id: str, event_type: Optional[str] = None
    ) -> AsyncGenerator[WebhookEvent, None]:
        async for event in self.listen_events(f"{webhook_id}/{event_type or '*'}"):
            yield event


event_bus = WebhookEventBus()
_webhook_event_bus = WebhookEventBus()


async def publish_webhook_event(event: WebhookEvent):
    await event_bus.publish(event)
    await _webhook_event_bus.publish_event(
        event, f"{event.webhook_id}/{event.event_type}"
    )


async def listen_for_webhook_event(
async def listen_for_webhook_events(
    webhook_id: str, event_type: Optional[str] = None
) -> AsyncGenerator[WebhookEvent, None]:
    async for event in _webhook_event_bus.listen_events(
        f"{webhook_id}/{event_type or '*'}"
    ):
        yield event


async def wait_for_webhook_event(
    webhook_id: str, event_type: Optional[str] = None, timeout: Optional[float] = None
) -> WebhookEvent | None:
    async for event in event_bus.listen(webhook_id, event_type):
        return event  # Only one event is expected
    return await _webhook_event_bus.wait_for_event(
        f"{webhook_id}/{event_type or '*'}", timeout
    )
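
A rough sketch of how the renamed module-level helpers might be used
(illustrative; the webhook id and event type are placeholders, the payload
field on WebhookEvent is assumed rather than shown here, and both snippets
belong inside an async function):

    # Producer side, e.g. a webhook ingress endpoint: fan the received payload
    # out over Redis on the "<webhook_id>/<event_type>" channel key.
    await publish_webhook_event(
        WebhookEvent(webhook_id="wh_123", event_type="push", payload=payload)
    )

    # Consumer side: wait for at most one matching event with an upper bound,
    # instead of manually breaking out of the old event_bus.listen() loop.
    event = await wait_for_webhook_event("wh_123", event_type="push", timeout=30)
    if event is None:
        ...  # timed out without receiving anything
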
@@ -2,6 +2,7 @@ from __future__ import annotations

import logging
from typing import (
    TYPE_CHECKING,
    Annotated,
    Any,
    Callable,
@@ -11,19 +12,32 @@ from typing import (
    Optional,
    TypedDict,
    TypeVar,
    get_args,
)
from uuid import uuid4

from pydantic import BaseModel, Field, GetCoreSchemaHandler, SecretStr, field_serializer
from pydantic import (
    BaseModel,
    ConfigDict,
    Field,
    GetCoreSchemaHandler,
    SecretStr,
    field_serializer,
)
from pydantic_core import (
    CoreSchema,
    PydanticUndefined,
    PydanticUndefinedType,
    ValidationError,
    core_schema,
)

from backend.integrations.providers import ProviderName
from backend.util.settings import Secrets

if TYPE_CHECKING:
    from backend.data.block import BlockSchema

T = TypeVar("T")
logger = logging.getLogger(__name__)

@@ -120,7 +134,7 @@ def SchemaField(
    title: Optional[str] = None,
    description: Optional[str] = None,
    placeholder: Optional[str] = None,
    advanced: Optional[bool] = None,
    advanced: Optional[bool] = False,
    secret: bool = False,
    exclude: bool = False,
    hidden: Optional[bool] = None,
@@ -148,7 +162,7 @@ def SchemaField(
        exclude=exclude,
        json_schema_extra=json_extra,
        **kwargs,
    )
    )  # type: ignore
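
A hedged example of how SchemaField is typically used in a block input schema
(the MyBlockInput class and its fields are invented for illustration; with
`advanced` now defaulting to False, ordinary fields no longer have to spell it
out, while advanced-only fields still opt in explicitly):

    class MyBlockInput(BlockSchema):
        query: str = SchemaField(
            description="Search query to run",
            placeholder="e.g. 'AutoGPT'",
        )
        max_results: int = SchemaField(
            default=10,
            description="Upper bound on the number of returned items",
            advanced=True,
        )
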


class _BaseCredentials(BaseModel):
@@ -203,6 +217,7 @@ class OAuthState(BaseModel):
    token: str
    provider: str
    expires_at: int
    code_verifier: Optional[str] = None
    """Unix timestamp (seconds) indicating when this OAuth state expires"""
    scopes: list[str]

@@ -222,7 +237,7 @@ class UserIntegrations(BaseModel):
    oauth_states: list[OAuthState] = Field(default_factory=list)


CP = TypeVar("CP", bound=str)
CP = TypeVar("CP", bound=ProviderName)
CT = TypeVar("CT", bound=CredentialsType)


@@ -235,19 +250,51 @@ class CredentialsMetaInput(BaseModel, Generic[CP, CT]):
    provider: CP
    type: CT

    @staticmethod
    def _add_json_schema_extra(schema, cls: CredentialsMetaInput):
        schema["credentials_provider"] = get_args(
            cls.model_fields["provider"].annotation
        )
        schema["credentials_types"] = get_args(cls.model_fields["type"].annotation)

class CredentialsFieldSchemaExtra(BaseModel, Generic[CP, CT]):
    model_config = ConfigDict(
        json_schema_extra=_add_json_schema_extra,  # type: ignore
    )

    @classmethod
    def validate_credentials_field_schema(cls, model: type["BlockSchema"]):
        """Validates the schema of a `credentials` field"""
        field_schema = model.jsonschema()["properties"][CREDENTIALS_FIELD_NAME]
        try:
            schema_extra = _CredentialsFieldSchemaExtra[CP, CT].model_validate(
                field_schema
            )
        except ValidationError as e:
            if "Field required [type=missing" not in str(e):
                raise

            raise TypeError(
                "Field 'credentials' JSON schema lacks required extra items: "
                f"{field_schema}"
            ) from e

        if (
            len(schema_extra.credentials_provider) > 1
            and not schema_extra.discriminator
        ):
            raise TypeError("Multi-provider CredentialsField requires discriminator!")


class _CredentialsFieldSchemaExtra(BaseModel, Generic[CP, CT]):
    # TODO: move discrimination mechanism out of CredentialsField (frontend + backend)
    credentials_provider: list[CP]
    credentials_scopes: Optional[list[str]]
    credentials_scopes: Optional[list[str]] = None
    credentials_types: list[CT]
    discriminator: Optional[str] = None
    discriminator_mapping: Optional[dict[str, CP]] = None


def CredentialsField(
    provider: CP | list[CP],
    supported_credential_types: set[CT],
    required_scopes: set[str] = set(),
    *,
    discriminator: Optional[str] = None,
@@ -255,26 +302,26 @@ def CredentialsField(
    title: Optional[str] = None,
    description: Optional[str] = None,
    **kwargs,
) -> CredentialsMetaInput[CP, CT]:
) -> CredentialsMetaInput:
    """
    `CredentialsField` must and can only be used on fields named `credentials`.
    This is enforced by the `BlockSchema` base class.
    """
    if not isinstance(provider, str) and len(provider) > 1 and not discriminator:
        raise TypeError("Multi-provider CredentialsField requires discriminator!")

    field_schema_extra = CredentialsFieldSchemaExtra[CP, CT](
        credentials_provider=[provider] if isinstance(provider, str) else provider,
        credentials_scopes=list(required_scopes) or None,  # omit if empty
        credentials_types=list(supported_credential_types),
        discriminator=discriminator,
        discriminator_mapping=discriminator_mapping,
    )
    field_schema_extra = {
        k: v
        for k, v in {
            "credentials_scopes": list(required_scopes) or None,
            "discriminator": discriminator,
            "discriminator_mapping": discriminator_mapping,
        }.items()
        if v is not None
    }

    return Field(
        title=title,
        description=description,
        json_schema_extra=field_schema_extra.model_dump(exclude_none=True),
        json_schema_extra=field_schema_extra,  # validated on BlockSchema init
        **kwargs,
    )
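
A sketch of the resulting block-schema usage (illustrative; the block class,
the GitHub provider member, and the scope value are assumptions). The point of
the change above is that credentials_provider and credentials_types are now
injected into the JSON schema by CredentialsMetaInput itself and checked via
validate_credentials_field_schema, so CredentialsField only passes through the
remaining extras:

    from typing import Literal

    class MyGithubBlockInput(BlockSchema):
        credentials: CredentialsMetaInput[
            Literal[ProviderName.GITHUB], Literal["api_key", "oauth2"]
        ] = CredentialsField(
            provider=ProviderName.GITHUB,
            supported_credential_types={"api_key", "oauth2"},
            required_scopes={"repo"},
            description="GitHub credentials with access to the target repository",
        )
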
@@ -1,8 +1,9 @@
import asyncio
import json
import logging
from abc import ABC, abstractmethod
from datetime import datetime
from typing import Any, AsyncGenerator, Generator, Generic, TypeVar
from typing import Any, AsyncGenerator, Generator, Generic, Optional, TypeVar

from pydantic import BaseModel
from redis.asyncio.client import PubSub as AsyncPubSub
@@ -48,12 +49,12 @@ class BaseRedisEventBus(Generic[M], ABC):
        except Exception as e:
            logger.error(f"Failed to parse event result from Redis {msg} {e}")

    def _subscribe(
    def _get_pubsub_channel(
        self, connection: redis.Redis | redis.AsyncRedis, channel_key: str
    ) -> tuple[PubSub | AsyncPubSub, str]:
        channel_name = f"{self.event_bus_name}/{channel_key}"
        full_channel_name = f"{self.event_bus_name}/{channel_key}"
        pubsub = connection.pubsub()
        return pubsub, channel_name
        return pubsub, full_channel_name


class RedisEventBus(BaseRedisEventBus[M], ABC):
@@ -64,17 +65,19 @@ class RedisEventBus(BaseRedisEventBus[M], ABC):
        return redis.get_redis()

    def publish_event(self, event: M, channel_key: str):
        message, channel_name = self._serialize_message(event, channel_key)
        self.connection.publish(channel_name, message)
        message, full_channel_name = self._serialize_message(event, channel_key)
        self.connection.publish(full_channel_name, message)

    def listen_events(self, channel_key: str) -> Generator[M, None, None]:
        pubsub, channel_name = self._subscribe(self.connection, channel_key)
        pubsub, full_channel_name = self._get_pubsub_channel(
            self.connection, channel_key
        )
        assert isinstance(pubsub, PubSub)

        if "*" in channel_key:
            pubsub.psubscribe(channel_name)
            pubsub.psubscribe(full_channel_name)
        else:
            pubsub.subscribe(channel_name)
            pubsub.subscribe(full_channel_name)

        for message in pubsub.listen():
            if event := self._deserialize_message(message, channel_key):
@@ -89,19 +92,31 @@ class AsyncRedisEventBus(BaseRedisEventBus[M], ABC):
        return await redis.get_redis_async()

    async def publish_event(self, event: M, channel_key: str):
        message, channel_name = self._serialize_message(event, channel_key)
        message, full_channel_name = self._serialize_message(event, channel_key)
        connection = await self.connection
        await connection.publish(channel_name, message)
        await connection.publish(full_channel_name, message)

    async def listen_events(self, channel_key: str) -> AsyncGenerator[M, None]:
        pubsub, channel_name = self._subscribe(await self.connection, channel_key)
        pubsub, full_channel_name = self._get_pubsub_channel(
            await self.connection, channel_key
        )
        assert isinstance(pubsub, AsyncPubSub)

        if "*" in channel_key:
            await pubsub.psubscribe(channel_name)
            await pubsub.psubscribe(full_channel_name)
        else:
            await pubsub.subscribe(channel_name)
            await pubsub.subscribe(full_channel_name)

        async for message in pubsub.listen():
            if event := self._deserialize_message(message, channel_key):
                yield event

    async def wait_for_event(
        self, channel_key: str, timeout: Optional[float] = None
    ) -> M | None:
        try:
            return await asyncio.wait_for(
                anext(aiter(self.listen_events(channel_key))), timeout
            )
        except TimeoutError:
            return None
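
A minimal sketch of the new wait_for_event helper from a subclass's point of
view (the PingEvent model and bus name are invented; channel keys follow the
same "<event_bus_name>/<channel_key>" convention used by publish_event and
listen_events above):

    class PingEvent(BaseModel):
        node_id: str
        message: str

    class PingEventBus(AsyncRedisEventBus[PingEvent]):
        @property
        def event_bus_name(self) -> str:
            return "pings"

    ping_bus = PingEventBus()

    async def wait_for_ping(node_id: str) -> PingEvent | None:
        # Blocks until one event arrives on "pings/<node_id>", or returns None
        # once the 10-second timeout passed to asyncio.wait_for expires.
        return await ping_bus.wait_for_event(node_id, timeout=10)
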
Some files were not shown because too many files have changed in this diff.