Mirror of https://github.com/Significant-Gravitas/AutoGPT.git (synced 2026-01-12 16:48:06 -05:00)
Compare commits: `aryshare-r...update-blo` (197 commits)
Commits:

7cfc0a0c4f, c869fd0119, 097348e2ba, 2f97724ba5, 805a988e21, 6166e31942, fa56b68071, 4207dc2bcf, 73d032a937, 2011322511,
09e1a4081f, 3831aa99e7, f15633833c, 2352c50433, 00add5738f, 00b163b0a5, 3c23fd5b1a, 7c719d6835, 0b0c810861, 1c1dda57e8,
20e93f32ff, 8fc00dfe02, 185d103371, dfc024acc3, 791ab5d671, 6dc88e3f21, d3f5718cac, aef7b5db7d, df071ca5a5, 2b192a0d20,
9572415b74, ad1bf2f27f, 67991f7c6d, c2aad7d2d9, 504a0a1250, 69ae276bb8, 66f2e2a77b, c463d1022b, 88dc0bbe0b, 5ea176e457,
134163aa88, ff494bee93, f02e2fd8bb, 2012213af5, 94f4702f6b, abf05d6407, 58a8b0ddeb, 6a4a8f5a46, 240ad756aa, 6c62a6a558,
d36ac1471c, a2b34739c9, 7d4775e3b7, d133b474a8, 14ef675784, 2c9baa6966, 43ca817a85, 5cf97dd296, c577758b4a, ed9d0b85b4,
cfb92dd4f9, b892c2b272, d4679bbae8, 34954d8df3, 780c893c91, a113fdb134, b73cb9fc98, 223e3de073, abfbdd0934, d8e38b505c,
4ebc34da62, 056c539bde, 7e2e8843c0, a8cde7c3c5, 64e59c5324, 0da1461a79, 192ff65bbf, 388003ff2d, 1081b15c91, 0e17f5757b,
80de45c610, bd237c5b52, 6f8f7ac716, cca71f99b9, e54a999ff4, 36360b3ade, a1607a3b21, 8804417c72, 6aeec36a3c, 6d09a46652,
be4b2b1ba1, d2d6346f59, d83984bf38, 146bf8e692, 015e7d2b10, dac7e4aa57, 1764cf9837, fbd1e26524, fb10bacfda, f6a12828f0,
fba186e5e1, 91b88b840e, f6b00f07ce, a4bd7c9b58, bc643492e8, ba64e05803, 01c0284fd2, a08a7bd1e1, 25bab0eaa5, bb646e865e,
dc777ce89a, a3955385ec, 88b03563f5, f57f6fc1c0, 2f7ad767c2, e3e4bc8c96, d3cb3c73d1, cc75eea402, 40961cb9f1, 13be1a03b7,
757381c889, 154f0a2b83, bf91cc507f, 940fef027a, d1b51ad09b, 4cb0bbe67e, ebbb7b07cf, db3d86bce6, f1f6f87b25, 0d5ab20f14,
4017c5be70, b7cc83854a, 0599083e0e, 9e36144b40, ce3c86cb1d, e33263c9fc, 04f0f64fbb, 755a7b620d, d54c9d4e7e, 852af17294,
8889d029d4, e93c7dc89e, 7791281b90, cc9ff9e2bb, 6b82b9e73b, 61bfbeeb01, 9518ff4f02, 49490e899e, d19866bdf5, b6c946ac4f,
d6f5dcd717, 99779f52b2, d4e5a48163, a40bb6f6d6, a393f8bf9f, d56931f4cb, 03691329be, 9813012c12, 8f74e58ecc, b1f5413dab,
cc41b2f4ab, f45b4cc243, ed2e5e813d, ab33d079e2, a24c869a44, d6b1cf64ed, e9982ba9bd, 58c1e050f2, f125bb658c, ba9c91d0b7,
e6254f0e83, 3c8cf8bd1e, 79f3888d61, ace5d34cc6, 100ab90f44, a0ad796432, 0cc625ca15, a43b329132, beeadd16f1, b2d5b9efb4,
c77f32b23f, b92223cf7b, 0d47b0ce38, c0409ba0b1, 41c8504bdc, c4d38e4ff3, b10a275676, 8baabb0379, 8bf977958a, c73da1f79c,
3c3c1ce90a, e9a198f5da, 706bf0578e, 610b613367, 5f4a411b15, 7b004f07e7, c16d2f94a6
```diff
@@ -1,18 +0,0 @@
-version = 1
-
-test_patterns = ["**/*.spec.ts","**/*_test.py","**/*_tests.py","**/test_*.py"]
-
-exclude_patterns = ["classic/**"]
-
-[[analyzers]]
-name = "javascript"
-
-[analyzers.meta]
-plugins = ["react"]
-environment = ["nodejs"]
-
-[[analyzers]]
-name = "python"
-
-[analyzers.meta]
-runtime_version = "3.x.x"
```
**.github/dependabot.yml**: 24 lines changed
```diff
@@ -129,6 +129,30 @@ updates:
           - "minor"
           - "patch"
 
+  # Submodules
+  - package-ecosystem: "gitsubmodule"
+    directory: "autogpt_platform/supabase"
+    schedule:
+      interval: "weekly"
+    open-pull-requests-limit: 1
+    target-branch: "dev"
+    commit-message:
+      prefix: "chore(platform/deps)"
+      prefix-development: "chore(platform/deps-dev)"
+    groups:
+      production-dependencies:
+        dependency-type: "production"
+        update-types:
+          - "minor"
+          - "patch"
+      development-dependencies:
+        dependency-type: "development"
+        update-types:
+          - "minor"
+          - "patch"
+
   # Docs
   - package-ecosystem: 'pip'
     directory: "docs/"
```
**.github/workflows/classic-autogpt-ci.yml**: 9 lines changed
```diff
@@ -115,7 +115,6 @@ jobs:
           poetry run pytest -vv \
             --cov=autogpt --cov-branch --cov-report term-missing --cov-report xml \
             --numprocesses=logical --durations=10 \
-            --junitxml=junit.xml -o junit_family=legacy \
             tests/unit tests/integration
         env:
           CI: true
@@ -125,14 +124,8 @@
           AWS_ACCESS_KEY_ID: minioadmin
           AWS_SECRET_ACCESS_KEY: minioadmin
 
-      - name: Upload test results to Codecov
-        if: ${{ !cancelled() }} # Run even if tests fail
-        uses: codecov/test-results-action@v1
-        with:
-          token: ${{ secrets.CODECOV_TOKEN }}
-
       - name: Upload coverage reports to Codecov
-        uses: codecov/codecov-action@v5
+        uses: codecov/codecov-action@v4
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
           flags: autogpt-agent,${{ runner.os }}
```
**.github/workflows/classic-benchmark-ci.yml**: 9 lines changed
```diff
@@ -87,20 +87,13 @@ jobs:
           poetry run pytest -vv \
             --cov=agbenchmark --cov-branch --cov-report term-missing --cov-report xml \
             --durations=10 \
-            --junitxml=junit.xml -o junit_family=legacy \
             tests
         env:
           CI: true
           OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
 
-      - name: Upload test results to Codecov
-        if: ${{ !cancelled() }} # Run even if tests fail
-        uses: codecov/test-results-action@v1
-        with:
-          token: ${{ secrets.CODECOV_TOKEN }}
-
       - name: Upload coverage reports to Codecov
-        uses: codecov/codecov-action@v5
+        uses: codecov/codecov-action@v4
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
           flags: agbenchmark,${{ runner.os }}
```
**.github/workflows/classic-forge-ci.yml**: 9 lines changed
```diff
@@ -139,7 +139,6 @@ jobs:
           poetry run pytest -vv \
             --cov=forge --cov-branch --cov-report term-missing --cov-report xml \
             --durations=10 \
-            --junitxml=junit.xml -o junit_family=legacy \
             forge
         env:
           CI: true
@@ -149,14 +148,8 @@
           AWS_ACCESS_KEY_ID: minioadmin
           AWS_SECRET_ACCESS_KEY: minioadmin
 
-      - name: Upload test results to Codecov
-        if: ${{ !cancelled() }} # Run even if tests fail
-        uses: codecov/test-results-action@v1
-        with:
-          token: ${{ secrets.CODECOV_TOKEN }}
-
       - name: Upload coverage reports to Codecov
-        uses: codecov/codecov-action@v5
+        uses: codecov/codecov-action@v4
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
           flags: forge,${{ runner.os }}
```
```diff
@@ -34,7 +34,6 @@ jobs:
           python -m prisma migrate deploy
         env:
           DATABASE_URL: ${{ secrets.BACKEND_DATABASE_URL }}
-          DIRECT_URL: ${{ secrets.BACKEND_DATABASE_URL }}
 
   trigger:
 
@@ -36,7 +36,6 @@ jobs:
           python -m prisma migrate deploy
         env:
           DATABASE_URL: ${{ secrets.BACKEND_DATABASE_URL }}
-          DIRECT_URL: ${{ secrets.BACKEND_DATABASE_URL }}
 
   trigger:
     needs: migrate
```
**.github/workflows/platform-backend-ci.yml**: 46 lines changed
```diff
@@ -42,14 +42,6 @@ jobs:
           REDIS_PASSWORD: testpassword
         ports:
           - 6379:6379
-      rabbitmq:
-        image: rabbitmq:3.12-management
-        ports:
-          - 5672:5672
-          - 15672:15672
-        env:
-          RABBITMQ_DEFAULT_USER: ${{ env.RABBITMQ_DEFAULT_USER }}
-          RABBITMQ_DEFAULT_PASS: ${{ env.RABBITMQ_DEFAULT_PASS }}
 
     steps:
       - name: Checkout repository
@@ -66,7 +58,7 @@
       - name: Setup Supabase
         uses: supabase/setup-cli@v1
         with:
-          version: 1.178.1
+          version: latest
 
       - id: get_date
         name: Get date
@@ -80,35 +72,18 @@
 
       - name: Install Poetry (Unix)
         run: |
-          # Extract Poetry version from backend/poetry.lock
-          HEAD_POETRY_VERSION=$(head -n 1 poetry.lock | grep -oP '(?<=Poetry )[0-9]+\.[0-9]+\.[0-9]+')
-          echo "Found Poetry version ${HEAD_POETRY_VERSION} in backend/poetry.lock"
-
-          if [ -n "$BASE_REF" ]; then
-            BASE_BRANCH=${BASE_REF/refs\/heads\//}
-            BASE_POETRY_VERSION=$((git show "origin/$BASE_BRANCH":./poetry.lock; true) | head -n 1 | grep -oP '(?<=Poetry )[0-9]+\.[0-9]+\.[0-9]+')
-            echo "Found Poetry version ${BASE_POETRY_VERSION} in backend/poetry.lock on ${BASE_REF}"
-            POETRY_VERSION=$(printf '%s\n' "$HEAD_POETRY_VERSION" "$BASE_POETRY_VERSION" | sort -V | tail -n1)
-          else
-            POETRY_VERSION=$HEAD_POETRY_VERSION
-          fi
-          echo "Using Poetry version ${POETRY_VERSION}"
-
-          # Install Poetry
-          curl -sSL https://install.python-poetry.org | POETRY_VERSION=$POETRY_VERSION python3 -
+          curl -sSL https://install.python-poetry.org | python3 -
 
           if [ "${{ runner.os }}" = "macOS" ]; then
             PATH="$HOME/.local/bin:$PATH"
            echo "$HOME/.local/bin" >> $GITHUB_PATH
           fi
-        env:
-          BASE_REF: ${{ github.base_ref || github.event.merge_group.base_ref }}
 
       - name: Check poetry.lock
         run: |
           poetry lock
 
-          if ! git diff --quiet --ignore-matching-lines="^# " poetry.lock; then
+          if ! git diff --quiet poetry.lock; then
             echo "Error: poetry.lock not up to date."
             echo
             git diff poetry.lock
@@ -135,7 +110,6 @@
         run: poetry run prisma migrate dev --name updates
         env:
           DATABASE_URL: ${{ steps.supabase.outputs.DB_URL }}
-          DIRECT_URL: ${{ steps.supabase.outputs.DB_URL }}
 
       - id: lint
         name: Run Linter
@@ -152,13 +126,12 @@
         env:
           LOG_LEVEL: ${{ runner.debug && 'DEBUG' || 'INFO' }}
           DATABASE_URL: ${{ steps.supabase.outputs.DB_URL }}
-          DIRECT_URL: ${{ steps.supabase.outputs.DB_URL }}
           SUPABASE_URL: ${{ steps.supabase.outputs.API_URL }}
           SUPABASE_SERVICE_ROLE_KEY: ${{ steps.supabase.outputs.SERVICE_ROLE_KEY }}
           SUPABASE_JWT_SECRET: ${{ steps.supabase.outputs.JWT_SECRET }}
-          REDIS_HOST: "localhost"
-          REDIS_PORT: "6379"
-          REDIS_PASSWORD: "testpassword"
+          REDIS_HOST: 'localhost'
+          REDIS_PORT: '6379'
+          REDIS_PASSWORD: 'testpassword'
 
         env:
           CI: true
@@ -166,13 +139,6 @@
           RUN_ENV: local
           PORT: 8080
           OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
-          # We know these are here, don't report this as a security vulnerability
-          # This is used as the default credential for the entire system's RabbitMQ instance
-          # If you want to replace this, you can do so by making our entire system generate
-          # new credentials for each local user and update the environment variables in
-          # the backend service, docker composes, and examples
-          RABBITMQ_DEFAULT_USER: "rabbitmq_user_default"
-          RABBITMQ_DEFAULT_PASS: "k0VMxyIJF9S35f3x2uaw5IWAl6Y536O7"
 
       # - name: Upload coverage reports to Codecov
       #   uses: codecov/codecov-action@v4
```
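
The deleted install step above picks whichever Poetry version is newer between the PR head's `poetry.lock` and the base branch's, via `sort -V | tail -n1`. The same comparison, sketched in Python (the `packaging` dependency and the sample lock-header strings are assumptions for illustration, not taken from the workflow):

```python
import re

from packaging.version import Version  # assumes the `packaging` package is installed


def poetry_version(lock_head_line: str) -> Version | None:
    # poetry.lock begins with a header such as:
    # "# This file is automatically @generated by Poetry 2.1.1 ..."
    match = re.search(r"Poetry (\d+\.\d+\.\d+)", lock_head_line)
    return Version(match.group(1)) if match else None


head = poetry_version("# ... @generated by Poetry 2.1.1 ...")  # hypothetical head lock header
base = poetry_version("# ... @generated by Poetry 1.8.3 ...")  # hypothetical base-branch lock header

# Like `sort -V | tail -n1`: take the highest version seen on either side.
print(max(v for v in (head, base) if v is not None))  # -> 2.1.1
```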
**.github/workflows/platform-frontend-ci.yml**: 30 lines changed
```diff
@@ -37,25 +37,6 @@ jobs:
         run: |
           yarn lint
 
-  type-check:
-    runs-on: ubuntu-latest
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Set up Node.js
-        uses: actions/setup-node@v4
-        with:
-          node-version: "21"
-
-      - name: Install dependencies
-        run: |
-          yarn install --frozen-lockfile
-
-      - name: Run tsc check
-        run: |
-          yarn type-check
-
   test:
     runs-on: ubuntu-latest
     strategy:
@@ -77,12 +58,12 @@
       - name: Free Disk Space (Ubuntu)
         uses: jlumbroso/free-disk-space@main
         with:
           large-packages: false # slow
           docker-images: false # limited benefit
 
       - name: Copy default supabase .env
         run: |
-          cp ../.env.example ../.env
+          cp ../supabase/docker/.env.example ../.env
 
       - name: Copy backend .env
         run: |
@@ -104,12 +85,11 @@
         run: yarn playwright install --with-deps ${{ matrix.browser }}
 
       - name: Run tests
-        timeout-minutes: 20
         run: |
           yarn test --project=${{ matrix.browser }}
 
-      - name: Print Final Docker Compose logs
-        if: always()
+      - name: Print Docker Compose logs in debug mode
+        if: runner.debug
         run: |
           docker compose -f ../docker-compose.yml logs
```
```diff
@@ -16,7 +16,7 @@ jobs:
           # operations-per-run: 5000
           stale-issue-message: >
             This issue has automatically been marked as _stale_ because it has not had
-            any activity in the last 170 days. You can _unstale_ it by commenting or
+            any activity in the last 50 days. You can _unstale_ it by commenting or
             removing the label. Otherwise, this issue will be closed in 10 days.
           stale-pr-message: >
             This pull request has automatically been marked as _stale_ because it has
@@ -25,7 +25,7 @@
           close-issue-message: >
             This issue was closed automatically because it has been stale for 10 days
             with no activity.
-          days-before-stale: 170
+          days-before-stale: 50
           days-before-close: 10
           # Do not touch meta issues:
           exempt-issue-labels: meta,fridge,project management
```
**.gitmodules**: 3 lines changed
```diff
@@ -1,3 +1,6 @@
 [submodule "classic/forge/tests/vcr_cassettes"]
 	path = classic/forge/tests/vcr_cassettes
 	url = https://github.com/Significant-Gravitas/Auto-GPT-test-cassettes
+[submodule "autogpt_platform/supabase"]
+	path = autogpt_platform/supabase
+	url = https://github.com/supabase/supabase.git
```
```diff
@@ -140,7 +140,7 @@ repos:
         language: system
 
   - repo: https://github.com/psf/black
-    rev: 24.10.0
+    rev: 23.12.1
    # Black has sensible defaults, doesn't need package context, and ignores
    # everything in .gitignore, so it works fine without any config or arguments.
    hooks:
@@ -170,16 +170,6 @@ repos:
         files: ^classic/benchmark/(agbenchmark|tests)/((?!reports).)*[/.]
         args: [--config=classic/benchmark/.flake8]
 
-  - repo: local
-    hooks:
-      - id: prettier
-        name: Format (Prettier) - AutoGPT Platform - Frontend
-        alias: format-platform-frontend
-        entry: bash -c 'cd autogpt_platform/frontend && npx prettier --write $(echo "$@" | sed "s|autogpt_platform/frontend/||g")' --
-        files: ^autogpt_platform/frontend/
-        types: [file]
-        language: system
-
   - repo: local
     # To have watertight type checking, we check *all* the files in an affected
     # project. To trigger on poetry.lock we also reset the file `types` filter.
@@ -231,16 +221,6 @@ repos:
         language: system
         pass_filenames: false
 
-  - repo: local
-    hooks:
-      - id: tsc
-        name: Typecheck - AutoGPT Platform - Frontend
-        entry: bash -c 'cd autogpt_platform/frontend && npm run type-check'
-        files: ^autogpt_platform/frontend/
-        types: [file]
-        language: system
-        pass_filenames: false
-
   - repo: local
     hooks:
       - id: pytest
```
**.vscode/launch.json**: 6 lines changed
```diff
@@ -32,9 +32,9 @@
       "type": "debugpy",
       "request": "launch",
       "module": "backend.app",
-      "env": {
-        "OBJC_DISABLE_INITIALIZE_FORK_SAFETY": "YES"
-      },
+      // "env": {
+      //   "ENV": "dev"
+      // },
       "envFile": "${workspaceFolder}/backend/.env",
       "justMyCode": false,
       "cwd": "${workspaceFolder}/autogpt_platform/backend"
```
```diff
@@ -2,6 +2,9 @@
 If you are reading this, you are probably looking for the full **[contribution guide]**,
 which is part of our [wiki].
 
+Also check out our [🚀 Roadmap][roadmap] for information about our priorities and associated tasks.
+<!-- You can find our immediate priorities and their progress on our public [kanban board]. -->
+
 [contribution guide]: https://github.com/Significant-Gravitas/AutoGPT/wiki/Contributing
 [wiki]: https://github.com/Significant-Gravitas/AutoGPT/wiki
+[roadmap]: https://github.com/Significant-Gravitas/AutoGPT/discussions/6971
```
```diff
@@ -15,11 +15,7 @@
 > Setting up and hosting the AutoGPT Platform yourself is a technical process.
 > If you'd rather something that just works, we recommend [joining the waitlist](https://bit.ly/3ZDijAI) for the cloud-hosted beta.
 
-### Updated Setup Instructions:
-We’ve moved to a fully maintained and regularly updated documentation site.
-
-👉 [Follow the official self-hosting guide here](https://docs.agpt.co/platform/getting-started/)
 
 https://github.com/user-attachments/assets/d04273a5-b36a-4a37-818e-f631ce72d603
 
 This tutorial assumes you have Docker, VSCode, git and npm installed.
```
```diff
@@ -20,7 +20,6 @@ Instead, please report them via:
 - Please provide detailed reports with reproducible steps
 - Include the version/commit hash where you discovered the vulnerability
 - Allow us a 90-day security fix window before any public disclosure
-- After patch is released, allow 30 days for users to update before public disclosure (for a total of 120 days max between update time and fix time)
 - Share any potential mitigations or workarounds if known
 
 ## Supported Versions
```
```diff
@@ -1,123 +0,0 @@
-############
-# Secrets
-# YOU MUST CHANGE THESE BEFORE GOING INTO PRODUCTION
-############
-
-POSTGRES_PASSWORD=your-super-secret-and-long-postgres-password
-JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
-ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
-SERVICE_ROLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
-DASHBOARD_USERNAME=supabase
-DASHBOARD_PASSWORD=this_password_is_insecure_and_should_be_updated
-SECRET_KEY_BASE=UpNVntn3cDxHJpq99YMc1T1AQgQpc8kfYTuRgBiYa15BLrx8etQoXz3gZv1/u2oq
-VAULT_ENC_KEY=your-encryption-key-32-chars-min
-
-
-############
-# Database - You can change these to any PostgreSQL database that has logical replication enabled.
-############
-
-POSTGRES_HOST=db
-POSTGRES_DB=postgres
-POSTGRES_PORT=5432
-# default user is postgres
-
-
-############
-# Supavisor -- Database pooler
-############
-POOLER_PROXY_PORT_TRANSACTION=6543
-POOLER_DEFAULT_POOL_SIZE=20
-POOLER_MAX_CLIENT_CONN=100
-POOLER_TENANT_ID=your-tenant-id
-
-
-############
-# API Proxy - Configuration for the Kong Reverse proxy.
-############
-
-KONG_HTTP_PORT=8000
-KONG_HTTPS_PORT=8443
-
-
-############
-# API - Configuration for PostgREST.
-############
-
-PGRST_DB_SCHEMAS=public,storage,graphql_public
-
-
-############
-# Auth - Configuration for the GoTrue authentication server.
-############
-
-## General
-SITE_URL=http://localhost:3000
-ADDITIONAL_REDIRECT_URLS=
-JWT_EXPIRY=3600
-DISABLE_SIGNUP=false
-API_EXTERNAL_URL=http://localhost:8000
-
-## Mailer Config
-MAILER_URLPATHS_CONFIRMATION="/auth/v1/verify"
-MAILER_URLPATHS_INVITE="/auth/v1/verify"
-MAILER_URLPATHS_RECOVERY="/auth/v1/verify"
-MAILER_URLPATHS_EMAIL_CHANGE="/auth/v1/verify"
-
-## Email auth
-ENABLE_EMAIL_SIGNUP=true
-ENABLE_EMAIL_AUTOCONFIRM=false
-SMTP_ADMIN_EMAIL=admin@example.com
-SMTP_HOST=supabase-mail
-SMTP_PORT=2500
-SMTP_USER=fake_mail_user
-SMTP_PASS=fake_mail_password
-SMTP_SENDER_NAME=fake_sender
-ENABLE_ANONYMOUS_USERS=false
-
-## Phone auth
-ENABLE_PHONE_SIGNUP=true
-ENABLE_PHONE_AUTOCONFIRM=true
-
-
-############
-# Studio - Configuration for the Dashboard
-############
-
-STUDIO_DEFAULT_ORGANIZATION=Default Organization
-STUDIO_DEFAULT_PROJECT=Default Project
-
-STUDIO_PORT=3000
-# replace if you intend to use Studio outside of localhost
-SUPABASE_PUBLIC_URL=http://localhost:8000
-
-# Enable webp support
-IMGPROXY_ENABLE_WEBP_DETECTION=true
-
-# Add your OpenAI API key to enable SQL Editor Assistant
-OPENAI_API_KEY=
-
-
-############
-# Functions - Configuration for Functions
-############
-# NOTE: VERIFY_JWT applies to all functions. Per-function VERIFY_JWT is not supported yet.
-FUNCTIONS_VERIFY_JWT=false
-
-
-############
-# Logs - Configuration for Logflare
-# Please refer to https://supabase.com/docs/reference/self-hosting-analytics/introduction
-############
-
-LOGFLARE_LOGGER_BACKEND_API_KEY=your-super-secret-and-long-logflare-key
-
-# Change vector.toml sinks to reflect this change
-LOGFLARE_API_KEY=your-super-secret-and-long-logflare-key
-
-# Docker socket location - this value will differ depending on your OS
-DOCKER_SOCKET_LOCATION=/var/run/docker.sock
-
-# Google Cloud Project details
-GOOGLE_PROJECT_ID=GOOGLE_PROJECT_ID
-GOOGLE_PROJECT_NUMBER=GOOGLE_PROJECT_NUMBER
```
````diff
@@ -22,29 +22,35 @@ To run the AutoGPT Platform, follow these steps:
 
 2. Run the following command:
    ```
-   cp .env.example .env
+   git submodule update --init --recursive
    ```
-   This command will copy the `.env.example` file to `.env`. You can modify the `.env` file to add your own environment variables.
+   This command will initialize and update the submodules in the repository. The `supabase` folder will be cloned to the root directory.
 
 3. Run the following command:
    ```
+   cp supabase/docker/.env.example .env
+   ```
+   This command will copy the `.env.example` file to `.env` in the `supabase/docker` directory. You can modify the `.env` file to add your own environment variables.
+
+4. Run the following command:
+   ```
    docker compose up -d
    ```
    This command will start all the necessary backend services defined in the `docker-compose.yml` file in detached mode.
 
-4. Navigate to `frontend` within the `autogpt_platform` directory:
+5. Navigate to `frontend` within the `autogpt_platform` directory:
    ```
    cd frontend
    ```
    You will need to run your frontend application separately on your local machine.
 
-5. Run the following command:
+6. Run the following command:
    ```
    cp .env.example .env.local
    ```
   This command will copy the `.env.example` file to `.env.local` in the `frontend` directory. You can modify the `.env.local` within this folder to add your own environment variables for the frontend application.
 
-6. Run the following command:
+7. Run the following command:
    ```
    npm install
    npm run dev
@@ -55,7 +61,7 @@ To run the AutoGPT Platform, follow these steps:
    yarn install && yarn dev
    ```
 
-7. Open your browser and navigate to `http://localhost:3000` to access the AutoGPT Platform frontend.
+8. Open your browser and navigate to `http://localhost:3000` to access the AutoGPT Platform frontend.
 
 ### Docker Compose Commands
````
```diff
@@ -1,13 +1,14 @@
 from .config import Settings
 from .depends import requires_admin_user, requires_user
 from .jwt_utils import parse_jwt_token
-from .middleware import APIKeyValidator, auth_middleware
+from .middleware import auth_middleware
 from .models import User
 
 __all__ = [
     "Settings",
     "parse_jwt_token",
     "requires_user",
     "requires_admin_user",
-    "APIKeyValidator",
     "auth_middleware",
     "User",
 ]
```
```diff
@@ -1,11 +1,14 @@
 import os
 
+from dotenv import load_dotenv
+
+load_dotenv()
+
 
 class Settings:
-    def __init__(self):
-        self.JWT_SECRET_KEY: str = os.getenv("SUPABASE_JWT_SECRET", "")
-        self.ENABLE_AUTH: bool = os.getenv("ENABLE_AUTH", "false").lower() == "true"
-        self.JWT_ALGORITHM: str = "HS256"
+    JWT_SECRET_KEY: str = os.getenv("SUPABASE_JWT_SECRET", "")
+    ENABLE_AUTH: bool = os.getenv("ENABLE_AUTH", "false").lower() == "true"
+    JWT_ALGORITHM: str = "HS256"
 
     @property
     def is_configured(self) -> bool:
```
```diff
@@ -1,6 +1,6 @@
 import fastapi
 
-from .config import settings
+from .config import Settings
 from .middleware import auth_middleware
 from .models import DEFAULT_USER_ID, User
 
@@ -17,7 +17,7 @@ def requires_admin_user(
 
 def verify_user(payload: dict | None, admin_only: bool) -> User:
     if not payload:
-        if settings.ENABLE_AUTH:
+        if Settings.ENABLE_AUTH:
             raise fastapi.HTTPException(
                 status_code=401, detail="Authorization header is missing"
             )
```
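
A side note on the two `Settings` styles in the hunks above (the snippet below is an illustration, not repository code): class attributes read the environment once at import time, while the `__init__` form re-reads it on every instantiation, so `Settings.ENABLE_AUTH` and a freshly constructed instance can disagree after the environment changes.

```python
import os

# Hypothetical names, for demonstration of the difference only.


class ClassAttrSettings:
    # Evaluated once, when this module is imported.
    ENABLE_AUTH: bool = os.getenv("ENABLE_AUTH", "false").lower() == "true"


class InstanceSettings:
    def __init__(self):
        # Evaluated on every instantiation, so it sees later env changes.
        self.ENABLE_AUTH: bool = os.getenv("ENABLE_AUTH", "false").lower() == "true"


os.environ["ENABLE_AUTH"] = "true"
print(ClassAttrSettings.ENABLE_AUTH)   # still False if the env var was unset at import time
print(InstanceSettings().ENABLE_AUTH)  # True: read at construction time
```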
````diff
@@ -1,10 +1,7 @@
-import inspect
 import logging
-from typing import Any, Callable, Optional
 
-from fastapi import HTTPException, Request, Security
-from fastapi.security import APIKeyHeader, HTTPBearer
-from starlette.status import HTTP_401_UNAUTHORIZED
+from fastapi import HTTPException, Request
+from fastapi.security import HTTPBearer
 
 from .config import settings
 from .jwt_utils import parse_jwt_token
@@ -16,7 +13,7 @@ logger = logging.getLogger(__name__)
 async def auth_middleware(request: Request):
     if not settings.ENABLE_AUTH:
         # If authentication is disabled, allow the request to proceed
-        logger.warning("Auth disabled")
+        logger.warn("Auth disabled")
         return {}
 
     security = HTTPBearer()
@@ -32,104 +29,3 @@ async def auth_middleware(request: Request):
     except ValueError as e:
         raise HTTPException(status_code=401, detail=str(e))
     return payload
-
-
-class APIKeyValidator:
-    """
-    Configurable API key validator that supports custom validation functions
-    for FastAPI applications.
-
-    This class provides a flexible way to implement API key authentication with optional
-    custom validation logic. It can be used for simple token matching
-    or more complex validation scenarios like database lookups.
-
-    Examples:
-        Simple token validation:
-        ```python
-        validator = APIKeyValidator(
-            header_name="X-API-Key",
-            expected_token="your-secret-token"
-        )
-
-        @app.get("/protected", dependencies=[Depends(validator.get_dependency())])
-        def protected_endpoint():
-            return {"message": "Access granted"}
-        ```
-
-        Custom validation with database lookup:
-        ```python
-        async def validate_with_db(api_key: str):
-            api_key_obj = await db.get_api_key(api_key)
-            return api_key_obj if api_key_obj and api_key_obj.is_active else None
-
-        validator = APIKeyValidator(
-            header_name="X-API-Key",
-            validate_fn=validate_with_db
-        )
-        ```
-
-    Args:
-        header_name (str): The name of the header containing the API key
-        expected_token (Optional[str]): The expected API key value for simple token matching
-        validate_fn (Optional[Callable]): Custom validation function that takes an API key
-            string and returns a boolean or object. Can be async.
-        error_status (int): HTTP status code to use for validation errors
-        error_message (str): Error message to return when validation fails
-    """
-
-    def __init__(
-        self,
-        header_name: str,
-        expected_token: Optional[str] = None,
-        validate_fn: Optional[Callable[[str], bool]] = None,
-        error_status: int = HTTP_401_UNAUTHORIZED,
-        error_message: str = "Invalid API key",
-    ):
-        # Create the APIKeyHeader as a class property
-        self.security_scheme = APIKeyHeader(name=header_name)
-        self.expected_token = expected_token
-        self.custom_validate_fn = validate_fn
-        self.error_status = error_status
-        self.error_message = error_message
-
-    async def default_validator(self, api_key: str) -> bool:
-        return api_key == self.expected_token
-
-    async def __call__(
-        self, request: Request, api_key: str = Security(APIKeyHeader)
-    ) -> Any:
-        if api_key is None:
-            raise HTTPException(status_code=self.error_status, detail="Missing API key")
-
-        # Use custom validation if provided, otherwise use default equality check
-        validator = self.custom_validate_fn or self.default_validator
-        result = (
-            await validator(api_key)
-            if inspect.iscoroutinefunction(validator)
-            else validator(api_key)
-        )
-
-        if not result:
-            raise HTTPException(
-                status_code=self.error_status, detail=self.error_message
-            )
-
-        # Store validation result in request state if it's not just a boolean
-        if result is not True:
-            request.state.api_key = result
-
-        return result
-
-    def get_dependency(self):
-        """
-        Returns a callable dependency that FastAPI will recognize as a security scheme
-        """
-
-        async def validate_api_key(
-            request: Request, api_key: str = Security(self.security_scheme)
-        ) -> Any:
-            return await self(request, api_key)
-
-        # This helps FastAPI recognize it as a security dependency
-        validate_api_key.__name__ = f"validate_{self.security_scheme.model.name}"
-        return validate_api_key
````
```diff
@@ -13,6 +13,7 @@ from typing_extensions import ParamSpec
 from .config import SETTINGS
 
 logger = logging.getLogger(__name__)
+logging.basicConfig(level=logging.DEBUG)
 
 P = ParamSpec("P")
 T = TypeVar("T")
```
```diff
@@ -8,7 +8,7 @@ from pydantic import Field, field_validator
 from pydantic_settings import BaseSettings, SettingsConfigDict
 
 from .filters import BelowLevelFilter
-from .formatters import AGPTFormatter
+from .formatters import AGPTFormatter, StructuredLoggingFormatter
 
 LOG_DIR = Path(__file__).parent.parent.parent.parent / "logs"
 LOG_FILE = "activity.log"
@@ -18,7 +18,7 @@ ERROR_LOG_FILE = "error.log"
 SIMPLE_LOG_FORMAT = "%(asctime)s %(levelname)s %(title)s%(message)s"
 
 DEBUG_LOG_FORMAT = (
-    "%(asctime)s %(levelname)s %(filename)s:%(lineno)d %(title)s%(message)s"
+    "%(asctime)s %(levelname)s %(filename)s:%(lineno)d" " %(title)s%(message)s"
 )
 
@@ -81,26 +81,9 @@ def configure_logging(force_cloud_logging: bool = False) -> None:
     """
 
     config = LoggingConfig()
 
     log_handlers: list[logging.Handler] = []
 
-    # Console output handlers
-    stdout = logging.StreamHandler(stream=sys.stdout)
-    stdout.setLevel(config.level)
-    stdout.addFilter(BelowLevelFilter(logging.WARNING))
-    if config.level == logging.DEBUG:
-        stdout.setFormatter(AGPTFormatter(DEBUG_LOG_FORMAT))
-    else:
-        stdout.setFormatter(AGPTFormatter(SIMPLE_LOG_FORMAT))
-
-    stderr = logging.StreamHandler()
-    stderr.setLevel(logging.WARNING)
-    if config.level == logging.DEBUG:
-        stderr.setFormatter(AGPTFormatter(DEBUG_LOG_FORMAT))
-    else:
-        stderr.setFormatter(AGPTFormatter(SIMPLE_LOG_FORMAT))
-
-    log_handlers += [stdout, stderr]
-
     # Cloud logging setup
     if config.enable_cloud_logging or force_cloud_logging:
         import google.cloud.logging
@@ -114,7 +97,28 @@ def configure_logging(force_cloud_logging: bool = False) -> None:
             transport=SyncTransport,
         )
         cloud_handler.setLevel(config.level)
+        cloud_handler.setFormatter(StructuredLoggingFormatter())
         log_handlers.append(cloud_handler)
+        print("Cloud logging enabled")
+    else:
+        # Console output handlers
+        stdout = logging.StreamHandler(stream=sys.stdout)
+        stdout.setLevel(config.level)
+        stdout.addFilter(BelowLevelFilter(logging.WARNING))
+        if config.level == logging.DEBUG:
+            stdout.setFormatter(AGPTFormatter(DEBUG_LOG_FORMAT))
+        else:
+            stdout.setFormatter(AGPTFormatter(SIMPLE_LOG_FORMAT))
+
+        stderr = logging.StreamHandler()
+        stderr.setLevel(logging.WARNING)
+        if config.level == logging.DEBUG:
+            stderr.setFormatter(AGPTFormatter(DEBUG_LOG_FORMAT))
+        else:
+            stderr.setFormatter(AGPTFormatter(SIMPLE_LOG_FORMAT))
+
+        log_handlers += [stdout, stderr]
+        print("Console logging enabled")
 
     # File logging setup
     if config.enable_file_logging:
@@ -152,6 +156,7 @@ def configure_logging(force_cloud_logging: bool = False) -> None:
     error_log_handler.setLevel(logging.ERROR)
     error_log_handler.setFormatter(AGPTFormatter(DEBUG_LOG_FORMAT, no_color=True))
     log_handlers.append(error_log_handler)
+    print("File logging enabled")
 
     # Configure the root logger
     logging.basicConfig(
```
```diff
@@ -1,6 +1,7 @@
 import logging
 
 from colorama import Fore, Style
+from google.cloud.logging_v2.handlers import CloudLoggingFilter, StructuredLogHandler
 
 from .utils import remove_color_codes
 
@@ -79,3 +80,16 @@ class AGPTFormatter(FancyConsoleFormatter):
             return remove_color_codes(super().format(record))
         else:
             return super().format(record)
+
+
+class StructuredLoggingFormatter(StructuredLogHandler, logging.Formatter):
+    def __init__(self):
+        # Set up CloudLoggingFilter to add diagnostic info to the log records
+        self.cloud_logging_filter = CloudLoggingFilter()
+
+        # Init StructuredLogHandler
+        super().__init__()
+
+    def format(self, record: logging.LogRecord) -> str:
+        self.cloud_logging_filter.filter(record)
+        return super().format(record)
```
```diff
@@ -2,7 +2,6 @@ import logging
 import re
 from typing import Any
 
-import uvicorn.config
 from colorama import Fore
 
@@ -26,14 +25,3 @@ def print_attribute(
             "color": value_color,
         },
     )
-
-
-def generate_uvicorn_config():
-    """
-    Generates a uvicorn logging config that silences uvicorn's default logging and tells it to use the native logging module.
-    """
-    log_config = dict(uvicorn.config.LOGGING_CONFIG)
-    log_config["loggers"]["uvicorn"] = {"handlers": []}
-    log_config["loggers"]["uvicorn.error"] = {"handlers": []}
-    log_config["loggers"]["uvicorn.access"] = {"handlers": []}
-    return log_config
```
```diff
@@ -1,59 +1,20 @@
-import inspect
 import threading
-from typing import Awaitable, Callable, ParamSpec, TypeVar, cast, overload
+from typing import Callable, ParamSpec, TypeVar
 
 P = ParamSpec("P")
 R = TypeVar("R")
 
 
-@overload
-def thread_cached(func: Callable[P, Awaitable[R]]) -> Callable[P, Awaitable[R]]: ...
-
-
-@overload
-def thread_cached(func: Callable[P, R]) -> Callable[P, R]: ...
-
-
-def thread_cached(
-    func: Callable[P, R] | Callable[P, Awaitable[R]],
-) -> Callable[P, R] | Callable[P, Awaitable[R]]:
+def thread_cached(func: Callable[P, R]) -> Callable[P, R]:
     thread_local = threading.local()
 
-    def _clear():
-        if hasattr(thread_local, "cache"):
-            del thread_local.cache
+    def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
+        cache = getattr(thread_local, "cache", None)
+        if cache is None:
+            cache = thread_local.cache = {}
+        key = (args, tuple(sorted(kwargs.items())))
+        if key not in cache:
+            cache[key] = func(*args, **kwargs)
+        return cache[key]
 
-    if inspect.iscoroutinefunction(func):
-
-        async def async_wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
-            cache = getattr(thread_local, "cache", None)
-            if cache is None:
-                cache = thread_local.cache = {}
-            key = (args, tuple(sorted(kwargs.items())))
-            if key not in cache:
-                cache[key] = await cast(Callable[P, Awaitable[R]], func)(
-                    *args, **kwargs
-                )
-            return cache[key]
-
-        setattr(async_wrapper, "clear_cache", _clear)
-        return async_wrapper
-
-    else:
-
-        def sync_wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
-            cache = getattr(thread_local, "cache", None)
-            if cache is None:
-                cache = thread_local.cache = {}
-            key = (args, tuple(sorted(kwargs.items())))
-            if key not in cache:
-                cache[key] = func(*args, **kwargs)
-            return cache[key]
-
-        setattr(sync_wrapper, "clear_cache", _clear)
-        return sync_wrapper
-
-
-def clear_thread_cache(func: Callable) -> None:
-    if clear := getattr(func, "clear_cache", None):
-        clear()
+    return wrapper
```
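
From the caller's side, both variants above are used the same way; a small usage sketch (hypothetical caller code, assuming the `thread_cached` decorator defined above is importable):

```python
import threading


@thread_cached
def expensive(x: int) -> int:
    # Recomputed once per (thread, arguments) pair; cached afterwards.
    print(f"computing {x} in {threading.current_thread().name}")
    return x * x


expensive(3)  # computes
expensive(3)  # served from this thread's cache, no print
threading.Thread(target=expensive, args=(3,)).start()  # recomputes: new thread, fresh cache
```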
```diff
@@ -31,7 +31,7 @@ class RedisKeyedMutex:
         try:
             yield
         finally:
-            if lock.locked() and lock.owned():
+            if lock.locked():
                 lock.release()
 
     def acquire(self, key: Any) -> "RedisLock":
```
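
The extra `lock.owned()` guard on one side of the hunk above matters because redis-py locks can time out: once the token expires and another client acquires the key, `locked()` is still true even though the lock is no longer ours. An illustrative sketch (assumes a reachable Redis server; key name and timeout are for demonstration only):

```python
from redis import Redis

r = Redis()
lock = r.lock("some-key", timeout=1)
lock.acquire(blocking=True)
# ... suppose the work takes longer than the 1s timeout and another
# client acquires the now-expired lock ...
print(lock.locked())  # True: *someone* holds the lock (not necessarily us)
print(lock.owned())   # False: our token no longer matches the stored one
if lock.locked() and lock.owned():
    lock.release()    # safe: only releases a lock this client still owns
```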
**autogpt_platform/autogpt_libs/poetry.lock** (generated): 969 lines changed; diff suppressed because it is too large.
```diff
@@ -7,20 +7,21 @@ readme = "README.md"
 packages = [{ include = "autogpt_libs" }]
 
 [tool.poetry.dependencies]
-python = ">=3.10,<4.0"
 colorama = "^0.4.6"
 expiringdict = "^1.2.2"
-google-cloud-logging = "^3.12.1"
-pydantic = "^2.11.4"
-pydantic-settings = "^2.9.1"
+google-cloud-logging = "^3.11.3"
+pydantic = "^2.10.3"
+pydantic-settings = "^2.7.0"
 pyjwt = "^2.10.1"
-pytest-asyncio = "^0.26.0"
+pytest-asyncio = "^0.25.0"
 pytest-mock = "^3.14.0"
-supabase = "^2.15.1"
+python = ">=3.10,<4.0"
 python-dotenv = "^1.0.1"
+supabase = "^2.10.0"
 
 [tool.poetry.group.dev.dependencies]
 redis = "^5.2.1"
-ruff = "^0.11.10"
+ruff = "^0.8.6"
 
 [build-system]
 requires = ["poetry-core"]
```
```diff
@@ -2,33 +2,19 @@ DB_USER=postgres
 DB_PASS=your-super-secret-and-long-postgres-password
 DB_NAME=postgres
 DB_PORT=5432
 DB_HOST=localhost
-DB_CONNECTION_LIMIT=12
-DB_CONNECT_TIMEOUT=60
-DB_POOL_TIMEOUT=300
-DB_SCHEMA=platform
-DATABASE_URL="postgresql://${DB_USER}:${DB_PASS}@${DB_HOST}:${DB_PORT}/${DB_NAME}?schema=${DB_SCHEMA}&connect_timeout=${DB_CONNECT_TIMEOUT}"
-DIRECT_URL="postgresql://${DB_USER}:${DB_PASS}@${DB_HOST}:${DB_PORT}/${DB_NAME}?schema=${DB_SCHEMA}&connect_timeout=${DB_CONNECT_TIMEOUT}"
+DATABASE_URL="postgresql://${DB_USER}:${DB_PASS}@localhost:${DB_PORT}/${DB_NAME}?connect_timeout=60&schema=platform"
 PRISMA_SCHEMA="postgres/schema.prisma"
 
-# EXECUTOR
-NUM_GRAPH_WORKERS=10
-NUM_NODE_WORKERS=3
-
 BACKEND_CORS_ALLOW_ORIGINS=["http://localhost:3000"]
 
 # generate using `from cryptography.fernet import Fernet;Fernet.generate_key().decode()`
 ENCRYPTION_KEY='dvziYgz0KSK8FENhju0ZYi8-fRTfAdlz6YLhdB_jhNw='
-UNSUBSCRIBE_SECRET_KEY = 'HlP8ivStJjmbf6NKi78m_3FnOogut0t5ckzjsIqeaio='
 
 REDIS_HOST=localhost
 REDIS_PORT=6379
 REDIS_PASSWORD=password
 
 ENABLE_CREDIT=false
 STRIPE_API_KEY=
 STRIPE_WEBHOOK_SECRET=
 
 # What environment things should be logged under: local dev or prod
 APP_ENV=local
 # What environment to behave as: "local" or "cloud"
@@ -36,26 +22,12 @@ BEHAVE_AS=local
 PYRO_HOST=localhost
 SENTRY_DSN=
 
-# Email For Postmark so we can send emails
-POSTMARK_SERVER_API_TOKEN=
-POSTMARK_SENDER_EMAIL=invalid@invalid.com
-POSTMARK_WEBHOOK_TOKEN=
-
 ## User auth with Supabase is required for any of the 3rd party integrations with auth to work.
 ENABLE_AUTH=true
 SUPABASE_URL=http://localhost:8000
 SUPABASE_SERVICE_ROLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
 SUPABASE_JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
 
-# RabbitMQ credentials -- Used for communication between services
-RABBITMQ_HOST=localhost
-RABBITMQ_PORT=5672
-RABBITMQ_DEFAULT_USER=rabbitmq_user_default
-RABBITMQ_DEFAULT_PASS=k0VMxyIJF9S35f3x2uaw5IWAl6Y536O7
-
 ## GCS bucket is required for marketplace and library functionality
 MEDIA_GCS_BUCKET_NAME=
 
 ## For local development, you may need to set FRONTEND_BASE_URL for the OAuth flow
 ## for integrations to work. Defaults to the value of PLATFORM_BASE_URL if not set.
 # FRONTEND_BASE_URL=http://localhost:3000
@@ -64,14 +36,7 @@ MEDIA_GCS_BUCKET_NAME=
 ## to use the platform's webhook-related functionality.
 ## If you are developing locally, you can use something like ngrok to get a publc URL
 ## and tunnel it to your locally running backend.
-PLATFORM_BASE_URL=http://localhost:3000
-
-## Cloudflare Turnstile (CAPTCHA) Configuration
-## Get these from the Cloudflare Turnstile dashboard: https://dash.cloudflare.com/?to=/:account/turnstile
-## This is the backend secret key
-TURNSTILE_SECRET_KEY=
-## This is the verify URL
-TURNSTILE_VERIFY_URL=https://challenges.cloudflare.com/turnstile/v0/siteverify
+PLATFORM_BASE_URL=https://your-public-url-here
 
 ## == INTEGRATION CREDENTIALS == ##
 # Each set of server side credentials is required for the corresponding 3rd party
@@ -107,20 +72,6 @@ GOOGLE_CLIENT_SECRET=
 TWITTER_CLIENT_ID=
 TWITTER_CLIENT_SECRET=
 
-# Linear App
-# Make a new workspace for your OAuth APP -- trust me
-# https://linear.app/settings/api/applications/new
-# Callback URL: http://localhost:3000/auth/integrations/oauth_callback
-LINEAR_CLIENT_ID=
-LINEAR_CLIENT_SECRET=
-
-# To obtain Todoist API credentials:
-# 1. Create a Todoist account at todoist.com
-# 2. Visit the Developer Console: https://developer.todoist.com/appconsole.html
-# 3. Click "Create new app"
-# 4. Once created, copy your Client ID and Client Secret below
-TODOIST_CLIENT_ID=
-TODOIST_CLIENT_SECRET=
 
 ## ===== OPTIONAL API KEYS ===== ##
 
@@ -129,15 +80,12 @@ OPENAI_API_KEY=
 ANTHROPIC_API_KEY=
 GROQ_API_KEY=
 OPEN_ROUTER_API_KEY=
-LLAMA_API_KEY=
 
 # Reddit
 # Go to https://www.reddit.com/prefs/apps and create a new app
 # Choose "script" for the type
 # Fill in the redirect uri as <your_frontend_url>/auth/integrations/oauth_callback, e.g. http://localhost:3000/auth/integrations/oauth_callback
 REDDIT_CLIENT_ID=
 REDDIT_CLIENT_SECRET=
 REDDIT_USER_AGENT="AutoGPT:1.0 (by /u/autogpt)"
+REDDIT_USERNAME=
+REDDIT_PASSWORD=
 
 # Discord
 DISCORD_BOT_TOKEN=
@@ -182,27 +130,9 @@ EXA_API_KEY=
 # E2B
 E2B_API_KEY=
 
-# Mem0
-MEM0_API_KEY=
-
-# Nvidia
-NVIDIA_API_KEY=
-
-# Apollo
-APOLLO_API_KEY=
-
-# SmartLead
-SMARTLEAD_API_KEY=
-
-# ZeroBounce
-ZEROBOUNCE_API_KEY=
-
-# Ayrshare
-AYRSHARE_API_KEY=
-AYRSHARE_JWT_KEY=
 
 ## ===== OPTIONAL API KEYS END ===== ##
 
 # Logging Configuration
 LOG_LEVEL=INFO
 ENABLE_CLOUD_LOGGING=false
```
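
The `# generate using ...` comment next to `ENCRYPTION_KEY` above is itself runnable; spelled out as a standalone snippet:

```python
from cryptography.fernet import Fernet

# Prints a fresh urlsafe-base64 Fernet key, suitable for values like
# ENCRYPTION_KEY in the .env example above.
print(Fernet.generate_key().decode())
```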
```diff
@@ -73,6 +73,7 @@ FROM server_dependencies AS server
 COPY autogpt_platform/backend /app/autogpt_platform/backend
 RUN poetry install --no-ansi --only-root
 
+ENV DATABASE_URL=""
 ENV PORT=8000
 
 CMD ["poetry", "run", "rest"]
```
````diff
@@ -1 +1,75 @@
-[Advanced Setup (Dev Branch)](https://dev-docs.agpt.co/platform/advanced_setup/#autogpt_agent_server_advanced_set_up)
+# AutoGPT Agent Server Advanced set up
+
+This guide walks you through a dockerized set up, with an external DB (postgres)
+
+## Setup
+
+We use the Poetry to manage the dependencies. To set up the project, follow these steps inside this directory:
+
+0. Install Poetry
+   ```sh
+   pip install poetry
+   ```
+
+1. Configure Poetry to use .venv in your project directory
+   ```sh
+   poetry config virtualenvs.in-project true
+   ```
+
+2. Enter the poetry shell
+
+   ```sh
+   poetry shell
+   ```
+
+3. Install dependencies
+
+   ```sh
+   poetry install
+   ```
+
+4. Copy .env.example to .env
+
+   ```sh
+   cp .env.example .env
+   ```
+
+5. Generate the Prisma client
+
+   ```sh
+   poetry run prisma generate
+   ```
+
+> In case Prisma generates the client for the global Python installation instead of the virtual environment, the current mitigation is to just uninstall the global Prisma package:
+>
+> ```sh
+> pip uninstall prisma
+> ```
+>
+> Then run the generation again. The path *should* look something like this:
+> `<some path>/pypoetry/virtualenvs/backend-TQIRSwR6-py3.12/bin/prisma`
+
+6. Run the postgres database from the /rnd folder
+
+   ```sh
+   cd autogpt_platform/
+   docker compose up -d
+   ```
+
+7. Run the migrations (from the backend folder)
+
+   ```sh
+   cd ../backend
+   prisma migrate deploy
+   ```
+
+## Running The Server
+
+### Starting the server directly
+
+Run the following command:
+
+```sh
+poetry run app
+```
````
````diff
@@ -1 +1,203 @@
-[Getting Started (Released)](https://docs.agpt.co/platform/getting-started/#autogpt_agent_server)
+# AutoGPT Agent Server
+
+This is an initial project for creating the next generation of agent execution, which is an AutoGPT agent server.
+The agent server will enable the creation of composite multi-agent systems that utilize AutoGPT agents and other non-agent components as its primitives.
+
+## Docs
+
+You can access the docs for the [AutoGPT Agent Server here](https://docs.agpt.co/server/setup).
+
+## Setup
+
+We use the Poetry to manage the dependencies. To set up the project, follow these steps inside this directory:
+
+0. Install Poetry
+   ```sh
+   pip install poetry
+   ```
+
+1. Configure Poetry to use .venv in your project directory
+   ```sh
+   poetry config virtualenvs.in-project true
+   ```
+
+2. Enter the poetry shell
+
+   ```sh
+   poetry shell
+   ```
+
+3. Install dependencies
+
+   ```sh
+   poetry install
+   ```
+
+4. Copy .env.example to .env
+
+   ```sh
+   cp .env.example .env
+   ```
+
+5. Generate the Prisma client
+
+   ```sh
+   poetry run prisma generate
+   ```
+
+> In case Prisma generates the client for the global Python installation instead of the virtual environment, the current mitigation is to just uninstall the global Prisma package:
+>
+> ```sh
+> pip uninstall prisma
+> ```
+>
+> Then run the generation again. The path *should* look something like this:
+> `<some path>/pypoetry/virtualenvs/backend-TQIRSwR6-py3.12/bin/prisma`
+
+6. Migrate the database. Be careful because this deletes current data in the database.
+
+   ```sh
+   docker compose up db -d
+   poetry run prisma migrate deploy
+   ```
+
+## Running The Server
+
+### Starting the server without Docker
+
+Run the following command to run database in docker but the application locally:
+
+```sh
+docker compose --profile local up deps --build --detach
+poetry run app
+```
+
+### Starting the server with Docker
+
+Run the following command to build the dockerfiles:
+
+```sh
+docker compose build
+```
+
+Run the following command to run the app:
+
+```sh
+docker compose up
+```
+
+Run the following to automatically rebuild when code changes, in another terminal:
+
+```sh
+docker compose watch
+```
+
+Run the following command to shut down:
+
+```sh
+docker compose down
+```
+
+If you run into issues with dangling orphans, try:
+
+```sh
+docker compose down --volumes --remove-orphans && docker-compose up --force-recreate --renew-anon-volumes --remove-orphans
+```
+
+## Testing
+
+To run the tests:
+
+```sh
+poetry run test
+```
+
+## Development
+
+### Formatting & Linting
+Auto formatter and linter are set up in the project. To run them:
+
+Install:
+```sh
+poetry install --with dev
+```
+
+Format the code:
+```sh
+poetry run format
+```
+
+Lint the code:
+```sh
+poetry run lint
+```
+
+## Project Outline
+
+The current project has the following main modules:
+
+### **blocks**
+
+This module stores all the Agent Blocks, which are reusable components to build a graph that represents the agent's behavior.
+
+### **data**
+
+This module stores the logical model that is persisted in the database.
+It abstracts the database operations into functions that can be called by the service layer.
+Any code that interacts with Prisma objects or the database should reside in this module.
+The main models are:
+* `block`: anything related to the block used in the graph
+* `execution`: anything related to the execution graph execution
+* `graph`: anything related to the graph, node, and its relations
+
+### **execution**
+
+This module stores the business logic of executing the graph.
+It currently has the following main modules:
+* `manager`: A service that consumes the queue of the graph execution and executes the graph. It contains both pieces of logic.
+* `scheduler`: A service that triggers scheduled graph execution based on a cron expression. It pushes an execution request to the manager.
+
+### **server**
+
+This module stores the logic for the server API.
+It contains all the logic used for the API that allows the client to create, execute, and monitor the graph and its execution.
+This API service interacts with other services like those defined in `manager` and `scheduler`.
+
+### **utils**
+
+This module stores utility functions that are used across the project.
+Currently, it has two main modules:
+* `process`: A module that contains the logic to spawn a new process.
+* `service`: A module that serves as a parent class for all the services in the project.
+
+## Service Communication
+
+Currently, there are only 3 active services:
+
+- AgentServer (the API, defined in `server.py`)
+- ExecutionManager (the executor, defined in `manager.py`)
+- ExecutionScheduler (the scheduler, defined in `scheduler.py`)
+
+The services run in independent Python processes and communicate through an IPC.
+A communication layer (`service.py`) is created to decouple the communication library from the implementation.
+
+Currently, the IPC is done using Pyro5 and abstracted in a way that allows a function decorated with `@expose` to be called from a different process.
+
+By default the daemons run on the following ports:
+
+Execution Manager Daemon: 8002
+Execution Scheduler Daemon: 8003
+Rest Server Daemon: 8004
+
+## Adding a New Agent Block
+
+To add a new agent block, you need to create a new class that inherits from `Block` and provides the following information:
+* All the block code should live in the `blocks` (`backend.blocks`) module.
+* `input_schema`: the schema of the input data, represented by a Pydantic object.
+* `output_schema`: the schema of the output data, represented by a Pydantic object.
+* `run` method: the main logic of the block.
+* `test_input` & `test_output`: the sample input and output data for the block, which will be used to auto-test the block.
+* You can mock the functions declared in the block using the `test_mock` field for your unit tests.
+* Once you finish creating the block, you can test it by running `poetry run pytest -s test/block/test_block.py`.
````
```diff
@@ -1,30 +1,22 @@
 import logging
 from typing import TYPE_CHECKING
 
 if TYPE_CHECKING:
     from backend.util.process import AppProcess
 
 logger = logging.getLogger(__name__)
 
 
 def run_processes(*processes: "AppProcess", **kwargs):
     """
     Execute all processes in the app. The last process is run in the foreground.
-    Includes enhanced error handling and process lifecycle management.
     """
     try:
         # Run all processes except the last one in the background.
         for process in processes[:-1]:
             process.start(background=True, **kwargs)
 
-        # Run the last process in the foreground.
+        # Run the last process in the foreground
         processes[-1].start(background=False, **kwargs)
     finally:
         for process in processes:
-            try:
-                process.stop()
-            except Exception as e:
-                logger.exception(f"[{process.service_name}] unable to stop: {e}")
+            process.stop()
 
 
 def main(**kwargs):
@@ -32,16 +24,14 @@ def main(**kwargs):
     Run all the processes required for the AutoGPT-server (REST and WebSocket APIs).
     """
 
-    from backend.executor import DatabaseManager, ExecutionManager, Scheduler
-    from backend.notifications import NotificationManager
+    from backend.executor import DatabaseManager, ExecutionManager, ExecutionScheduler
     from backend.server.rest_api import AgentServer
     from backend.server.ws_api import WebsocketServer
 
     run_processes(
         DatabaseManager(),
         ExecutionManager(),
-        Scheduler(),
-        NotificationManager(),
+        ExecutionScheduler(),
         WebsocketServer(),
         AgentServer(),
         **kwargs,
```
```diff
@@ -1,99 +1,89 @@
import functools
import importlib
import os
import re
from pathlib import Path
from typing import TYPE_CHECKING, TypeVar
from typing import Type, TypeVar

from backend.data.block import Block

# Dynamically load all modules under backend.blocks
AVAILABLE_MODULES = []
current_dir = Path(__file__).parent
modules = [
    str(f.relative_to(current_dir))[:-3].replace(os.path.sep, ".")
    for f in current_dir.rglob("*.py")
    if f.is_file() and f.name != "__init__.py"
]
for module in modules:
    if not re.match("^[a-z0-9_.]+$", module):
        raise ValueError(
            f"Block module {module} error: module name must be lowercase, "
            "and contain only alphanumeric characters and underscores."
        )

    importlib.import_module(f".{module}", package=__name__)
    AVAILABLE_MODULES.append(module)

# Load all Block instances from the available modules
AVAILABLE_BLOCKS: dict[str, Type[Block]] = {}

if TYPE_CHECKING:
    from backend.data.block import Block

T = TypeVar("T")


@functools.cache
def load_all_blocks() -> dict[str, type["Block"]]:
    from backend.data.block import Block

    # Dynamically load all modules under backend.blocks
    current_dir = Path(__file__).parent
    modules = [
        str(f.relative_to(current_dir))[:-3].replace(os.path.sep, ".")
        for f in current_dir.rglob("*.py")
        if f.is_file() and f.name != "__init__.py"
    ]
    for module in modules:
        if not re.match("^[a-z0-9_.]+$", module):
            raise ValueError(
                f"Block module {module} error: module name must be lowercase, "
                "and contain only alphanumeric characters and underscores."
            )

        importlib.import_module(f".{module}", package=__name__)

    # Load all Block instances from the available modules
    available_blocks: dict[str, type["Block"]] = {}
    for block_cls in all_subclasses(Block):
        class_name = block_cls.__name__

        if class_name.endswith("Base"):
            continue

        if not class_name.endswith("Block"):
            raise ValueError(
                f"Block class {class_name} does not end with 'Block'. "
                "If you are creating an abstract class, "
                "please name the class with 'Base' at the end"
            )

        block = block_cls.create()

        if not isinstance(block.id, str) or len(block.id) != 36:
            raise ValueError(
                f"Block ID {block.name} error: {block.id} is not a valid UUID"
            )

        if block.id in available_blocks:
            raise ValueError(
                f"Block ID {block.name} error: {block.id} is already in use"
            )

        input_schema = block.input_schema.model_fields
        output_schema = block.output_schema.model_fields

        # Make sure `error` field is a string in the output schema
        if "error" in output_schema and output_schema["error"].annotation is not str:
            raise ValueError(
                f"{block.name} `error` field in output_schema must be a string"
            )

        # Ensure all fields in input_schema and output_schema are annotated SchemaFields
        for field_name, field in [*input_schema.items(), *output_schema.items()]:
            if field.annotation is None:
                raise ValueError(
                    f"{block.name} has a field {field_name} that is not annotated"
                )
            if field.json_schema_extra is None:
                raise ValueError(
                    f"{block.name} has a field {field_name} not defined as SchemaField"
                )

        for field in block.input_schema.model_fields.values():
            if field.annotation is bool and field.default not in (True, False):
                raise ValueError(
                    f"{block.name} has a boolean field with no default value"
                )

        available_blocks[block.id] = block_cls

    return available_blocks


__all__ = ["load_all_blocks"]


def all_subclasses(cls: type[T]) -> list[type[T]]:
def all_subclasses(cls: Type[T]) -> list[Type[T]]:
    subclasses = cls.__subclasses__()
    for subclass in subclasses:
        subclasses += all_subclasses(subclass)
    return subclasses


for block_cls in all_subclasses(Block):
    name = block_cls.__name__

    if block_cls.__name__.endswith("Base"):
        continue

    if not block_cls.__name__.endswith("Block"):
        raise ValueError(
            f"Block class {block_cls.__name__} does not end with 'Block', If you are creating an abstract class, please name the class with 'Base' at the end"
        )

    block = block_cls.create()

    if not isinstance(block.id, str) or len(block.id) != 36:
        raise ValueError(f"Block ID {block.name} error: {block.id} is not a valid UUID")

    if block.id in AVAILABLE_BLOCKS:
        raise ValueError(f"Block ID {block.name} error: {block.id} is already in use")

    input_schema = block.input_schema.model_fields
    output_schema = block.output_schema.model_fields

    # Make sure `error` field is a string in the output schema
    if "error" in output_schema and output_schema["error"].annotation is not str:
        raise ValueError(
            f"{block.name} `error` field in output_schema must be a string"
        )

    # Make sure all fields in input_schema and output_schema are annotated and has a value
    for field_name, field in [*input_schema.items(), *output_schema.items()]:
        if field.annotation is None:
            raise ValueError(
                f"{block.name} has a field {field_name} that is not annotated"
            )
        if field.json_schema_extra is None:
            raise ValueError(
                f"{block.name} has a field {field_name} not defined as SchemaField"
            )

    for field in block.input_schema.model_fields.values():
        if field.annotation is bool and field.default not in (True, False):
            raise ValueError(f"{block.name} has a boolean field with no default value")

    if block.disabled:
        continue

    AVAILABLE_BLOCKS[block.id] = block_cls

__all__ = ["AVAILABLE_MODULES", "AVAILABLE_BLOCKS"]
```
```diff
@@ -1,5 +1,6 @@
import logging
from typing import Any, Optional

from autogpt_libs.utils.cache import thread_cached

from backend.data.block import (
    Block,
@@ -11,43 +12,36 @@ from backend.data.block import (
    get_block,
)
from backend.data.execution import ExecutionStatus
from backend.data.model import CredentialsMetaInput, SchemaField
from backend.util import json
from backend.data.model import SchemaField

logger = logging.getLogger(__name__)


@thread_cached
def get_executor_manager_client():
    from backend.executor import ExecutionManager
    from backend.util.service import get_service_client

    return get_service_client(ExecutionManager)


@thread_cached
def get_event_bus():
    from backend.data.execution import RedisExecutionEventBus

    return RedisExecutionEventBus()


class AgentExecutorBlock(Block):
    class Input(BlockSchema):
        user_id: str = SchemaField(description="User ID")
        graph_id: str = SchemaField(description="Graph ID")
        graph_version: int = SchemaField(description="Graph Version")

        inputs: BlockInput = SchemaField(description="Input data for the graph")
        data: BlockInput = SchemaField(description="Input data for the graph")
        input_schema: dict = SchemaField(description="Input schema for the graph")
        output_schema: dict = SchemaField(description="Output schema for the graph")

        node_credentials_input_map: Optional[
            dict[str, dict[str, CredentialsMetaInput]]
        ] = SchemaField(default=None, hidden=True)

        @classmethod
        def get_input_schema(cls, data: BlockInput) -> dict[str, Any]:
            return data.get("input_schema", {})

        @classmethod
        def get_input_defaults(cls, data: BlockInput) -> BlockInput:
            return data.get("inputs", {})

        @classmethod
        def get_missing_input(cls, data: BlockInput) -> set[str]:
            required_fields = cls.get_input_schema(data).get("required", [])
            return set(required_fields) - set(data)

        @classmethod
        def get_mismatch_error(cls, data: BlockInput) -> str | None:
            return json.validate_with_jsonschema(cls.get_input_schema(data), data)

    class Output(BlockSchema):
        pass

@@ -62,27 +56,26 @@ class AgentExecutorBlock(Block):
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        from backend.data.execution import ExecutionEventType
        from backend.executor import utils as execution_utils
        executor_manager = get_executor_manager_client()
        event_bus = get_event_bus()

        event_bus = execution_utils.get_execution_event_bus()

        graph_exec = execution_utils.add_graph_execution(
        graph_exec = executor_manager.add_execution(
            graph_id=input_data.graph_id,
            graph_version=input_data.graph_version,
            user_id=input_data.user_id,
            inputs=input_data.inputs,
            node_credentials_input_map=input_data.node_credentials_input_map,
            data=input_data.data,
        )
        log_id = f"Graph #{input_data.graph_id}-V{input_data.graph_version}, exec-id: {graph_exec.id}"
        log_id = f"Graph #{input_data.graph_id}-V{input_data.graph_version}, exec-id: {graph_exec.graph_exec_id}"
        logger.info(f"Starting execution of {log_id}")

        for event in event_bus.listen(
            user_id=graph_exec.user_id,
            graph_id=graph_exec.graph_id,
            graph_exec_id=graph_exec.id,
            graph_id=graph_exec.graph_id, graph_exec_id=graph_exec.graph_exec_id
        ):
            if event.event_type == ExecutionEventType.GRAPH_EXEC_UPDATE:
                logger.info(
                    f"Execution {log_id} produced input {event.input_data} output {event.output_data}"
                )

            if not event.node_id:
                if event.status in [
                    ExecutionStatus.COMPLETED,
                    ExecutionStatus.TERMINATED,
@@ -93,10 +86,6 @@ class AgentExecutorBlock(Block):
                else:
                    continue

            logger.debug(
                f"Execution {log_id} produced input {event.input_data} output {event.output_data}"
            )

            if not event.block_id:
                logger.warning(f"{log_id} received event without block_id {event}")
                continue
@@ -111,7 +100,5 @@ class AgentExecutorBlock(Block):
            continue

            for output_data in event.output_data.get("output", []):
                logger.debug(
                    f"Execution {log_id} produced {output_name}: {output_data}"
                )
                logger.info(f"Execution {log_id} produced {output_name}: {output_data}")
                yield output_name, output_data
```
```diff
@@ -1,8 +1,8 @@
from enum import Enum
from typing import Literal

import replicate
from pydantic import SecretStr
from replicate.client import Client as ReplicateClient
from replicate.helpers import FileOutput

from backend.data.block import Block, BlockCategory, BlockSchema
@@ -170,7 +170,7 @@ class AIImageGeneratorBlock(Block):
    ):
        try:
            # Initialize Replicate client
            client = ReplicateClient(api_token=credentials.api_key.get_secret_value())
            client = replicate.Client(api_token=credentials.api_key.get_secret_value())

            # Run the model with input parameters
            output = client.run(model_name, input=input_params, wait=False)
```
```diff
@@ -3,8 +3,8 @@ import time
from enum import Enum
from typing import Literal

import replicate
from pydantic import SecretStr
from replicate.client import Client as ReplicateClient

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import (
@@ -196,7 +196,7 @@ class AIMusicGeneratorBlock(Block):
        normalization_strategy: NormalizationStrategy,
    ):
        # Initialize Replicate client with the API key
        client = ReplicateClient(api_token=api_key.get_secret_value())
        client = replicate.Client(api_token=api_key.get_secret_value())

        # Run the model with parameters
        output = client.run(
```
```diff
@@ -52,7 +52,6 @@ class AudioTrack(str, Enum):
    REFRESHER = ("Refresher",)
    TOURIST = ("Tourist",)
    TWIN_TYCHES = ("Twin Tyches",)
    DONT_STOP_ME_ABSTRACT_FUTURE_BASS = ("Dont Stop Me Abstract Future Bass",)

    @property
    def audio_url(self):
@@ -78,7 +77,6 @@ class AudioTrack(str, Enum):
            AudioTrack.REFRESHER: "https://cdn.tfrv.xyz/audio/refresher.mp3",
            AudioTrack.TOURIST: "https://cdn.tfrv.xyz/audio/tourist.mp3",
            AudioTrack.TWIN_TYCHES: "https://cdn.tfrv.xyz/audio/twin-tynches.mp3",
            AudioTrack.DONT_STOP_ME_ABSTRACT_FUTURE_BASS: "https://cdn.revid.ai/audio/_dont-stop-me-abstract-future-bass.mp3",
        }
        return audio_urls[self]

@@ -106,7 +104,6 @@ class GenerationPreset(str, Enum):
    MOVIE = ("Movie",)
    STYLIZED_ILLUSTRATION = ("Stylized Illustration",)
    MANGA = ("Manga",)
    DEFAULT = ("DEFAULT",)


class Voice(str, Enum):
@@ -116,7 +113,6 @@ class Voice(str, Enum):
    JESSICA = "Jessica"
    CHARLOTTE = "Charlotte"
    CALLUM = "Callum"
    EVA = "Eva"

    @property
    def voice_id(self):
@@ -127,7 +123,6 @@ class Voice(str, Enum):
            Voice.JESSICA: "cgSgspJ2msm6clMCkdW9",
            Voice.CHARLOTTE: "XB0fDUnXU5powFXDhCwa",
            Voice.CALLUM: "N2lVS1w4EtoT3dr4eOWO",
            Voice.EVA: "FGY2WhTYpPnrIDTdsKH5",
        }
        return voice_id_map[self]

@@ -144,54 +139,7 @@ class VisualMediaType(str, Enum):
logger = logging.getLogger(__name__)


class _RevidMixin:
    """Utility mix‑in that bundles the shared webhook / polling helpers."""

    def create_video(self, api_key: SecretStr, payload: dict) -> dict:
        url = "https://www.revid.ai/api/public/v2/render"
        headers = {"key": api_key.get_secret_value()}
        response = requests.post(url, json=payload, headers=headers)
        logger.debug(
            f"API Response Status Code: {response.status_code}, Content: {response.text}"
        )
        return response.json()

    def check_video_status(self, api_key: SecretStr, pid: str) -> dict:
        url = f"https://www.revid.ai/api/public/v2/status?pid={pid}"
        headers = {"key": api_key.get_secret_value()}
        response = requests.get(url, headers=headers)
        return response.json()

    def wait_for_video(
        self,
        api_key: SecretStr,
        pid: str,
        max_wait_time: int = 3600,
    ) -> str:
        start_time = time.time()
        while time.time() - start_time < max_wait_time:
            status = self.check_video_status(api_key, pid)
            logger.debug(f"Video status: {status}")

            if status.get("status") == "ready" and "videoUrl" in status:
                return status["videoUrl"]
            elif status.get("status") == "error":
                error_message = status.get("error", "Unknown error occurred")
                logger.error(f"Video creation failed: {error_message}")
                raise ValueError(f"Video creation failed: {error_message}")
            elif status.get("status") in ["FAILED", "CANCELED"]:
                logger.error(f"Video creation failed: {status.get('message')}")
                raise ValueError(f"Video creation failed: {status.get('message')}")

            time.sleep(10)

        logger.error("Video creation timed out")
        raise TimeoutError("Video creation timed out")


class AIShortformVideoCreatorBlock(Block, _RevidMixin):
    """Creates a short‑form text‑to‑video clip using stock or AI imagery."""

class AIShortformVideoCreatorBlock(Block):
    class Input(BlockSchema):
        credentials: CredentialsMetaInput[
            Literal[ProviderName.REVID], Literal["api_key"]
@@ -258,28 +206,86 @@ class AIShortformVideoCreatorBlock(Block, _RevidMixin):
                "https://example.com/video.mp4",
            ),
            test_mock={
                "create_webhook": lambda: (
                    "test_uuid",
                    "https://webhook.site/test_uuid",
                ),
                "create_video": lambda api_key, payload: {"pid": "test_pid"},
                "wait_for_video": lambda api_key, pid, max_wait_time=3600: "https://example.com/video.mp4",
                "wait_for_video": lambda api_key, pid, webhook_token, max_wait_time=1000: "https://example.com/video.mp4",
            },
            test_credentials=TEST_CREDENTIALS,
        )

    def create_webhook(self):
        url = "https://webhook.site/token"
        headers = {"Accept": "application/json", "Content-Type": "application/json"}
        response = requests.post(url, headers=headers)
        webhook_data = response.json()
        return webhook_data["uuid"], f"https://webhook.site/{webhook_data['uuid']}"

    def create_video(self, api_key: SecretStr, payload: dict) -> dict:
        url = "https://www.revid.ai/api/public/v2/render"
        headers = {"key": api_key.get_secret_value()}
        response = requests.post(url, json=payload, headers=headers)
        logger.debug(
            f"API Response Status Code: {response.status_code}, Content: {response.text}"
        )
        return response.json()

    def check_video_status(self, api_key: SecretStr, pid: str) -> dict:
        url = f"https://www.revid.ai/api/public/v2/status?pid={pid}"
        headers = {"key": api_key.get_secret_value()}
        response = requests.get(url, headers=headers)
        return response.json()

    def wait_for_video(
        self,
        api_key: SecretStr,
        pid: str,
        webhook_token: str,
        max_wait_time: int = 1000,
    ) -> str:
        start_time = time.time()
        while time.time() - start_time < max_wait_time:
            status = self.check_video_status(api_key, pid)
            logger.debug(f"Video status: {status}")

            if status.get("status") == "ready" and "videoUrl" in status:
                return status["videoUrl"]
            elif status.get("status") == "error":
                error_message = status.get("error", "Unknown error occurred")
                logger.error(f"Video creation failed: {error_message}")
                raise ValueError(f"Video creation failed: {error_message}")
            elif status.get("status") in ["FAILED", "CANCELED"]:
                logger.error(f"Video creation failed: {status.get('message')}")
                raise ValueError(f"Video creation failed: {status.get('message')}")

            time.sleep(10)

        logger.error("Video creation timed out")
        raise TimeoutError("Video creation timed out")

    def run(
        self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
    ) -> BlockOutput:
        # Create a new Webhook.site URL
        webhook_token, webhook_url = self.create_webhook()
        logger.debug(f"Webhook URL: {webhook_url}")

        audio_url = input_data.background_music.audio_url

        payload = {
            "frameRate": input_data.frame_rate,
            "resolution": input_data.resolution,
            "frameDurationMultiplier": 18,
            "webhook": None,
            "webhook": webhook_url,
            "creationParams": {
                "mediaType": input_data.video_style,
                "captionPresetName": "Wrap 1",
                "selectedVoice": input_data.voice.voice_id,
                "hasEnhancedGeneration": True,
                "generationPreset": input_data.generation_preset.name,
                "selectedAudio": input_data.background_music.value,
                "selectedAudio": input_data.background_music,
                "origin": "/create",
                "inputText": input_data.script,
                "flowType": "text-to-video",
@@ -295,7 +301,7 @@ class AIShortformVideoCreatorBlock(Block, _RevidMixin):
                "selectedStoryStyle": {"value": "custom", "label": "Custom"},
                "hasToGenerateVideos": input_data.video_style
                != VisualMediaType.STOCK_VIDEOS,
                "audioUrl": input_data.background_music.audio_url,
                "audioUrl": audio_url,
            },
        }

@@ -308,354 +314,10 @@ class AIShortformVideoCreatorBlock(Block, _RevidMixin):
                f"Failed to create video: No project ID returned. API Response: {response}"
            )
            raise RuntimeError("Failed to create video: No project ID returned")

        logger.debug(f"Video created with project ID: {pid}. Waiting for completion...")
        video_url = self.wait_for_video(credentials.api_key, pid)
        logger.debug(f"Video ready: {video_url}")
        yield "video_url", video_url


class AIAdMakerVideoCreatorBlock(Block, _RevidMixin):
    """Generates a 30‑second vertical AI advert using optional user‑supplied imagery."""

    class Input(BlockSchema):
        credentials: CredentialsMetaInput[
            Literal[ProviderName.REVID], Literal["api_key"]
        ] = CredentialsField(
            description="Credentials for Revid.ai API access.",
        )
        script: str = SchemaField(
            description="Short advertising copy. Line breaks create new scenes.",
            placeholder="Introducing Foobar – [show product photo] the gadget that does it all.",
        )
        ratio: str = SchemaField(description="Aspect ratio", default="9 / 16")
        target_duration: int = SchemaField(
            description="Desired length of the ad in seconds.", default=30
        )
        voice: Voice = SchemaField(
            description="Narration voice", default=Voice.EVA, placeholder=Voice.EVA
        )
        background_music: AudioTrack = SchemaField(
            description="Background track",
            default=AudioTrack.DONT_STOP_ME_ABSTRACT_FUTURE_BASS,
        )
        input_media_urls: list[str] = SchemaField(
            description="List of image URLs to feature in the advert.", default=[]
        )
        use_only_provided_media: bool = SchemaField(
            description="Restrict visuals to supplied images only.", default=True
        )

    class Output(BlockSchema):
        video_url: str = SchemaField(description="URL of the finished advert")
        error: str = SchemaField(description="Error message on failure")

    def __init__(self):
        super().__init__(
            id="3e3fd845-000e-457f-9f50-9f2f9e278bbd",
            description="Creates an AI‑generated 30‑second advert (text + images)",
            categories={BlockCategory.MARKETING, BlockCategory.AI},
            input_schema=AIAdMakerVideoCreatorBlock.Input,
            output_schema=AIAdMakerVideoCreatorBlock.Output,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "script": "Test product launch!",
                "input_media_urls": [
                    "https://cdn.revid.ai/uploads/1747076315114-image.png",
                ],
            },
            test_output=("video_url", "https://example.com/ad.mp4"),
            test_mock={
                "create_video": lambda api_key, payload: {"pid": "test_pid"},
                "wait_for_video": lambda api_key, pid, max_wait_time=3600: "https://example.com/ad.mp4",
            },
            test_credentials=TEST_CREDENTIALS,
        )

    def run(self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs):

        payload = {
            "webhook": None,
            "creationParams": {
                "targetDuration": input_data.target_duration,
                "ratio": input_data.ratio,
                "mediaType": "aiVideo",
                "inputText": input_data.script,
                "flowType": "text-to-video",
                "slug": "ai-ad-generator",
                "slugNew": "",
                "isCopiedFrom": False,
                "hasToGenerateVoice": True,
                "hasToTranscript": False,
                "hasToSearchMedia": True,
                "hasAvatar": False,
                "hasWebsiteRecorder": False,
                "hasTextSmallAtBottom": False,
                "selectedAudio": input_data.background_music.value,
                "selectedVoice": input_data.voice.voice_id,
                "selectedAvatar": "https://cdn.revid.ai/avatars/young-woman.mp4",
                "selectedAvatarType": "video/mp4",
                "websiteToRecord": "",
                "hasToGenerateCover": True,
                "nbGenerations": 1,
                "disableCaptions": False,
                "mediaMultiplier": "medium",
                "characters": [],
                "captionPresetName": "Revid",
                "sourceType": "contentScraping",
                "selectedStoryStyle": {"value": "custom", "label": "General"},
                "generationPreset": "DEFAULT",
                "hasToGenerateMusic": False,
                "isOptimizedForChinese": False,
                "generationUserPrompt": "",
                "enableNsfwFilter": False,
                "addStickers": False,
                "typeMovingImageAnim": "dynamic",
                "hasToGenerateSoundEffects": False,
                "forceModelType": "gpt-image-1",
                "selectedCharacters": [],
                "lang": "",
                "voiceSpeed": 1,
                "disableAudio": False,
                "disableVoice": False,
                "useOnlyProvidedMedia": input_data.use_only_provided_media,
                "imageGenerationModel": "ultra",
                "videoGenerationModel": "base",
                "hasEnhancedGeneration": True,
                "hasEnhancedGenerationPro": True,
                "inputMedias": [
                    {"url": url, "title": "", "type": "image"}
                    for url in input_data.input_media_urls
                ],
                "hasToGenerateVideos": True,
                "audioUrl": input_data.background_music.audio_url,
                "watermark": None,
            },
        }

        response = self.create_video(credentials.api_key, payload)
        pid = response.get("pid")
        if not pid:
            raise RuntimeError("Failed to create video: No project ID returned")

        video_url = self.wait_for_video(credentials.api_key, pid)
        yield "video_url", video_url


class AIPromptToVideoCreatorBlock(Block, _RevidMixin):
    """Turns a single creative prompt into a fully AI‑generated video."""

    class Input(BlockSchema):
        credentials: CredentialsMetaInput[
            Literal[ProviderName.REVID], Literal["api_key"]
        ] = CredentialsField(description="Revid.ai API credentials")
        prompt: str = SchemaField(
            description="Imaginative prompt describing the desired video.",
            placeholder="A neon‑lit cyberpunk alley with rain‑soaked pavements.",
        )
        ratio: str = SchemaField(default="9 / 16")
        prompt_target_duration: int = SchemaField(default=30)
        voice: Voice = SchemaField(default=Voice.EVA)
        background_music: AudioTrack = SchemaField(
            default=AudioTrack.DONT_STOP_ME_ABSTRACT_FUTURE_BASS
        )

    class Output(BlockSchema):
        video_url: str = SchemaField(description="Rendered video URL")
        error: str = SchemaField(description="Error message if any")

    def __init__(self):
        super().__init__(
            id="46f4099c-ad01-4d79-874c-37a24c937ba3",
            description="Creates an AI video from a single prompt (no line‑breaking script).",
            categories={BlockCategory.AI, BlockCategory.SOCIAL},
            input_schema=AIPromptToVideoCreatorBlock.Input,
            output_schema=AIPromptToVideoCreatorBlock.Output,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "prompt": "Epic time‑lapse of a city skyline from day to night",
            },
            test_output=("video_url", "https://example.com/prompt.mp4"),
            test_mock={
                "create_video": lambda api_key, payload: {"pid": "test_pid"},
                "wait_for_video": lambda api_key, pid, max_wait_time=3600: "https://example.com/prompt.mp4",
            },
            test_credentials=TEST_CREDENTIALS,
        )

    def run(self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs):

        payload = {
            "webhook": None,
            "creationParams": {
                "mediaType": "aiVideo",
                "flowType": "prompt-to-video",
                "slug": "prompt-to-video",
                "slugNew": "",
                "isCopiedFrom": False,
                "hasToGenerateVoice": True,
                "hasToTranscript": False,
                "hasToSearchMedia": True,
                "hasAvatar": False,
                "hasWebsiteRecorder": False,
                "hasTextSmallAtBottom": False,
                "ratio": input_data.ratio,
                "selectedAudio": input_data.background_music.value,
                "selectedVoice": input_data.voice.voice_id,
                "selectedAvatar": "https://cdn.revid.ai/avatars/young-woman.mp4",
                "selectedAvatarType": "video/mp4",
                "websiteToRecord": "",
                "hasToGenerateCover": True,
                "nbGenerations": 1,
                "disableCaptions": False,
                "characters": [],
                "captionPresetName": "Revid",
                "sourceType": "contentScraping",
                "selectedStoryStyle": {"value": "custom", "label": "General"},
                "generationPreset": "DEFAULT",
                "hasToGenerateMusic": False,
                "isOptimizedForChinese": False,
                "generationUserPrompt": input_data.prompt,
                "enableNsfwFilter": False,
                "addStickers": False,
                "typeMovingImageAnim": "dynamic",
                "hasToGenerateSoundEffects": False,
                "promptTargetDuration": input_data.prompt_target_duration,
                "selectedCharacters": [],
                "lang": "",
                "voiceSpeed": 1,
                "disableAudio": False,
                "disableVoice": False,
                "imageGenerationModel": "good",
                "videoGenerationModel": "base",
                "hasEnhancedGeneration": True,
                "hasEnhancedGenerationPro": True,
                "inputMedias": [],
                "hasToGenerateVideos": True,
                "audioUrl": input_data.background_music.audio_url,
                "watermark": None,
            },
        }

        response = self.create_video(credentials.api_key, payload)
        pid = response.get("pid")
        if not pid:
            raise RuntimeError("Failed to create video: No project ID returned")

        video_url = self.wait_for_video(credentials.api_key, pid)
        yield "video_url", video_url


class AIScreenshotToVideoAdBlock(Block, _RevidMixin):
    """Creates an advert where the supplied screenshot is narrated by an AI avatar."""

    class Input(BlockSchema):
        credentials: CredentialsMetaInput[
            Literal[ProviderName.REVID], Literal["api_key"]
        ] = CredentialsField(description="Revid.ai API key")
        script: str = SchemaField(
            description="Narration that will accompany the screenshot.",
            placeholder="Check out these amazing stats!",
        )
        screenshot_url: str = SchemaField(
            description="Screenshot or image URL to showcase."
        )
        ratio: str = SchemaField(default="9 / 16")
        target_duration: int = SchemaField(default=30)
        voice: Voice = SchemaField(default=Voice.EVA)
        background_music: AudioTrack = SchemaField(
            default=AudioTrack.DONT_STOP_ME_ABSTRACT_FUTURE_BASS
        )

    class Output(BlockSchema):
        video_url: str = SchemaField(description="Rendered video URL")
        error: str = SchemaField(description="Error, if encountered")

    def __init__(self):
        super().__init__(
            id="9f68982c-3af6-4923-9a97-b50a8c8d2234",
            description="Turns a screenshot into an engaging, avatar‑narrated video advert.",
            categories={BlockCategory.AI, BlockCategory.MARKETING},
            input_schema=AIScreenshotToVideoAdBlock.Input,
            output_schema=AIScreenshotToVideoAdBlock.Output,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "script": "Amazing numbers!",
                "screenshot_url": "https://cdn.revid.ai/uploads/1747080376028-image.png",
            },
            test_output=("video_url", "https://example.com/screenshot.mp4"),
            test_mock={
                "create_video": lambda api_key, payload: {"pid": "test_pid"},
                "wait_for_video": lambda api_key, pid, max_wait_time=3600: "https://example.com/screenshot.mp4",
            },
            test_credentials=TEST_CREDENTIALS,
        )

    def run(self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs):

        payload = {
            "webhook": None,
            "creationParams": {
                "targetDuration": input_data.target_duration,
                "ratio": input_data.ratio,
                "mediaType": "aiVideo",
                "hasAvatar": True,
                "removeAvatarBackground": True,
                "inputText": input_data.script,
                "flowType": "text-to-video",
                "slug": "ai-ad-generator",
                "slugNew": "screenshot-to-video-ad",
                "isCopiedFrom": "ai-ad-generator",
                "hasToGenerateVoice": True,
                "hasToTranscript": False,
                "hasToSearchMedia": True,
                "hasWebsiteRecorder": False,
                "hasTextSmallAtBottom": False,
                "selectedAudio": input_data.background_music.value,
                "selectedVoice": input_data.voice.voice_id,
                "selectedAvatar": "https://cdn.revid.ai/avatars/young-woman.mp4",
                "selectedAvatarType": "video/mp4",
                "websiteToRecord": "",
                "hasToGenerateCover": True,
                "nbGenerations": 1,
                "disableCaptions": False,
                "mediaMultiplier": "medium",
                "characters": [],
                "captionPresetName": "Revid",
                "sourceType": "contentScraping",
                "selectedStoryStyle": {"value": "custom", "label": "General"},
                "generationPreset": "DEFAULT",
                "hasToGenerateMusic": False,
                "isOptimizedForChinese": False,
                "generationUserPrompt": "",
                "enableNsfwFilter": False,
                "addStickers": False,
                "typeMovingImageAnim": "dynamic",
                "hasToGenerateSoundEffects": False,
                "forceModelType": "gpt-image-1",
                "selectedCharacters": [],
                "lang": "",
                "voiceSpeed": 1,
                "disableAudio": False,
                "disableVoice": False,
                "useOnlyProvidedMedia": True,
                "imageGenerationModel": "ultra",
                "videoGenerationModel": "base",
                "hasEnhancedGeneration": True,
                "hasEnhancedGenerationPro": True,
                "inputMedias": [
                    {"url": input_data.screenshot_url, "title": "", "type": "image"}
                ],
                "hasToGenerateVideos": True,
                "audioUrl": input_data.background_music.audio_url,
                "watermark": None,
            },
        }

        response = self.create_video(credentials.api_key, payload)
        pid = response.get("pid")
        if not pid:
            raise RuntimeError("Failed to create video: No project ID returned")

        video_url = self.wait_for_video(credentials.api_key, pid)
        yield "video_url", video_url
        else:
            logger.debug(
                f"Video created with project ID: {pid}. Waiting for completion..."
            )
            video_url = self.wait_for_video(credentials.api_key, pid, webhook_token)
            logger.debug(f"Video ready: {video_url}")
            yield "video_url", video_url
```
```diff
@@ -1,108 +0,0 @@
import logging
from typing import List

from backend.blocks.apollo._auth import ApolloCredentials
from backend.blocks.apollo.models import (
    Contact,
    Organization,
    SearchOrganizationsRequest,
    SearchOrganizationsResponse,
    SearchPeopleRequest,
    SearchPeopleResponse,
)
from backend.util.request import Requests

logger = logging.getLogger(name=__name__)


class ApolloClient:
    """Client for the Apollo API"""

    API_URL = "https://api.apollo.io/api/v1"

    def __init__(self, credentials: ApolloCredentials):
        self.credentials = credentials
        self.requests = Requests()

    def _get_headers(self) -> dict[str, str]:
        return {"x-api-key": self.credentials.api_key.get_secret_value()}

    def search_people(self, query: SearchPeopleRequest) -> List[Contact]:
        """Search for people in Apollo"""
        response = self.requests.get(
            f"{self.API_URL}/mixed_people/search",
            headers=self._get_headers(),
            params=query.model_dump(exclude={"credentials", "max_results"}),
        )
        parsed_response = SearchPeopleResponse(**response.json())
        if parsed_response.pagination.total_entries == 0:
            return []

        people = parsed_response.people

        # handle pagination
        if (
            query.max_results is not None
            and query.max_results < parsed_response.pagination.total_entries
            and len(people) < query.max_results
        ):
            while (
                len(people) < query.max_results
                and query.page < parsed_response.pagination.total_pages
                and len(parsed_response.people) > 0
            ):
                query.page += 1
                response = self.requests.get(
                    f"{self.API_URL}/mixed_people/search",
                    headers=self._get_headers(),
                    params=query.model_dump(exclude={"credentials", "max_results"}),
                )
                parsed_response = SearchPeopleResponse(**response.json())
                people.extend(parsed_response.people[: query.max_results - len(people)])

        logger.info(f"Found {len(people)} people")
        return people[: query.max_results] if query.max_results else people

    def search_organizations(
        self, query: SearchOrganizationsRequest
    ) -> List[Organization]:
        """Search for organizations in Apollo"""
        response = self.requests.get(
            f"{self.API_URL}/mixed_companies/search",
            headers=self._get_headers(),
            params=query.model_dump(exclude={"credentials", "max_results"}),
        )
        parsed_response = SearchOrganizationsResponse(**response.json())
        if parsed_response.pagination.total_entries == 0:
            return []

        organizations = parsed_response.organizations

        # handle pagination
        if (
            query.max_results is not None
            and query.max_results < parsed_response.pagination.total_entries
            and len(organizations) < query.max_results
        ):
            while (
                len(organizations) < query.max_results
                and query.page < parsed_response.pagination.total_pages
                and len(parsed_response.organizations) > 0
            ):
                query.page += 1
                response = self.requests.get(
                    f"{self.API_URL}/mixed_companies/search",
                    headers=self._get_headers(),
                    params=query.model_dump(exclude={"credentials", "max_results"}),
                )
                parsed_response = SearchOrganizationsResponse(**response.json())
                organizations.extend(
                    parsed_response.organizations[
                        : query.max_results - len(organizations)
                    ]
                )

        logger.info(f"Found {len(organizations)} organizations")
        return (
            organizations[: query.max_results] if query.max_results else organizations
        )
```
```diff
@@ -1,35 +0,0 @@
from typing import Literal

from pydantic import SecretStr

from backend.data.model import APIKeyCredentials, CredentialsField, CredentialsMetaInput
from backend.integrations.providers import ProviderName

ApolloCredentials = APIKeyCredentials
ApolloCredentialsInput = CredentialsMetaInput[
    Literal[ProviderName.APOLLO],
    Literal["api_key"],
]

TEST_CREDENTIALS = APIKeyCredentials(
    id="01234567-89ab-cdef-0123-456789abcdef",
    provider="apollo",
    api_key=SecretStr("mock-apollo-api-key"),
    title="Mock Apollo API key",
    expires_at=None,
)
TEST_CREDENTIALS_INPUT = {
    "provider": TEST_CREDENTIALS.provider,
    "id": TEST_CREDENTIALS.id,
    "type": TEST_CREDENTIALS.type,
    "title": TEST_CREDENTIALS.title,
}


def ApolloCredentialsField() -> ApolloCredentialsInput:
    """
    Creates a Apollo credentials input on a block.
    """
    return CredentialsField(
        description="The Apollo integration can be used with an API Key.",
    )
```
```diff
@@ -1,548 +0,0 @@
from enum import Enum
from typing import Any, Optional

from pydantic import BaseModel, ConfigDict

from backend.data.model import SchemaField


class PrimaryPhone(BaseModel):
    """A primary phone in Apollo"""

    number: str
    source: str
    sanitized_number: str


class SenorityLevels(str, Enum):
    """Seniority levels in Apollo"""

    OWNER = "owner"
    FOUNDER = "founder"
    C_SUITE = "c_suite"
    PARTNER = "partner"
    VP = "vp"
    HEAD = "head"
    DIRECTOR = "director"
    MANAGER = "manager"
    SENIOR = "senior"
    ENTRY = "entry"
    INTERN = "intern"


class ContactEmailStatuses(str, Enum):
    """Contact email statuses in Apollo"""

    VERIFIED = "verified"
    UNVERIFIED = "unverified"
    LIKELY_TO_ENGAGE = "likely_to_engage"
    UNAVAILABLE = "unavailable"


class RuleConfigStatus(BaseModel):
    """A rule config status in Apollo"""

    _id: str
    created_at: str
    rule_action_config_id: str
    rule_config_id: str
    status_cd: str
    updated_at: str
    id: str
    key: str


class ContactCampaignStatus(BaseModel):
    """A contact campaign status in Apollo"""

    id: str
    emailer_campaign_id: str
    send_email_from_user_id: str
    inactive_reason: str
    status: str
    added_at: str
    added_by_user_id: str
    finished_at: str
    paused_at: str
    auto_unpause_at: str
    send_email_from_email_address: str
    send_email_from_email_account_id: str
    manually_set_unpause: str
    failure_reason: str
    current_step_id: str
    in_response_to_emailer_message_id: str
    cc_emails: str
    bcc_emails: str
    to_emails: str


class Account(BaseModel):
    """An account in Apollo"""

    id: str
    name: str
    website_url: str
    blog_url: str
    angellist_url: str
    linkedin_url: str
    twitter_url: str
    facebook_url: str
    primary_phone: PrimaryPhone
    languages: list[str]
    alexa_ranking: int
    phone: str
    linkedin_uid: str
    founded_year: int
    publicly_traded_symbol: str
    publicly_traded_exchange: str
    logo_url: str
    chrunchbase_url: str
    primary_domain: str
    domain: str
    team_id: str
    organization_id: str
    account_stage_id: str
    source: str
    original_source: str
    creator_id: str
    owner_id: str
    created_at: str
    phone_status: str
    hubspot_id: str
    salesforce_id: str
    crm_owner_id: str
    parent_account_id: str
    sanitized_phone: str
    # no listed type on the API docs
    account_playbook_statues: list[Any]
    account_rule_config_statuses: list[RuleConfigStatus]
    existence_level: str
    label_ids: list[str]
    typed_custom_fields: Any
    custom_field_errors: Any
    modality: str
    source_display_name: str
    salesforce_record_id: str
    crm_record_url: str


class ContactEmail(BaseModel):
    """A contact email in Apollo"""

    email: str = ""
    email_md5: str = ""
    email_sha256: str = ""
    email_status: str = ""
    email_source: str = ""
    extrapolated_email_confidence: str = ""
    position: int = 0
    email_from_customer: str = ""
    free_domain: bool = True


class EmploymentHistory(BaseModel):
    """An employment history in Apollo"""

    model_config = ConfigDict(
        extra="allow",
        arbitrary_types_allowed=True,
        from_attributes=True,
        populate_by_name=True,
    )

    _id: Optional[str] = None
    created_at: Optional[str] = None
    current: Optional[bool] = None
    degree: Optional[str] = None
    description: Optional[str] = None
    emails: Optional[str] = None
    end_date: Optional[str] = None
    grade_level: Optional[str] = None
    kind: Optional[str] = None
    major: Optional[str] = None
    organization_id: Optional[str] = None
    organization_name: Optional[str] = None
    raw_address: Optional[str] = None
    start_date: Optional[str] = None
    title: Optional[str] = None
    updated_at: Optional[str] = None
    id: Optional[str] = None
    key: Optional[str] = None


class Breadcrumb(BaseModel):
    """A breadcrumb in Apollo"""

    label: Optional[str] = "N/A"
    signal_field_name: Optional[str] = "N/A"
    value: str | list | None = "N/A"
    display_name: Optional[str] = "N/A"


class TypedCustomField(BaseModel):
    """A typed custom field in Apollo"""

    id: Optional[str] = "N/A"
    value: Optional[str] = "N/A"


class Pagination(BaseModel):
    """Pagination in Apollo"""

    model_config = ConfigDict(
        extra="allow",
        arbitrary_types_allowed=True,
        from_attributes=True,
        populate_by_name=True,
    )

    page: int = 0
    per_page: int = 0
    total_entries: int = 0
    total_pages: int = 0


class DialerFlags(BaseModel):
    """A dialer flags in Apollo"""

    country_name: str
    country_enabled: bool
    high_risk_calling_enabled: bool
    potential_high_risk_number: bool


class PhoneNumber(BaseModel):
    """A phone number in Apollo"""

    raw_number: str = ""
    sanitized_number: str = ""
    type: str = ""
    position: int = 0
    status: str = ""
    dnc_status: str = ""
    dnc_other_info: str = ""
    dailer_flags: DialerFlags = DialerFlags(
        country_name="",
        country_enabled=True,
        high_risk_calling_enabled=True,
        potential_high_risk_number=True,
    )


class Organization(BaseModel):
    """An organization in Apollo"""

    model_config = ConfigDict(
        extra="allow",
        arbitrary_types_allowed=True,
        from_attributes=True,
        populate_by_name=True,
    )

    id: Optional[str] = "N/A"
    name: Optional[str] = "N/A"
    website_url: Optional[str] = "N/A"
    blog_url: Optional[str] = "N/A"
    angellist_url: Optional[str] = "N/A"
    linkedin_url: Optional[str] = "N/A"
    twitter_url: Optional[str] = "N/A"
    facebook_url: Optional[str] = "N/A"
    primary_phone: Optional[PrimaryPhone] = PrimaryPhone(
        number="N/A", source="N/A", sanitized_number="N/A"
    )
    languages: list[str] = []
    alexa_ranking: Optional[int] = 0
    phone: Optional[str] = "N/A"
    linkedin_uid: Optional[str] = "N/A"
    founded_year: Optional[int] = 0
    publicly_traded_symbol: Optional[str] = "N/A"
    publicly_traded_exchange: Optional[str] = "N/A"
    logo_url: Optional[str] = "N/A"
    chrunchbase_url: Optional[str] = "N/A"
    primary_domain: Optional[str] = "N/A"
    sanitized_phone: Optional[str] = "N/A"
    owned_by_organization_id: Optional[str] = "N/A"
    intent_strength: Optional[str] = "N/A"
    show_intent: bool = True
    has_intent_signal_account: Optional[bool] = True
    intent_signal_account: Optional[str] = "N/A"


class Contact(BaseModel):
    """A contact in Apollo"""

    model_config = ConfigDict(
        extra="allow",
        arbitrary_types_allowed=True,
        from_attributes=True,
        populate_by_name=True,
    )

    contact_roles: list[Any] = []
    id: Optional[str] = None
    first_name: Optional[str] = None
    last_name: Optional[str] = None
    name: Optional[str] = None
    linkedin_url: Optional[str] = None
    title: Optional[str] = None
    contact_stage_id: Optional[str] = None
    owner_id: Optional[str] = None
    creator_id: Optional[str] = None
    person_id: Optional[str] = None
    email_needs_tickling: bool = True
    organization_name: Optional[str] = None
    source: Optional[str] = None
    original_source: Optional[str] = None
    organization_id: Optional[str] = None
    headline: Optional[str] = None
    photo_url: Optional[str] = None
    present_raw_address: Optional[str] = None
    linkededin_uid: Optional[str] = None
    extrapolated_email_confidence: Optional[float] = None
    salesforce_id: Optional[str] = None
    salesforce_lead_id: Optional[str] = None
    salesforce_contact_id: Optional[str] = None
    saleforce_account_id: Optional[str] = None
    crm_owner_id: Optional[str] = None
    created_at: Optional[str] = None
    emailer_campaign_ids: list[str] = []
    direct_dial_status: Optional[str] = None
    direct_dial_enrichment_failed_at: Optional[str] = None
    email_status: Optional[str] = None
    email_source: Optional[str] = None
    account_id: Optional[str] = None
    last_activity_date: Optional[str] = None
    hubspot_vid: Optional[str] = None
    hubspot_company_id: Optional[str] = None
    crm_id: Optional[str] = None
    sanitized_phone: Optional[str] = None
    merged_crm_ids: Optional[str] = None
    updated_at: Optional[str] = None
    queued_for_crm_push: bool = True
    suggested_from_rule_engine_config_id: Optional[str] = None
    email_unsubscribed: Optional[str] = None
    label_ids: list[Any] = []
    has_pending_email_arcgate_request: bool = True
    has_email_arcgate_request: bool = True
    existence_level: Optional[str] = None
    email: Optional[str] = None
    email_from_customer: Optional[str] = None
    typed_custom_fields: list[TypedCustomField] = []
    custom_field_errors: Any = None
    salesforce_record_id: Optional[str] = None
    crm_record_url: Optional[str] = None
    email_status_unavailable_reason: Optional[str] = None
    email_true_status: Optional[str] = None
    updated_email_true_status: bool = True
    contact_rule_config_statuses: list[RuleConfigStatus] = []
    source_display_name: Optional[str] = None
    twitter_url: Optional[str] = None
    contact_campaign_statuses: list[ContactCampaignStatus] = []
    state: Optional[str] = None
    city: Optional[str] = None
    country: Optional[str] = None
    account: Optional[Account] = None
    contact_emails: list[ContactEmail] = []
    organization: Optional[Organization] = None
    employment_history: list[EmploymentHistory] = []
    time_zone: Optional[str] = None
    intent_strength: Optional[str] = None
    show_intent: bool = True
    phone_numbers: list[PhoneNumber] = []
    account_phone_note: Optional[str] = None
    free_domain: bool = True
    is_likely_to_engage: bool = True
    email_domain_catchall: bool = True
    contact_job_change_event: Optional[str] = None


class SearchOrganizationsRequest(BaseModel):
    """Request for Apollo's search organizations API"""

    organization_num_empoloyees_range: list[int] = SchemaField(
        description="""The number range of employees working for the company. This enables you to find companies based on headcount. You can add multiple ranges to expand your search results.

Each range you add needs to be a string, with the upper and lower numbers of the range separated only by a comma.""",
        default=[0, 1000000],
    )

    organization_locations: list[str] = SchemaField(
        description="""The location of the company headquarters. You can search across cities, US states, and countries.

If a company has several office locations, results are still based on the headquarters location. For example, if you search chicago but a company's HQ location is in boston, any Boston-based companies will not appearch in your search results, even if they match other parameters.

To exclude companies based on location, use the organization_not_locations parameter.
""",
        default_factory=list,
    )
    organizations_not_locations: list[str] = SchemaField(
        description="""Exclude companies from search results based on the location of the company headquarters. You can use cities, US states, and countries as locations to exclude.

This parameter is useful for ensuring you do not prospect in an undesirable territory. For example, if you use ireland as a value, no Ireland-based companies will appear in your search results.
""",
        default_factory=list,
    )
    q_organization_keyword_tags: list[str] = SchemaField(
        description="""Filter search results based on keywords associated with companies. For example, you can enter mining as a value to return only companies that have an association with the mining industry."""
    )
    q_organization_name: str = SchemaField(
        description="""Filter search results to include a specific company name.

If the value you enter for this parameter does not match with a company's name, the company will not appear in search results, even if it matches other parameters. Partial matches are accepted. For example, if you filter by the value marketing, a company called NY Marketing Unlimited would still be eligible as a search result, but NY Market Analysis would not be eligible."""
    )
    organization_ids: list[str] = SchemaField(
        description="""The Apollo IDs for the companies you want to include in your search results. Each company in the Apollo database is assigned a unique ID.

To find IDs, identify the values for organization_id when you call this endpoint.""",
        default_factory=list,
    )
    max_results: int = SchemaField(
        description="""The maximum number of results to return. If you don't specify this parameter, the default is 100.""",
        default=100,
        ge=1,
        le=50000,
        advanced=True,
    )

    page: int = SchemaField(
        description="""The page number of the Apollo data that you want to retrieve.

Use this parameter in combination with the per_page parameter to make search results for navigable and improve the performance of the endpoint.""",
        default=1,
    )
    per_page: int = SchemaField(
        description="""The number of search results that should be returned for each page. Limited the number of results per page improves the endpoint's performance.

Use the page parameter to search the different pages of data.""",
        default=100,
    )


class SearchOrganizationsResponse(BaseModel):
    """Response from Apollo's search organizations API"""

    breadcrumbs: list[Breadcrumb] = []
    partial_results_only: bool = True
    has_join: bool = True
    disable_eu_prospecting: bool = True
    partial_results_limit: int = 0
    pagination: Pagination = Pagination(
        page=0, per_page=0, total_entries=0, total_pages=0
    )
    # no listed type on the API docs
    accounts: list[Any] = []
    organizations: list[Organization] = []
    models_ids: list[str] = []
    num_fetch_result: Optional[str] = "N/A"
    derived_params: Optional[str] = "N/A"


class SearchPeopleRequest(BaseModel):
    """Request for Apollo's search people API"""

    person_titles: list[str] = SchemaField(
        description="""Job titles held by the people you want to find. For a person to be included in search results, they only need to match 1 of the job titles you add. Adding more job titles expands your search results.

Results also include job titles with the same terms, even if they are not exact matches. For example, searching for marketing manager might return people with the job title content marketing manager.

Use this parameter in combination with the person_seniorities[] parameter to find people based on specific job functions and seniority levels.
""",
        default_factory=list,
        placeholder="marketing manager",
    )
    person_locations: list[str] = SchemaField(
        description="""The location where people live. You can search across cities, US states, and countries.

To find people based on the headquarters locations of their current employer, use the organization_locations parameter.""",
        default_factory=list,
    )
    person_seniorities: list[SenorityLevels] = SchemaField(
        description="""The job seniority that people hold within their current employer. This enables you to find people that currently hold positions at certain reporting levels, such as Director level or senior IC level.

For a person to be included in search results, they only need to match 1 of the seniorities you add. Adding more seniorities expands your search results.

Searches only return results based on their current job title, so searching for Director-level employees only returns people that currently hold a Director-level title. If someone was previously a Director, but is currently a VP, they would not be included in your search results.

Use this parameter in combination with the person_titles[] parameter to find people based on specific job functions and seniority levels.""",
        default_factory=list,
    )
    organization_locations: list[str] = SchemaField(
        description="""The location of the company headquarters for a person's current employer. You can search across cities, US states, and countries.

If a company has several office locations, results are still based on the headquarters location. For example, if you search chicago but a company's HQ location is in boston, people that work for the Boston-based company will not appear in your results, even if they match other parameters.

To find people based on their personal location, use the person_locations parameter.""",
        default_factory=list,
    )
    q_organization_domains: list[str] = SchemaField(
        description="""The domain name for the person's employer. This can be the current employer or a previous employer. Do not include www., the @ symbol, or similar.

You can add multiple domains to search across companies.

Examples: apollo.io and microsoft.com""",
        default_factory=list,
    )
    contact_email_statuses: list[ContactEmailStatuses] = SchemaField(
        description="""The email statuses for the people you want to find. You can add multiple statuses to expand your search.""",
        default_factory=list,
    )
    organization_ids: list[str] = SchemaField(
        description="""The Apollo IDs for the companies (employers) you want to include in your search results. Each company in the Apollo database is assigned a unique ID.

To find IDs, call the Organization Search endpoint and identify the values for organization_id.""",
        default_factory=list,
    )
    organization_num_empoloyees_range: list[int] = SchemaField(
        description="""The number range of employees working for the company. This enables you to find companies based on headcount. You can add multiple ranges to expand your search results.

Each range you add needs to be a string, with the upper and lower numbers of the range separated only by a comma.""",
```
|
||||
default_factory=list,
|
||||
)
|
||||
q_keywords: str = SchemaField(
|
||||
description="""A string of words over which we want to filter the results""",
|
||||
default="",
|
||||
)
|
||||
page: int = SchemaField(
|
||||
description="""The page number of the Apollo data that you want to retrieve.
|
||||
|
||||
Use this parameter in combination with the per_page parameter to make search results for navigable and improve the performance of the endpoint.""",
|
||||
default=1,
|
||||
)
|
||||
per_page: int = SchemaField(
|
||||
description="""The number of search results that should be returned for each page. Limited the number of results per page improves the endpoint's performance.
|
||||
|
||||
Use the page parameter to search the different pages of data.""",
|
||||
default=100,
|
||||
)
|
||||
max_results: int = SchemaField(
|
||||
description="""The maximum number of results to return. If you don't specify this parameter, the default is 100.""",
|
||||
default=100,
|
||||
ge=1,
|
||||
le=50000,
|
||||
advanced=True,
|
||||
)
|
||||
|
||||
|
||||
class SearchPeopleResponse(BaseModel):
|
||||
"""Response from Apollo's search people API"""
|
||||
|
||||
model_config = ConfigDict(
|
||||
extra="allow",
|
||||
arbitrary_types_allowed=True,
|
||||
from_attributes=True,
|
||||
populate_by_name=True,
|
||||
)
|
||||
|
||||
breadcrumbs: list[Breadcrumb] = []
|
||||
partial_results_only: bool = True
|
||||
has_join: bool = True
|
||||
disable_eu_prospecting: bool = True
|
||||
partial_results_limit: int = 0
|
||||
pagination: Pagination = Pagination(
|
||||
page=0, per_page=0, total_entries=0, total_pages=0
|
||||
)
|
||||
contacts: list[Contact] = []
|
||||
people: list[Contact] = []
|
||||
model_ids: list[str] = []
|
||||
num_fetch_result: Optional[str] = "N/A"
|
||||
derived_params: Optional[str] = "N/A"
|
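A note on how these request models are used downstream: the blocks later in this diff dump their Input straight into these models, and pagination is driven by page/per_page while max_results caps the total rows fetched. A minimal sketch, with purely illustrative field values:

# Illustrative only: pairs page/per_page with the max_results cap described above.
request = SearchPeopleRequest(
    person_titles=["marketing manager"],  # matches the field's placeholder
    per_page=100,     # rows per page; smaller pages improve response time
    page=1,           # advance this value to walk subsequent pages
    max_results=250,  # overall cap across all pages
)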
@@ -1,219 +0,0 @@
from backend.blocks.apollo._api import ApolloClient
from backend.blocks.apollo._auth import (
    TEST_CREDENTIALS,
    TEST_CREDENTIALS_INPUT,
    ApolloCredentials,
    ApolloCredentialsInput,
)
from backend.blocks.apollo.models import (
    Organization,
    PrimaryPhone,
    SearchOrganizationsRequest,
)
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField


class SearchOrganizationsBlock(Block):
    """Search for organizations in Apollo"""

    class Input(BlockSchema):
        organization_num_empoloyees_range: list[int] = SchemaField(
            description="""The number range of employees working for the company. This enables you to find companies based on headcount. You can add multiple ranges to expand your search results.

Each range you add needs to be a string, with the upper and lower numbers of the range separated only by a comma.""",
            default=[0, 1000000],
        )

        organization_locations: list[str] = SchemaField(
            description="""The location of the company headquarters. You can search across cities, US states, and countries.

If a company has several office locations, results are still based on the headquarters location. For example, if you search chicago but a company's HQ location is in boston, any Boston-based companies will not appear in your search results, even if they match other parameters.

To exclude companies based on location, use the organization_not_locations parameter.
""",
            default_factory=list,
        )
        organizations_not_locations: list[str] = SchemaField(
            description="""Exclude companies from search results based on the location of the company headquarters. You can use cities, US states, and countries as locations to exclude.

This parameter is useful for ensuring you do not prospect in an undesirable territory. For example, if you use ireland as a value, no Ireland-based companies will appear in your search results.
""",
            default_factory=list,
        )
        q_organization_keyword_tags: list[str] = SchemaField(
            description="""Filter search results based on keywords associated with companies. For example, you can enter mining as a value to return only companies that have an association with the mining industry.""",
            default_factory=list,
        )
        q_organization_name: str = SchemaField(
            description="""Filter search results to include a specific company name.

If the value you enter for this parameter does not match with a company's name, the company will not appear in search results, even if it matches other parameters. Partial matches are accepted. For example, if you filter by the value marketing, a company called NY Marketing Unlimited would still be eligible as a search result, but NY Market Analysis would not be eligible.""",
            default="",
            advanced=False,
        )
        organization_ids: list[str] = SchemaField(
            description="""The Apollo IDs for the companies you want to include in your search results. Each company in the Apollo database is assigned a unique ID.

To find IDs, identify the values for organization_id when you call this endpoint.""",
            default_factory=list,
        )
        max_results: int = SchemaField(
            description="""The maximum number of results to return. If you don't specify this parameter, the default is 100.""",
            default=100,
            ge=1,
            le=50000,
            advanced=True,
        )
        credentials: ApolloCredentialsInput = SchemaField(
            description="Apollo credentials",
        )

    class Output(BlockSchema):
        organizations: list[Organization] = SchemaField(
            description="List of organizations found",
            default_factory=list,
        )
        organization: Organization = SchemaField(
            description="Each found organization, one at a time",
        )
        error: str = SchemaField(
            description="Error message if the search failed",
            default="",
        )

    def __init__(self):
        super().__init__(
            id="3d71270d-599e-4148-9b95-71b35d2f44f0",
            description="Search for organizations in Apollo",
            categories={BlockCategory.SEARCH},
            input_schema=SearchOrganizationsBlock.Input,
            output_schema=SearchOrganizationsBlock.Output,
            test_credentials=TEST_CREDENTIALS,
            test_input={"query": "Google", "credentials": TEST_CREDENTIALS_INPUT},
            test_output=[
                (
                    "organization",
                    Organization(
                        id="1",
                        name="Google",
                        website_url="https://google.com",
                        blog_url="https://google.com/blog",
                        angellist_url="https://angel.co/google",
                        linkedin_url="https://linkedin.com/company/google",
                        twitter_url="https://twitter.com/google",
                        facebook_url="https://facebook.com/google",
                        primary_phone=PrimaryPhone(
                            source="google",
                            number="1234567890",
                            sanitized_number="1234567890",
                        ),
                        languages=["en"],
                        alexa_ranking=1000,
                        phone="1234567890",
                        linkedin_uid="1234567890",
                        founded_year=2000,
                        publicly_traded_symbol="GOOGL",
                        publicly_traded_exchange="NASDAQ",
                        logo_url="https://google.com/logo.png",
                        chrunchbase_url="https://chrunchbase.com/google",
                        primary_domain="google.com",
                        sanitized_phone="1234567890",
                        owned_by_organization_id="1",
                        intent_strength="strong",
                        show_intent=True,
                        has_intent_signal_account=True,
                        intent_signal_account="1",
                    ),
                ),
                (
                    "organizations",
                    [
                        Organization(
                            id="1",
                            name="Google",
                            website_url="https://google.com",
                            blog_url="https://google.com/blog",
                            angellist_url="https://angel.co/google",
                            linkedin_url="https://linkedin.com/company/google",
                            twitter_url="https://twitter.com/google",
                            facebook_url="https://facebook.com/google",
                            primary_phone=PrimaryPhone(
                                source="google",
                                number="1234567890",
                                sanitized_number="1234567890",
                            ),
                            languages=["en"],
                            alexa_ranking=1000,
                            phone="1234567890",
                            linkedin_uid="1234567890",
                            founded_year=2000,
                            publicly_traded_symbol="GOOGL",
                            publicly_traded_exchange="NASDAQ",
                            logo_url="https://google.com/logo.png",
                            chrunchbase_url="https://chrunchbase.com/google",
                            primary_domain="google.com",
                            sanitized_phone="1234567890",
                            owned_by_organization_id="1",
                            intent_strength="strong",
                            show_intent=True,
                            has_intent_signal_account=True,
                            intent_signal_account="1",
                        ),
                    ],
                ),
            ],
            test_mock={
                "search_organizations": lambda *args, **kwargs: [
                    Organization(
                        id="1",
                        name="Google",
                        website_url="https://google.com",
                        blog_url="https://google.com/blog",
                        angellist_url="https://angel.co/google",
                        linkedin_url="https://linkedin.com/company/google",
                        twitter_url="https://twitter.com/google",
                        facebook_url="https://facebook.com/google",
                        primary_phone=PrimaryPhone(
                            source="google",
                            number="1234567890",
                            sanitized_number="1234567890",
                        ),
                        languages=["en"],
                        alexa_ranking=1000,
                        phone="1234567890",
                        linkedin_uid="1234567890",
                        founded_year=2000,
                        publicly_traded_symbol="GOOGL",
                        publicly_traded_exchange="NASDAQ",
                        logo_url="https://google.com/logo.png",
                        chrunchbase_url="https://chrunchbase.com/google",
                        primary_domain="google.com",
                        sanitized_phone="1234567890",
                        owned_by_organization_id="1",
                        intent_strength="strong",
                        show_intent=True,
                        has_intent_signal_account=True,
                        intent_signal_account="1",
                    )
                ]
            },
        )

    @staticmethod
    def search_organizations(
        query: SearchOrganizationsRequest, credentials: ApolloCredentials
    ) -> list[Organization]:
        client = ApolloClient(credentials)
        return client.search_organizations(query)

    def run(
        self, input_data: Input, *, credentials: ApolloCredentials, **kwargs
    ) -> BlockOutput:
        query = SearchOrganizationsRequest(
            **input_data.model_dump(exclude={"credentials"})
        )
        organizations = self.search_organizations(query, credentials)
        for organization in organizations:
            yield "organization", organization
        yield "organizations", organizations
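The run method above streams each Organization as a separate "organization" output and then emits the full list once, so downstream graph nodes can consume either granularity. A minimal driver sketch, assuming the test credentials from _auth and the (output_name, value) tuple convention of BlockOutput:

# Hypothetical driver for local experimentation, not executor code.
block = SearchOrganizationsBlock()
for name, value in block.run(
    SearchOrganizationsBlock.Input(
        q_organization_name="marketing",
        credentials=TEST_CREDENTIALS_INPUT,
    ),
    credentials=TEST_CREDENTIALS,
):
    if name == "organization":
        print(value.name)            # one Organization at a time
    elif name == "organizations":
        print(len(value), "total")   # the full list, emitted last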
@@ -1,394 +0,0 @@
from backend.blocks.apollo._api import ApolloClient
from backend.blocks.apollo._auth import (
    TEST_CREDENTIALS,
    TEST_CREDENTIALS_INPUT,
    ApolloCredentials,
    ApolloCredentialsInput,
)
from backend.blocks.apollo.models import (
    Contact,
    ContactEmailStatuses,
    SearchPeopleRequest,
    SenorityLevels,
)
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField


class SearchPeopleBlock(Block):
    """Search for people in Apollo"""

    class Input(BlockSchema):
        person_titles: list[str] = SchemaField(
            description="""Job titles held by the people you want to find. For a person to be included in search results, they only need to match 1 of the job titles you add. Adding more job titles expands your search results.

Results also include job titles with the same terms, even if they are not exact matches. For example, searching for marketing manager might return people with the job title content marketing manager.

Use this parameter in combination with the person_seniorities[] parameter to find people based on specific job functions and seniority levels.
""",
            default_factory=list,
            advanced=False,
        )
        person_locations: list[str] = SchemaField(
            description="""The location where people live. You can search across cities, US states, and countries.

To find people based on the headquarters locations of their current employer, use the organization_locations parameter.""",
            default_factory=list,
            advanced=False,
        )
        person_seniorities: list[SenorityLevels] = SchemaField(
            description="""The job seniority that people hold within their current employer. This enables you to find people that currently hold positions at certain reporting levels, such as Director level or senior IC level.

For a person to be included in search results, they only need to match 1 of the seniorities you add. Adding more seniorities expands your search results.

Searches only return results based on their current job title, so searching for Director-level employees only returns people that currently hold a Director-level title. If someone was previously a Director, but is currently a VP, they would not be included in your search results.

Use this parameter in combination with the person_titles[] parameter to find people based on specific job functions and seniority levels.""",
            default_factory=list,
            advanced=False,
        )
        organization_locations: list[str] = SchemaField(
            description="""The location of the company headquarters for a person's current employer. You can search across cities, US states, and countries.

If a company has several office locations, results are still based on the headquarters location. For example, if you search chicago but a company's HQ location is in boston, people that work for the Boston-based company will not appear in your results, even if they match other parameters.

To find people based on their personal location, use the person_locations parameter.""",
            default_factory=list,
            advanced=False,
        )
        q_organization_domains: list[str] = SchemaField(
            description="""The domain name for the person's employer. This can be the current employer or a previous employer. Do not include www., the @ symbol, or similar.

You can add multiple domains to search across companies.

Examples: apollo.io and microsoft.com""",
            default_factory=list,
            advanced=False,
        )
        contact_email_statuses: list[ContactEmailStatuses] = SchemaField(
            description="""The email statuses for the people you want to find. You can add multiple statuses to expand your search.""",
            default_factory=list,
            advanced=False,
        )
        organization_ids: list[str] = SchemaField(
            description="""The Apollo IDs for the companies (employers) you want to include in your search results. Each company in the Apollo database is assigned a unique ID.

To find IDs, call the Organization Search endpoint and identify the values for organization_id.""",
            default_factory=list,
            advanced=False,
        )
        organization_num_empoloyees_range: list[int] = SchemaField(
            description="""The number range of employees working for the company. This enables you to find companies based on headcount. You can add multiple ranges to expand your search results.

Each range you add needs to be a string, with the upper and lower numbers of the range separated only by a comma.""",
            default_factory=list,
            advanced=False,
        )
        q_keywords: str = SchemaField(
            description="""A string of words over which we want to filter the results""",
            default="",
            advanced=False,
        )
        max_results: int = SchemaField(
            description="""The maximum number of results to return. If you don't specify this parameter, the default is 100.""",
            default=100,
            ge=1,
            le=50000,
            advanced=True,
        )

        credentials: ApolloCredentialsInput = SchemaField(
            description="Apollo credentials",
        )

    class Output(BlockSchema):
        people: list[Contact] = SchemaField(
            description="List of people found",
            default_factory=list,
        )
        person: Contact = SchemaField(
            description="Each found person, one at a time",
        )
        error: str = SchemaField(
            description="Error message if the search failed",
            default="",
        )

    def __init__(self):
        super().__init__(
            id="c2adb3aa-5aae-488d-8a6e-4eb8c23e2ed6",
            description="Search for people in Apollo",
            categories={BlockCategory.SEARCH},
            input_schema=SearchPeopleBlock.Input,
            output_schema=SearchPeopleBlock.Output,
            test_credentials=TEST_CREDENTIALS,
            test_input={"credentials": TEST_CREDENTIALS_INPUT},
            test_output=[
                (
                    "person",
                    Contact(
                        contact_roles=[],
                        id="1",
                        name="John Doe",
                        first_name="John",
                        last_name="Doe",
                        linkedin_url="https://www.linkedin.com/in/johndoe",
                        title="Software Engineer",
                        organization_name="Google",
                        organization_id="123456",
                        contact_stage_id="1",
                        owner_id="1",
                        creator_id="1",
                        person_id="1",
                        email_needs_tickling=True,
                        source="apollo",
                        original_source="apollo",
                        headline="Software Engineer",
                        photo_url="https://www.linkedin.com/in/johndoe",
                        present_raw_address="123 Main St, Anytown, USA",
                        linkededin_uid="123456",
                        extrapolated_email_confidence=0.8,
                        salesforce_id="123456",
                        salesforce_lead_id="123456",
                        salesforce_contact_id="123456",
                        saleforce_account_id="123456",
                        crm_owner_id="123456",
                        created_at="2021-01-01",
                        emailer_campaign_ids=[],
                        direct_dial_status="active",
                        direct_dial_enrichment_failed_at="2021-01-01",
                        email_status="active",
                        email_source="apollo",
                        account_id="123456",
                        last_activity_date="2021-01-01",
                        hubspot_vid="123456",
                        hubspot_company_id="123456",
                        crm_id="123456",
                        sanitized_phone="123456",
                        merged_crm_ids="123456",
                        updated_at="2021-01-01",
                        queued_for_crm_push=True,
                        suggested_from_rule_engine_config_id="123456",
                        email_unsubscribed=None,
                        label_ids=[],
                        has_pending_email_arcgate_request=True,
                        has_email_arcgate_request=True,
                        existence_level=None,
                        email=None,
                        email_from_customer=None,
                        typed_custom_fields=[],
                        custom_field_errors=None,
                        salesforce_record_id=None,
                        crm_record_url=None,
                        email_status_unavailable_reason=None,
                        email_true_status=None,
                        updated_email_true_status=True,
                        contact_rule_config_statuses=[],
                        source_display_name=None,
                        twitter_url=None,
                        contact_campaign_statuses=[],
                        state=None,
                        city=None,
                        country=None,
                        account=None,
                        contact_emails=[],
                        organization=None,
                        employment_history=[],
                        time_zone=None,
                        intent_strength=None,
                        show_intent=True,
                        phone_numbers=[],
                        account_phone_note=None,
                        free_domain=True,
                        is_likely_to_engage=True,
                        email_domain_catchall=True,
                        contact_job_change_event=None,
                    ),
                ),
                (
                    "people",
                    [
                        Contact(
                            contact_roles=[],
                            id="1",
                            name="John Doe",
                            first_name="John",
                            last_name="Doe",
                            linkedin_url="https://www.linkedin.com/in/johndoe",
                            title="Software Engineer",
                            organization_name="Google",
                            organization_id="123456",
                            contact_stage_id="1",
                            owner_id="1",
                            creator_id="1",
                            person_id="1",
                            email_needs_tickling=True,
                            source="apollo",
                            original_source="apollo",
                            headline="Software Engineer",
                            photo_url="https://www.linkedin.com/in/johndoe",
                            present_raw_address="123 Main St, Anytown, USA",
                            linkededin_uid="123456",
                            extrapolated_email_confidence=0.8,
                            salesforce_id="123456",
                            salesforce_lead_id="123456",
                            salesforce_contact_id="123456",
                            saleforce_account_id="123456",
                            crm_owner_id="123456",
                            created_at="2021-01-01",
                            emailer_campaign_ids=[],
                            direct_dial_status="active",
                            direct_dial_enrichment_failed_at="2021-01-01",
                            email_status="active",
                            email_source="apollo",
                            account_id="123456",
                            last_activity_date="2021-01-01",
                            hubspot_vid="123456",
                            hubspot_company_id="123456",
                            crm_id="123456",
                            sanitized_phone="123456",
                            merged_crm_ids="123456",
                            updated_at="2021-01-01",
                            queued_for_crm_push=True,
                            suggested_from_rule_engine_config_id="123456",
                            email_unsubscribed=None,
                            label_ids=[],
                            has_pending_email_arcgate_request=True,
                            has_email_arcgate_request=True,
                            existence_level=None,
                            email=None,
                            email_from_customer=None,
                            typed_custom_fields=[],
                            custom_field_errors=None,
                            salesforce_record_id=None,
                            crm_record_url=None,
                            email_status_unavailable_reason=None,
                            email_true_status=None,
                            updated_email_true_status=True,
                            contact_rule_config_statuses=[],
                            source_display_name=None,
                            twitter_url=None,
                            contact_campaign_statuses=[],
                            state=None,
                            city=None,
                            country=None,
                            account=None,
                            contact_emails=[],
                            organization=None,
                            employment_history=[],
                            time_zone=None,
                            intent_strength=None,
                            show_intent=True,
                            phone_numbers=[],
                            account_phone_note=None,
                            free_domain=True,
                            is_likely_to_engage=True,
                            email_domain_catchall=True,
                            contact_job_change_event=None,
                        ),
                    ],
                ),
            ],
            test_mock={
                "search_people": lambda query, credentials: [
                    Contact(
                        id="1",
                        name="John Doe",
                        first_name="John",
                        last_name="Doe",
                        linkedin_url="https://www.linkedin.com/in/johndoe",
                        title="Software Engineer",
                        organization_name="Google",
                        organization_id="123456",
                        contact_stage_id="1",
                        owner_id="1",
                        creator_id="1",
                        person_id="1",
                        email_needs_tickling=True,
                        source="apollo",
                        original_source="apollo",
                        headline="Software Engineer",
                        photo_url="https://www.linkedin.com/in/johndoe",
                        present_raw_address="123 Main St, Anytown, USA",
                        linkededin_uid="123456",
                        extrapolated_email_confidence=0.8,
                        salesforce_id="123456",
                        salesforce_lead_id="123456",
                        salesforce_contact_id="123456",
                        saleforce_account_id="123456",
                        crm_owner_id="123456",
                        created_at="2021-01-01",
                        emailer_campaign_ids=[],
                        direct_dial_status="active",
                        direct_dial_enrichment_failed_at="2021-01-01",
                        email_status="active",
                        email_source="apollo",
                        account_id="123456",
                        last_activity_date="2021-01-01",
                        hubspot_vid="123456",
                        hubspot_company_id="123456",
                        crm_id="123456",
                        sanitized_phone="123456",
                        merged_crm_ids="123456",
                        updated_at="2021-01-01",
                        queued_for_crm_push=True,
                        suggested_from_rule_engine_config_id="123456",
                        email_unsubscribed=None,
                        label_ids=[],
                        has_pending_email_arcgate_request=True,
                        has_email_arcgate_request=True,
                        existence_level=None,
                        email=None,
                        email_from_customer=None,
                        typed_custom_fields=[],
                        custom_field_errors=None,
                        salesforce_record_id=None,
                        crm_record_url=None,
                        email_status_unavailable_reason=None,
                        email_true_status=None,
                        updated_email_true_status=True,
                        contact_rule_config_statuses=[],
                        source_display_name=None,
                        twitter_url=None,
                        contact_campaign_statuses=[],
                        state=None,
                        city=None,
                        country=None,
                        account=None,
                        contact_emails=[],
                        organization=None,
                        employment_history=[],
                        time_zone=None,
                        intent_strength=None,
                        show_intent=True,
                        phone_numbers=[],
                        account_phone_note=None,
                        free_domain=True,
                        is_likely_to_engage=True,
                        email_domain_catchall=True,
                        contact_job_change_event=None,
                    ),
                ]
            },
        )

    @staticmethod
    def search_people(
        query: SearchPeopleRequest, credentials: ApolloCredentials
    ) -> list[Contact]:
        client = ApolloClient(credentials)
        return client.search_people(query)

    def run(
        self,
        input_data: Input,
        *,
        credentials: ApolloCredentials,
        **kwargs,
    ) -> BlockOutput:
        query = SearchPeopleRequest(**input_data.model_dump(exclude={"credentials"}))
        people = self.search_people(query, credentials)
        for person in people:
            yield "person", person
        yield "people", people
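One detail worth flagging from the field descriptions above: Apollo expects each employee-headcount range as a single "min,max" string, yet organization_num_empoloyees_range is typed list[int] here, so the description and the type do not agree. A sketch of the documented wire shape (the exact query-parameter name is an assumption based on Apollo's public docs, not confirmed by this diff):

# Assumed wire format: each range is one comma-separated string.
params = {
    "organization_num_employees_ranges[]": ["1,10", "250,500"],  # assumed param name
}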
@@ -1,482 +0,0 @@
from __future__ import annotations

import json
import logging
from dataclasses import dataclass
from enum import Enum
from typing import Any, Dict, List, Optional, Union

from backend.util.request import Requests
from backend.util.settings import Settings

logger = logging.getLogger(__name__)

settings = Settings()


class AyrshareAPIException(Exception):
    def __init__(self, message: str, status_code: int):
        super().__init__(message)
        self.status_code = status_code


class SocialPlatform(str, Enum):
    BLUESKY = "bluesky"
    FACEBOOK = "facebook"
    TWITTER = "twitter"
    LINKEDIN = "linkedin"
    INSTAGRAM = "instagram"
    YOUTUBE = "youtube"
    REDDIT = "reddit"
    TELEGRAM = "telegram"
    GMB = "gmb"
    PINTEREST = "pinterest"
    TIKTOK = "tiktok"


@dataclass
class EmailConfig:
    to: str
    subject: Optional[str] = None
    body: Optional[str] = None
    from_name: Optional[str] = None
    from_email: Optional[str] = None


@dataclass
class JWTResponse:
    status: str
    title: str
    token: str
    url: str
    emailSent: Optional[bool] = None
    expiresIn: Optional[str] = None


@dataclass
class ProfileResponse:
    status: str
    title: str
    refId: str
    profileKey: str
    messagingActive: Optional[bool] = None


@dataclass
class PostResponse:
    status: str
    id: str
    refId: str
    profileTitle: str
    post: str
    postIds: Optional[List[Dict[str, Any]]] = None
    scheduleDate: Optional[str] = None
    errors: Optional[List[str]] = None


@dataclass
class AutoHashtag:
    max: Optional[int] = None
    position: Optional[str] = None


@dataclass
class FirstComment:
    text: str
    platforms: Optional[List[SocialPlatform]] = None


@dataclass
class AutoSchedule:
    interval: str
    platforms: Optional[List[SocialPlatform]] = None
    startDate: Optional[str] = None
    endDate: Optional[str] = None


@dataclass
class AutoRepost:
    interval: str
    platforms: Optional[List[SocialPlatform]] = None
    startDate: Optional[str] = None
    endDate: Optional[str] = None


@dataclass
class PostError:
    code: int
    message: str
    details: str


class AyrshareClient:
    """Client for the Ayrshare Social Media Post API"""

    API_URL = "https://api.ayrshare.com/api"
    POST_ENDPOINT = f"{API_URL}/post"
    PROFILES_ENDPOINT = f"{API_URL}/profiles"
    JWT_ENDPOINT = f"{PROFILES_ENDPOINT}/generateJWT"

    def __init__(
        self,
        custom_requests: Optional[Requests] = None,
    ):
        headers: Dict[str, str] = {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {settings.secrets.ayrshare_api_key}",
        }
        self.headers = headers

        if custom_requests:
            self._requests = custom_requests
        else:
            self._requests = Requests(
                extra_headers=headers,
                trusted_origins=["https://api.ayrshare.com"],
                raise_for_status=False,
            )

    def generate_jwt(
        self,
        private_key: str,
        profile_key: str,
        logout: Optional[bool] = None,
        redirect: Optional[str] = None,
        allowed_social: Optional[List[SocialPlatform]] = None,
        verify: Optional[bool] = None,
        base64: Optional[bool] = None,
        expires_in: Optional[int] = None,
        email: Optional[EmailConfig] = None,
    ) -> JWTResponse:
        """
        Generate a JSON Web Token (JWT) for use with single sign on.

        Args:
            domain: Domain of app. Must match the domain given during onboarding.
            private_key: Private Key used for encryption.
            profile_key: User Profile Key (not the API Key).
            logout: Automatically logout the current session.
            redirect: URL to redirect to when the "Done" button or logo is clicked.
            allowed_social: List of social networks to display in the linking page.
            verify: Verify that the generated token is valid (recommended for non-production).
            base64: Whether the private key is base64 encoded.
            expires_in: Token longevity in minutes (1-2880).
            email: Configuration for sending Connect Accounts email.

        Returns:
            JWTResponse object containing the JWT token and URL.

        Raises:
            AyrshareAPIException: If the API request fails or private key is invalid.
        """
        payload: Dict[str, Any] = {
            "domain": "id-pojeg",
            "privateKey": private_key,
            "profileKey": profile_key,
        }

        # Copy so the shared default headers are not mutated between calls.
        headers = self.headers.copy()
        headers["Profile-Key"] = profile_key
        if logout is not None:
            payload["logout"] = logout
        if redirect is not None:
            payload["redirect"] = redirect
        if allowed_social is not None:
            payload["allowedSocial"] = [p.value for p in allowed_social]
        if verify is not None:
            payload["verify"] = verify
        if base64 is not None:
            payload["base64"] = base64
        if expires_in is not None:
            payload["expiresIn"] = expires_in
        if email is not None:
            payload["email"] = email.__dict__

        response = self._requests.post(self.JWT_ENDPOINT, json=payload, headers=headers)

        if not response.ok:
            try:
                error_data = response.json()
                error_message = error_data.get("message", response.text)
            except json.JSONDecodeError:
                error_message = response.text

            raise AyrshareAPIException(
                f"Ayrshare API request failed ({response.status_code}): {error_message}",
                response.status_code,
            )

        response_data = response.json()
        if response_data.get("status") != "success":
            raise AyrshareAPIException(
                f"Ayrshare API returned error: {response_data.get('message', 'Unknown error')}",
                response.status_code,
            )

        return JWTResponse(**response_data)
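A usage sketch for generate_jwt, assuming placeholder key values; note the method raises AyrshareAPIException on any non-success response, as documented above:

client = AyrshareClient()
try:
    jwt = client.generate_jwt(
        private_key="-----BEGIN RSA PRIVATE KEY-----...",  # placeholder
        profile_key="YOUR_PROFILE_KEY",                    # placeholder
        expires_in=30,  # minutes; the docstring allows 1-2880
    )
    print(jwt.url)      # single sign-on URL for the account-linking page
except AyrshareAPIException as exc:
    print(exc.status_code, exc)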

    def create_profile(
        self,
        title: str,
        messaging_active: Optional[bool] = None,
        hide_top_header: Optional[bool] = None,
        top_header: Optional[str] = None,
        disable_social: Optional[List[SocialPlatform]] = None,
        team: Optional[bool] = None,
        email: Optional[str] = None,
        sub_header: Optional[str] = None,
        tags: Optional[List[str]] = None,
    ) -> ProfileResponse | PostError:
        """
        Create a new User Profile under your Primary Profile.

        Args:
            title: Title of the new profile. Must be unique.
            messaging_active: Set to true to activate messaging for this user profile.
            hide_top_header: Hide the top header on the social accounts linkage page.
            top_header: Change the header on the social accounts linkage page.
            disable_social: Array of social networks that are disabled for this user's profile.
            team: Create a new user profile as a team member.
            email: Email address for team member invite (required if team is true).
            sub_header: Change the sub header on the social accounts linkage page.
            tags: Array of strings to tag user profiles.

        Returns:
            ProfileResponse object containing the profile details and profile key.

        Raises:
            AyrshareAPIException: If the API request fails or profile title already exists.
        """
        payload: Dict[str, Any] = {
            "title": title,
        }

        if messaging_active is not None:
            payload["messagingActive"] = messaging_active
        if hide_top_header is not None:
            payload["hideTopHeader"] = hide_top_header
        if top_header is not None:
            payload["topHeader"] = top_header
        if disable_social is not None:
            payload["disableSocial"] = [p.value for p in disable_social]
        if team is not None:
            payload["team"] = team
        if email is not None:
            payload["email"] = email
        if sub_header is not None:
            payload["subHeader"] = sub_header
        if tags is not None:
            payload["tags"] = tags

        response = self._requests.post(self.PROFILES_ENDPOINT, json=payload)

        if not response.ok:
            try:
                error_data = response.json()
                error_message = error_data.get("message", response.text)
            except json.JSONDecodeError:
                error_message = response.text

            raise AyrshareAPIException(
                f"Ayrshare API request failed ({response.status_code}): {error_message}",
                response.status_code,
            )

        response_data = response.json()
        if response_data.get("status") != "success":
            raise AyrshareAPIException(
                f"Ayrshare API returned error: {response_data.get('message', 'Unknown error')}",
                response.status_code,
            )

        return ProfileResponse(**response_data)

    def create_post(
        self,
        post: str,
        platforms: List[SocialPlatform],
        media_urls: Optional[List[str]] = None,
        is_video: Optional[bool] = None,
        schedule_date: Optional[str] = None,
        first_comment: Optional[FirstComment] = None,
        disable_comments: Optional[bool] = None,
        shorten_links: Optional[bool] = None,
        auto_schedule: Optional[AutoSchedule] = None,
        auto_repost: Optional[AutoRepost] = None,
        auto_hashtag: Optional[Union[AutoHashtag, bool]] = None,
        unsplash: Optional[str] = None,
        bluesky_options: Optional[Dict[str, Any]] = None,
        facebook_options: Optional[Dict[str, Any]] = None,
        gmb_options: Optional[Dict[str, Any]] = None,
        instagram_options: Optional[Dict[str, Any]] = None,
        linkedin_options: Optional[Dict[str, Any]] = None,
        pinterest_options: Optional[Dict[str, Any]] = None,
        reddit_options: Optional[Dict[str, Any]] = None,
        telegram_options: Optional[Dict[str, Any]] = None,
        threads_options: Optional[Dict[str, Any]] = None,
        tiktok_options: Optional[Dict[str, Any]] = None,
        twitter_options: Optional[Dict[str, Any]] = None,
        youtube_options: Optional[Dict[str, Any]] = None,
        requires_approval: Optional[bool] = None,
        random_post: Optional[bool] = None,
        random_media_url: Optional[bool] = None,
        idempotency_key: Optional[str] = None,
        notes: Optional[str] = None,
        profile_key: Optional[str] = None,
    ) -> PostResponse | PostError:
        """
        Create a post across multiple social media platforms.

        Args:
            post: The post text to be published
            platforms: List of platforms to post to (e.g. [SocialPlatform.TWITTER, SocialPlatform.FACEBOOK])
            media_urls: Optional list of media URLs to include
            is_video: Whether the media is a video
            schedule_date: UTC datetime for scheduling (YYYY-MM-DDThh:mm:ssZ)
            first_comment: Configuration for first comment
            disable_comments: Whether to disable comments
            shorten_links: Whether to shorten links
            auto_schedule: Configuration for automatic scheduling
            auto_repost: Configuration for automatic reposting
            auto_hashtag: Configuration for automatic hashtags
            unsplash: Unsplash image configuration
            bluesky_options: Bluesky-specific options
            facebook_options: Facebook-specific options
            gmb_options: Google Business Profile options
            instagram_options: Instagram-specific options
            linkedin_options: LinkedIn-specific options
            pinterest_options: Pinterest-specific options
            reddit_options: Reddit-specific options
            telegram_options: Telegram-specific options
            threads_options: Threads-specific options
            tiktok_options: TikTok-specific options
            twitter_options: Twitter-specific options
            youtube_options: YouTube-specific options
            requires_approval: Whether to enable approval workflow
            random_post: Whether to generate random post text
            random_media_url: Whether to generate random media
            idempotency_key: Unique ID for the post
            notes: Additional notes for the post

        Returns:
            PostResponse object containing the post details and status

        Raises:
            AyrshareAPIException: If the API request fails
        """
        payload: Dict[str, Any] = {
            "post": post,
            "platforms": [p.value for p in platforms],
        }

        # Add optional parameters if provided
        if media_urls:
            payload["mediaUrls"] = media_urls
        if is_video is not None:
            payload["isVideo"] = is_video
        if schedule_date:
            payload["scheduleDate"] = schedule_date
        if first_comment:
            first_comment_dict = first_comment.__dict__.copy()
            if first_comment.platforms:
                first_comment_dict["platforms"] = [
                    p.value for p in first_comment.platforms
                ]
            payload["firstComment"] = first_comment_dict
        if disable_comments is not None:
            payload["disableComments"] = disable_comments
        if shorten_links is not None:
            payload["shortenLinks"] = shorten_links
        if auto_schedule:
            auto_schedule_dict = auto_schedule.__dict__.copy()
            if auto_schedule.platforms:
                auto_schedule_dict["platforms"] = [
                    p.value for p in auto_schedule.platforms
                ]
            payload["autoSchedule"] = auto_schedule_dict
        if auto_repost:
            auto_repost_dict = auto_repost.__dict__.copy()
            if auto_repost.platforms:
                auto_repost_dict["platforms"] = [p.value for p in auto_repost.platforms]
            payload["autoRepost"] = auto_repost_dict
        if auto_hashtag:
            payload["autoHashtag"] = (
                auto_hashtag.__dict__
                if isinstance(auto_hashtag, AutoHashtag)
                else auto_hashtag
            )
        if unsplash:
            payload["unsplash"] = unsplash
        if bluesky_options:
            payload["blueskyOptions"] = bluesky_options
        if facebook_options:
            payload["faceBookOptions"] = facebook_options
        if gmb_options:
            payload["gmbOptions"] = gmb_options
        if instagram_options:
            payload["instagramOptions"] = instagram_options
        if linkedin_options:
            payload["linkedInOptions"] = linkedin_options
        if pinterest_options:
            payload["pinterestOptions"] = pinterest_options
        if reddit_options:
            payload["redditOptions"] = reddit_options
        if telegram_options:
            payload["telegramOptions"] = telegram_options
        if threads_options:
            payload["threadsOptions"] = threads_options
        if tiktok_options:
            payload["tikTokOptions"] = tiktok_options
        if twitter_options:
            payload["twitterOptions"] = twitter_options
        if youtube_options:
            payload["youTubeOptions"] = youtube_options
        if requires_approval is not None:
            payload["requiresApproval"] = requires_approval
        if random_post is not None:
            payload["randomPost"] = random_post
        if random_media_url is not None:
            payload["randomMediaUrl"] = random_media_url
        if idempotency_key:
            payload["idempotencyKey"] = idempotency_key
        if notes:
            payload["notes"] = notes

        # Copy so the shared default headers are not mutated between calls.
        headers = self.headers.copy()
        if profile_key:
            headers["Profile-Key"] = profile_key

        response = self._requests.post(
            self.POST_ENDPOINT, json=payload, headers=headers
        )

        if not response.ok:
            try:
                error_data = response.json()
                error_message = error_data.get("message", response.text)
                error_code = error_data.get("code", response.status_code)
                error_details = error_data.get("details", {})
                logger.error(error_data)
                return PostError(
                    code=error_code,
                    message=error_message,
                    details=error_details,
                )
            except json.JSONDecodeError:
                error_message = response.text

            raise AyrshareAPIException(
                f"Ayrshare API request failed ({response.status_code}): {error_message}",
                response.status_code,
            )

        response_data = response.json()
        if response_data.get("status") != "success":
            raise AyrshareAPIException(
                f"Ayrshare API returned error: {response_data.get('message', 'Unknown error')}",
                response.status_code,
            )

        # Return the first post from the response
        return PostResponse(**response_data["posts"][0])
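Before the blocks that wrap it, a sketch of calling create_post directly; the media URL is a placeholder, and note that a parseable API error comes back as a PostError value rather than being raised:

client = AyrshareClient()
result = client.create_post(
    post="Hello from the API",
    platforms=[SocialPlatform.TWITTER, SocialPlatform.FACEBOOK],
    media_urls=["https://example.com/image.png"],  # placeholder URL
    shorten_links=True,
)
if isinstance(result, PostError):
    print("post failed:", result.code, result.message)
else:
    print("posted:", result.id, result.status)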
@@ -1,531 +0,0 @@
import logging
from datetime import datetime
from typing import List, Optional

from pydantic import BaseModel, Field, SecretStr

from backend.blocks.ayrshare._api import (
    AyrshareClient,
    PostError,
    PostResponse,
    SocialPlatform,
)
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema, BlockType
from backend.data.model import SchemaField
from backend.integrations.credentials_store import IntegrationCredentialsStore

logger = logging.getLogger(__name__)

creads_store = IntegrationCredentialsStore()


class RequestOutput(BaseModel):
    """Base output model for Ayrshare social media posts."""

    status: str = Field(..., description="Status of the post")
    id: str = Field(..., description="ID of the post")
    refId: str = Field(..., description="Reference ID of the post")
    profileTitle: str = Field(..., description="Title of the profile")
    post: str = Field(..., description="The post text")
    postIds: Optional[List[dict]] = Field(
        description="IDs of the posts on each platform"
    )
    scheduleDate: Optional[str] = Field(description="Scheduled date of the post")
    errors: Optional[List[str]] = Field(description="Any errors that occurred")


class AyrsharePostBlockBase(Block):
    """Base class for Ayrshare social media posting blocks."""

    class Input(BlockSchema):
        """Base input model for Ayrshare social media posts."""

        post: str = SchemaField(
            description="The post text to be published", default="", advanced=False
        )
        media_urls: List[str] = SchemaField(
            description="Optional list of media URLs to include. Set is_video in advanced settings to true if you want to upload videos.",
            default_factory=list,
            advanced=False,
        )
        is_video: bool = SchemaField(
            description="Whether the media is a video", default=False, advanced=True
        )
        schedule_date: Optional[datetime] = SchemaField(
            description="UTC datetime for scheduling (YYYY-MM-DDThh:mm:ssZ)",
            default=None,
            advanced=True,
        )
        disable_comments: bool = SchemaField(
            description="Whether to disable comments", default=False, advanced=True
        )
        shorten_links: bool = SchemaField(
            description="Whether to shorten links", default=False, advanced=True
        )

        unsplash: Optional[str] = SchemaField(
            description="Unsplash image configuration", default=None, advanced=True
        )
        requires_approval: bool = SchemaField(
            description="Whether to enable approval workflow",
            default=False,
            advanced=True,
        )
        random_post: bool = SchemaField(
            description="Whether to generate random post text",
            default=False,
            advanced=True,
        )
        random_media_url: bool = SchemaField(
            description="Whether to generate random media", default=False, advanced=True
        )
        notes: Optional[str] = SchemaField(
            description="Additional notes for the post", default=None, advanced=True
        )

    class Output(BlockSchema):
        post_result: RequestOutput = SchemaField(description="The result of the post")

    def __init__(
        self,
        id="b3a7b3b9-5169-410a-9d5c-fd625460fb14",
        description="Ayrshare Post",
    ):
        super().__init__(
            # The unique identifier for the block, this value will be persisted in the DB.
            # It should be unique and constant across the application run.
            # Use the UUID format for the ID.
            id=id,
            # The description of the block, explaining what the block does.
            description=description,
            # The set of categories that the block belongs to.
            # Each category is an instance of BlockCategory Enum.
            categories={BlockCategory.SOCIAL},
            # The type of block, this is used to determine the block type in the UI.
            block_type=BlockType.AYRSHARE,
            # The schema, defined as a Pydantic model, for the input data.
            input_schema=AyrsharePostBlockBase.Input,
            # The schema, defined as a Pydantic model, for the output data.
            output_schema=AyrsharePostBlockBase.Output,
        )

    @staticmethod
    def create_client():
        return AyrshareClient()

    def _create_post(
        self,
        input_data: "AyrsharePostBlockBase.Input",
        platforms: List[SocialPlatform],
        profile_key: Optional[str] = None,
    ) -> PostResponse | PostError:
        """Create a post on the specified platforms."""
        client = self.create_client()
        iso_date = (
            input_data.schedule_date.isoformat() if input_data.schedule_date else None
        )
        response = client.create_post(
            post=input_data.post,
            platforms=platforms,
            media_urls=input_data.media_urls,
            is_video=input_data.is_video,
            schedule_date=iso_date,
            disable_comments=input_data.disable_comments,
            shorten_links=input_data.shorten_links,
            unsplash=input_data.unsplash,
            requires_approval=input_data.requires_approval,
            random_post=input_data.random_post,
            random_media_url=input_data.random_media_url,
            notes=input_data.notes,
            profile_key=profile_key,
        )
        return response
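
    # Design note: every platform-specific block below funnels through
    # _create_post with a one-element platform list, so adding a network is
    # just a subclass carrying a new UUID. A minimal usage sketch, assuming a
    # linked Ayrshare profile key (placeholder value):
    #
    #   block = PostToFacebookBlock()
    #   for name, value in block.run(
    #       AyrsharePostBlockBase.Input(post="Hello world"),
    #       profile_key=SecretStr("YOUR_PROFILE_KEY"),
    #   ):
    #       print(name, value)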

    def run(
        self,
        input_data: "AyrsharePostBlockBase.Input",
        *,
        profile_key: SecretStr,
        **kwargs,
    ) -> BlockOutput:
        """Run the block."""
        platforms = [SocialPlatform.FACEBOOK]

        if not profile_key:
            yield "error", "Please link a social account via Ayrshare"
            return

        post_result = self._create_post(
            input_data, platforms=platforms, profile_key=profile_key.get_secret_value()
        )
        if isinstance(post_result, PostError):
            yield "error", post_result.message
            return
        yield "post_result", post_result


class PostToFacebookBlock(AyrsharePostBlockBase):
    """Block for posting to Facebook."""

    def __init__(self):
        super().__init__(
            id="3352f512-3524-49ed-a08f-003042da2fc1",
            description="Post to Facebook using Ayrshare",
        )

    def run(
        self,
        input_data: AyrsharePostBlockBase.Input,
        *,
        profile_key: SecretStr,
        **kwargs,
    ) -> BlockOutput:
        """Post to Facebook."""
        if not profile_key:
            yield "error", "Please link a social account via Ayrshare"
            return

        post_result = self._create_post(
            input_data,
            [SocialPlatform.FACEBOOK],
            profile_key=profile_key.get_secret_value(),
        )
        if isinstance(post_result, PostError):
            yield "error", post_result.message
            return
        yield "post_result", post_result


class PostToXBlock(AyrsharePostBlockBase):
    """Block for posting to X / Twitter."""

    def __init__(self):
        super().__init__(
            id="9e8f844e-b4a5-4b25-80f2-9e1dd7d67625",
            description="Post to X / Twitter using Ayrshare",
        )

    def run(
        self,
        input_data: AyrsharePostBlockBase.Input,
        *,
        profile_key: SecretStr,
        **kwargs,
    ) -> BlockOutput:
        """Post to Twitter."""
        if not profile_key:
            yield "error", "Please link a social account via Ayrshare"
            return

        post_result = self._create_post(
            input_data,
            [SocialPlatform.TWITTER],
            profile_key=profile_key.get_secret_value(),
        )
        if isinstance(post_result, PostError):
            yield "error", post_result.message
            return
        yield "post_result", post_result


class PostToLinkedInBlock(AyrsharePostBlockBase):
    """Block for posting to LinkedIn."""

    def __init__(self):
        super().__init__(
            id="589af4e4-507f-42fd-b9ac-a67ecef25811",
            description="Post to LinkedIn using Ayrshare",
        )

    def run(
        self,
        input_data: AyrsharePostBlockBase.Input,
        *,
        profile_key: SecretStr,
        **kwargs,
    ) -> BlockOutput:
        """Post to LinkedIn."""
        if not profile_key:
            yield "error", "Please link a social account via Ayrshare"
            return

        post_result = self._create_post(
            input_data,
            [SocialPlatform.LINKEDIN],
            profile_key=profile_key.get_secret_value(),
        )
        if isinstance(post_result, PostError):
            yield "error", post_result.message
            return
        yield "post_result", post_result


class PostToInstagramBlock(AyrsharePostBlockBase):
    """Block for posting to Instagram."""

    def __init__(self):
        super().__init__(
            id="89b02b96-a7cb-46f4-9900-c48b32fe1552",
            description="Post to Instagram using Ayrshare",
        )

    def run(
        self,
        input_data: AyrsharePostBlockBase.Input,
        *,
        profile_key: SecretStr,
        **kwargs,
    ) -> BlockOutput:
        """Post to Instagram."""
        if not profile_key:
            yield "error", "Please link a social account via Ayrshare"
            return

        post_result = self._create_post(
            input_data,
            [SocialPlatform.INSTAGRAM],
            profile_key=profile_key.get_secret_value(),
        )
        if isinstance(post_result, PostError):
            yield "error", post_result.message
            return
        yield "post_result", post_result


class PostToYouTubeBlock(AyrsharePostBlockBase):
    """Block for posting to YouTube."""

    def __init__(self):
        super().__init__(
            id="0082d712-ff1b-4c3d-8a8d-6c7721883b83",
            description="Post to YouTube using Ayrshare",
        )

    def run(
        self,
        input_data: AyrsharePostBlockBase.Input,
        *,
        profile_key: SecretStr,
        **kwargs,
    ) -> BlockOutput:
        """Post to YouTube."""
        if not profile_key:
            yield "error", "Please link a social account via Ayrshare"
            return

        post_result = self._create_post(
            input_data,
            [SocialPlatform.YOUTUBE],
            profile_key=profile_key.get_secret_value(),
        )
        if isinstance(post_result, PostError):
            yield "error", post_result.message
            return
        yield "post_result", post_result


class PostToRedditBlock(AyrsharePostBlockBase):
    """Block for posting to Reddit."""

    def __init__(self):
        super().__init__(
            id="c7733580-3c72-483e-8e47-a8d58754d853",
            description="Post to Reddit using Ayrshare",
        )

    def run(
        self,
        input_data: AyrsharePostBlockBase.Input,
        *,
        profile_key: SecretStr,
        **kwargs,
    ) -> BlockOutput:
        """Post to Reddit."""
        if not profile_key:
            yield "error", "Please link a social account via Ayrshare"
            return

        post_result = self._create_post(
            input_data,
            [SocialPlatform.REDDIT],
            profile_key=profile_key.get_secret_value(),
        )
        if isinstance(post_result, PostError):
            yield "error", post_result.message
            return
        yield "post_result", post_result


class PostToTelegramBlock(AyrsharePostBlockBase):
    """Block for posting to Telegram."""

    def __init__(self):
        super().__init__(
            id="47bc74eb-4af2-452c-b933-af377c7287df",
            description="Post to Telegram using Ayrshare",
        )

    def run(
        self,
        input_data: AyrsharePostBlockBase.Input,
        *,
        profile_key: SecretStr,
        **kwargs,
    ) -> BlockOutput:
        """Post to Telegram."""
        if not profile_key:
            yield "error", "Please link a social account via Ayrshare"
            return

        post_result = self._create_post(
            input_data,
            [SocialPlatform.TELEGRAM],
            profile_key=profile_key.get_secret_value(),
        )
        if isinstance(post_result, PostError):
            yield "error", post_result.message
            return
        yield "post_result", post_result


class PostToGMBBlock(AyrsharePostBlockBase):
    """Block for posting to Google My Business."""

    def __init__(self):
        super().__init__(
            id="2c38c783-c484-4503-9280-ef5d1d345a7e",
            description="Post to Google My Business using Ayrshare",
        )

    def run(
        self,
        input_data: AyrsharePostBlockBase.Input,
        *,
        profile_key: SecretStr,
        **kwargs,
    ) -> BlockOutput:
        """Post to Google My Business."""
        if not profile_key:
            yield "error", "Please link a social account via Ayrshare"
            return

        post_result = self._create_post(
            input_data,
            [SocialPlatform.GMB],
            profile_key=profile_key.get_secret_value(),
        )
        if isinstance(post_result, PostError):
            yield "error", post_result.message
            return
        yield "post_result", post_result


class PostToPinterestBlock(AyrsharePostBlockBase):
    """Block for posting to Pinterest."""

    def __init__(self):
        super().__init__(
            id="3ca46e05-dbaa-4afb-9e95-5a429c4177e6",
            description="Post to Pinterest using Ayrshare",
        )

    def run(
        self,
        input_data: AyrsharePostBlockBase.Input,
        *,
        profile_key: SecretStr,
        **kwargs,
    ) -> BlockOutput:
        """Post to Pinterest."""
        if not profile_key:
            yield "error", "Please link a social account via Ayrshare"
            return

        post_result = self._create_post(
            input_data,
            [SocialPlatform.PINTEREST],
            profile_key=profile_key.get_secret_value(),
        )
        if isinstance(post_result, PostError):
            yield "error", post_result.message
            return
        yield "post_result", post_result


class PostToTikTokBlock(AyrsharePostBlockBase):
    """Block for posting to TikTok."""

    def __init__(self):
        super().__init__(
            id="7faf4b27-96b0-4f05-bf64-e0de54ae74e1",
            description="Post to TikTok using Ayrshare",
        )

    def run(
        self,
        input_data: AyrsharePostBlockBase.Input,
        *,
        profile_key: SecretStr,
        **kwargs,
    ) -> BlockOutput:
        """Post to TikTok."""
        if not profile_key:
            yield "error", "Please link a social account via Ayrshare"
            return

        post_result = self._create_post(
            input_data,
            [SocialPlatform.TIKTOK],
            profile_key=profile_key.get_secret_value(),
        )
        if isinstance(post_result, PostError):
yield "error", post_result.message
|
||||
return
|
||||
yield "post_result", post_result
|
||||
|
||||
|
||||
class PostToBlueskyBlock(AyrsharePostBlockBase):
|
||||
"""Block for posting to Bluesky."""
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="cbd52c2a-06d2-43ed-9560-6576cc163283",
|
||||
description="Post to Bluesky using Ayrshare",
|
||||
)
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: AyrsharePostBlockBase.Input,
|
||||
*,
|
||||
profile_key: SecretStr,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
"""Post to Bluesky."""
|
||||
if not profile_key:
|
||||
yield "error", "Please Link a social account via Ayrshare"
|
||||
return
|
||||
|
||||
post_result = self._create_post(
|
||||
input_data,
|
||||
[SocialPlatform.BLUESKY],
|
||||
profile_key=profile_key.get_secret_value(),
|
||||
)
|
||||
if isinstance(post_result, PostError):
|
||||
yield "error", post_result.message
|
||||
return
|
||||
yield "post_result", post_result
|
||||
|
||||
|
||||
AYRSHARE_NODE_IDS = [
|
||||
PostToBlueskyBlock().id,
|
||||
PostToFacebookBlock().id,
|
||||
PostToXBlock().id,
|
||||
PostToLinkedInBlock().id,
|
||||
PostToInstagramBlock().id,
|
||||
PostToYouTubeBlock().id,
|
||||
PostToRedditBlock().id,
|
||||
PostToTelegramBlock().id,
|
||||
PostToGMBBlock().id,
|
||||
PostToPinterestBlock().id,
|
||||
PostToTikTokBlock().id,
|
||||
]
|
||||
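# Illustrative sketch, not part of the diff above: each Ayrshare block's
# run() is a generator yielding ("output_name", value) tuples. Assuming an
# already-populated AyrsharePostBlockBase.Input instance `inputs` and a
# linked Ayrshare profile key, a caller could drive one block like this:
from pydantic import SecretStr

for name, value in PostToLinkedInBlock().run(
    inputs, profile_key=SecretStr("ayrshare-profile-key")  # placeholder key
):
    print(name, value)  # "post_result" on success, "error" otherwise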
@@ -1,48 +1,11 @@
import enum
from typing import Any, List

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema, BlockType
from backend.data.model import SchemaField
from backend.util import json
from backend.util.file import store_media_file
from backend.util.mock import MockObject
from backend.util.type import MediaFileType, convert
from backend.util.text import TextFormatter


class FileStoreBlock(Block):
    class Input(BlockSchema):
        file_in: MediaFileType = SchemaField(
            description="The file to store in the temporary directory, it can be a URL, data URI, or local path."
        )

    class Output(BlockSchema):
        file_out: MediaFileType = SchemaField(
            description="The relative path to the stored file in the temporary directory."
        )

    def __init__(self):
        super().__init__(
            id="cbb50872-625b-42f0-8203-a2ae78242d8a",
            description="Stores the input file in the temporary directory.",
            categories={BlockCategory.BASIC, BlockCategory.MULTIMEDIA},
            input_schema=FileStoreBlock.Input,
            output_schema=FileStoreBlock.Output,
            static_output=True,
        )

    def run(
        self,
        input_data: Input,
        *,
        graph_exec_id: str,
        **kwargs,
    ) -> BlockOutput:
        file_path = store_media_file(
            graph_exec_id=graph_exec_id,
            file=input_data.file_in,
            return_content=False,
        )
        yield "file_out", file_path
formatter = TextFormatter()


class StoreValueBlock(Block):
@@ -90,10 +53,9 @@ class StoreValueBlock(Block):

class PrintToConsoleBlock(Block):
    class Input(BlockSchema):
        text: Any = SchemaField(description="The data to print to the console.")
        text: str = SchemaField(description="The text to print to the console.")

    class Output(BlockSchema):
        output: Any = SchemaField(description="The data printed to the console.")
        status: str = SchemaField(description="The status of the print operation.")

    def __init__(self):
@@ -104,14 +66,11 @@ class PrintToConsoleBlock(Block):
            input_schema=PrintToConsoleBlock.Input,
            output_schema=PrintToConsoleBlock.Output,
            test_input={"text": "Hello, World!"},
            test_output=[
                ("output", "Hello, World!"),
                ("status", "printed"),
            ],
            test_output=("status", "printed"),
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        yield "output", input_data.text
        print(">>>>> Print: ", input_data.text)
        yield "status", "printed"


@@ -155,9 +114,6 @@ class FindInDictionaryBlock(Block):
        obj = input_data.input
        key = input_data.key

        if isinstance(obj, str):
            obj = json.loads(obj)

        if isinstance(obj, dict) and key in obj:
            yield "output", obj[key]
        elif isinstance(obj, list) and isinstance(key, int) and 0 <= key < len(obj):
@@ -175,10 +131,190 @@ class FindInDictionaryBlock(Block):
            yield "missing", input_data.input
class AgentInputBlock(Block):
    """
    This block is used to provide input to the graph.

    It takes in a value, name, description, default values list and bool to limit selection to default values.

    It outputs the value passed as input.
    """

    class Input(BlockSchema):
        name: str = SchemaField(description="The name of the input.")
        value: Any = SchemaField(
            description="The value to be passed as input.",
            default=None,
        )
        title: str | None = SchemaField(
            description="The title of the input.", default=None, advanced=True
        )
        description: str | None = SchemaField(
            description="The description of the input.",
            default=None,
            advanced=True,
        )
        placeholder_values: List[Any] = SchemaField(
            description="The placeholder values to be passed as input.",
            default=[],
            advanced=True,
        )
        limit_to_placeholder_values: bool = SchemaField(
            description="Whether to limit the selection to placeholder values.",
            default=False,
            advanced=True,
        )
        advanced: bool = SchemaField(
            description="Whether to show the input in the advanced section, if the field is not required.",
            default=False,
            advanced=True,
        )
        secret: bool = SchemaField(
            description="Whether the input should be treated as a secret.",
            default=False,
            advanced=True,
        )

    class Output(BlockSchema):
        result: Any = SchemaField(description="The value passed as input.")

    def __init__(self):
        super().__init__(
            id="c0a8e994-ebf1-4a9c-a4d8-89d09c86741b",
            description="This block is used to provide input to the graph.",
            input_schema=AgentInputBlock.Input,
            output_schema=AgentInputBlock.Output,
            test_input=[
                {
                    "value": "Hello, World!",
                    "name": "input_1",
                    "description": "This is a test input.",
                    "placeholder_values": [],
                    "limit_to_placeholder_values": False,
                },
                {
                    "value": "Hello, World!",
                    "name": "input_2",
                    "description": "This is a test input.",
                    "placeholder_values": ["Hello, World!"],
                    "limit_to_placeholder_values": True,
                },
            ],
            test_output=[
                ("result", "Hello, World!"),
                ("result", "Hello, World!"),
            ],
            categories={BlockCategory.INPUT, BlockCategory.BASIC},
            block_type=BlockType.INPUT,
            static_output=True,
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        yield "result", input_data.value


class AgentOutputBlock(Block):
    """
    Records the output of the graph for users to see.

    Behavior:
        If `format` is provided and the `value` is of a type that can be formatted,
        the block attempts to format the recorded_value using the `format`.
        If formatting fails or no `format` is provided, the raw `value` is output.
    """

    class Input(BlockSchema):
        value: Any = SchemaField(
            description="The value to be recorded as output.",
            default=None,
            advanced=False,
        )
        name: str = SchemaField(description="The name of the output.")
        title: str | None = SchemaField(
            description="The title of the output.",
            default=None,
            advanced=True,
        )
        description: str | None = SchemaField(
            description="The description of the output.",
            default=None,
            advanced=True,
        )
        format: str = SchemaField(
            description="The format string to be used to format the recorded_value. Use Jinja2 syntax.",
            default="",
            advanced=True,
        )
        advanced: bool = SchemaField(
            description="Whether to treat the output as advanced.",
            default=False,
            advanced=True,
        )
        secret: bool = SchemaField(
            description="Whether the output should be treated as a secret.",
            default=False,
            advanced=True,
        )

    class Output(BlockSchema):
        output: Any = SchemaField(description="The value recorded as output.")

    def __init__(self):
        super().__init__(
            id="363ae599-353e-4804-937e-b2ee3cef3da4",
            description="Stores the output of the graph for users to see.",
            input_schema=AgentOutputBlock.Input,
            output_schema=AgentOutputBlock.Output,
            test_input=[
                {
                    "value": "Hello, World!",
                    "name": "output_1",
                    "description": "This is a test output.",
                    "format": "{{ output_1 }}!!",
                },
                {
                    "value": "42",
                    "name": "output_2",
                    "description": "This is another test output.",
                    "format": "{{ output_2 }}",
                },
                {
                    "value": MockObject(value="!!", key="key"),
                    "name": "output_3",
                    "description": "This is a test output with a mock object.",
                    "format": "{{ output_3 }}",
                },
            ],
            test_output=[
                ("output", "Hello, World!!!"),
                ("output", "42"),
                ("output", MockObject(value="!!", key="key")),
            ],
            categories={BlockCategory.OUTPUT, BlockCategory.BASIC},
            block_type=BlockType.OUTPUT,
            static_output=True,
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        """
        Attempts to format the recorded_value using the fmt_string if provided.
        If formatting fails or no fmt_string is given, returns the original recorded_value.
        """
        if input_data.format:
            try:
                yield "output", formatter.format_string(
                    input_data.format, {input_data.name: input_data.value}
                )
            except Exception as e:
                yield "output", f"Error: {e}, {input_data.value}"
        else:
            yield "output", input_data.value
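# Sketch of the formatting path AgentOutputBlock.run() takes above: the
# module-level TextFormatter renders the Jinja2 `format` template against a
# mapping keyed by the output's name, using the (template, context)
# signature seen in run(). Mirroring the block's own test cases:
fmt = TextFormatter()
assert fmt.format_string("{{ output_1 }}!!", {"output_1": "Hello, World!"}) == "Hello, World!!!"
assert fmt.format_string("{{ output_2 }}", {"output_2": "42"}) == "42"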
class AddToDictionaryBlock(Block):
    class Input(BlockSchema):
        dictionary: dict[Any, Any] = SchemaField(
            default_factory=dict,
            default={},
            description="The dictionary to add the entry to. If not provided, a new dictionary will be created.",
        )
        key: str = SchemaField(
@@ -194,7 +330,7 @@ class AddToDictionaryBlock(Block):
            advanced=False,
        )
        entries: dict[Any, Any] = SchemaField(
            default_factory=dict,
            default={},
            description="The entries to add to the dictionary. This is the batch version of the `key` and `value` fields.",
            advanced=True,
        )
@@ -256,7 +392,7 @@ class AddToDictionaryBlock(Block):
class AddToListBlock(Block):
    class Input(BlockSchema):
        list: List[Any] = SchemaField(
            default_factory=list,
            default=[],
            advanced=False,
            description="The list to add the entry to. If not provided, a new list will be created.",
        )
@@ -266,7 +402,7 @@ class AddToListBlock(Block):
            default=None,
        )
        entries: List[Any] = SchemaField(
            default_factory=lambda: list(),
            default=[],
            description="The entries to add to the list. This is the batch version of the `entry` field.",
            advanced=True,
        )
@@ -333,48 +469,6 @@ class AddToListBlock(Block):
        yield "updated_list", updated_list
class FindInListBlock(Block):
    class Input(BlockSchema):
        list: List[Any] = SchemaField(description="The list to search in.")
        value: Any = SchemaField(description="The value to search for.")

    class Output(BlockSchema):
        index: int = SchemaField(description="The index of the value in the list.")
        found: bool = SchemaField(
            description="Whether the value was found in the list."
        )
        not_found_value: Any = SchemaField(
            description="The value that was not found in the list."
        )

    def __init__(self):
        super().__init__(
            id="5e2c6d0a-1e37-489f-b1d0-8e1812b23333",
            description="Finds the index of the value in the list.",
            categories={BlockCategory.BASIC},
            input_schema=FindInListBlock.Input,
            output_schema=FindInListBlock.Output,
            test_input=[
                {"list": [1, 2, 3, 4, 5], "value": 3},
                {"list": [1, 2, 3, 4, 5], "value": 6},
            ],
            test_output=[
                ("index", 2),
                ("found", True),
                ("found", False),
                ("not_found_value", 6),
            ],
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        try:
            yield "index", input_data.list.index(input_data.value)
            yield "found", True
        except ValueError:
            yield "found", False
            yield "not_found_value", input_data.value


class NoteBlock(Block):
    class Input(BlockSchema):
        text: str = SchemaField(description="The text to display in the sticky note.")
@@ -496,47 +590,3 @@ class CreateListBlock(Block):
            yield "list", input_data.values
        except Exception as e:
            yield "error", f"Failed to create list: {str(e)}"
class TypeOptions(enum.Enum):
    STRING = "string"
    NUMBER = "number"
    BOOLEAN = "boolean"
    LIST = "list"
    DICTIONARY = "dictionary"


class UniversalTypeConverterBlock(Block):
    class Input(BlockSchema):
        value: Any = SchemaField(
            description="The value to convert to a universal type."
        )
        type: TypeOptions = SchemaField(description="The type to convert the value to.")

    class Output(BlockSchema):
        value: Any = SchemaField(description="The converted value.")

    def __init__(self):
        super().__init__(
            id="95d1b990-ce13-4d88-9737-ba5c2070c97b",
            description="This block is used to convert a value to a universal type.",
            categories={BlockCategory.BASIC},
            input_schema=UniversalTypeConverterBlock.Input,
            output_schema=UniversalTypeConverterBlock.Output,
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        try:
            converted_value = convert(
                input_data.value,
                {
                    TypeOptions.STRING: str,
                    TypeOptions.NUMBER: float,
                    TypeOptions.BOOLEAN: bool,
                    TypeOptions.LIST: list,
                    TypeOptions.DICTIONARY: dict,
                }[input_data.type],
            )
            yield "value", converted_value
        except Exception as e:
            yield "error", f"Failed to convert value: {str(e)}"
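# Sketch: the block above maps each TypeOptions member to a plain Python
# type and hands the coercion to backend.util.type.convert. An illustrative
# run (the exact coercion rules live in convert(); the printed result is an
# assumption consistent with float conversion):
for name, value in UniversalTypeConverterBlock().run(
    UniversalTypeConverterBlock.Input(value="3.14", type=TypeOptions.NUMBER)
):
    print(name, value)  # expected: ("value", 3.14)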
@@ -107,83 +107,3 @@ class ConditionBlock(Block):
            yield "yes_output", yes_value
        else:
            yield "no_output", no_value


class IfInputMatchesBlock(Block):
    class Input(BlockSchema):
        input: Any = SchemaField(
            description="The input to match against",
            placeholder="For example: 10 or 'hello' or True",
        )
        value: Any = SchemaField(
            description="The value to output if the input matches",
            placeholder="For example: 'Greater' or 20 or False",
        )
        yes_value: Any = SchemaField(
            description="The value to output if the input matches",
            placeholder="For example: 'Greater' or 20 or False",
            default=None,
        )
        no_value: Any = SchemaField(
            description="The value to output if the input does not match",
            placeholder="For example: 'Greater' or 20 or False",
            default=None,
        )

    class Output(BlockSchema):
        result: bool = SchemaField(
            description="The result of the condition evaluation (True or False)"
        )
        yes_output: Any = SchemaField(
            description="The output value if the condition is true"
        )
        no_output: Any = SchemaField(
            description="The output value if the condition is false"
        )

    def __init__(self):
        super().__init__(
            id="6dbbc4b3-ca6c-42b6-b508-da52d23e13f2",
            input_schema=IfInputMatchesBlock.Input,
            output_schema=IfInputMatchesBlock.Output,
            description="Handles conditional logic based on comparison operators",
            categories={BlockCategory.LOGIC},
            test_input=[
                {
                    "input": 10,
                    "value": 10,
                    "yes_value": "Greater",
                    "no_value": "Not greater",
                },
                {
                    "input": 10,
                    "value": 20,
                    "yes_value": "Greater",
                    "no_value": "Not greater",
                },
                {
                    "input": 10,
                    "value": None,
                    "yes_value": "Yes",
                    "no_value": "No",
                },
            ],
            test_output=[
                ("result", True),
                ("yes_output", "Greater"),
                ("result", False),
                ("no_output", "Not greater"),
                ("result", False),
                ("no_output", "No"),
                # ("result", True),
                # ("yes_output", "Yes"),
            ],
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        if input_data.input == input_data.value or input_data.input is input_data.value:
            yield "result", True
            yield "yes_output", input_data.yes_value
        else:
            yield "result", False
            yield "no_output", input_data.no_value
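# Sketch of the match semantics in IfInputMatchesBlock.run() above: the
# `==` / `is` pair means equal values match, and the identity check also
# covers cases such as input=None vs value=None. Mirroring the first test
# case from the block definition:
outputs = list(
    IfInputMatchesBlock().run(
        IfInputMatchesBlock.Input(
            input=10, value=10, yes_value="Greater", no_value="Not greater"
        )
    )
)
assert outputs == [("result", True), ("yes_output", "Greater")]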
@@ -55,7 +55,7 @@ class CodeExecutionBlock(Block):
                "These commands are executed with `sh`, in the foreground."
            ),
            placeholder="pip install cowsay",
            default_factory=list,
            default=[],
            advanced=False,
        )

@@ -188,270 +188,3 @@ class CodeExecutionBlock(Block):
            yield "stderr_logs", stderr_logs
        except Exception as e:
            yield "error", str(e)
class InstantiationBlock(Block):
    class Input(BlockSchema):
        credentials: CredentialsMetaInput[
            Literal[ProviderName.E2B], Literal["api_key"]
        ] = CredentialsField(
            description="Enter your api key for the E2B Sandbox. You can get it in here - https://e2b.dev/docs",
        )

        # TODO: Option to run commands in the background
        setup_commands: list[str] = SchemaField(
            description=(
                "Shell commands to set up the sandbox before running the code. "
                "You can use `curl` or `git` to install your desired Debian based "
                "package manager. `pip` and `npm` are pre-installed.\n\n"
                "These commands are executed with `sh`, in the foreground."
            ),
            placeholder="pip install cowsay",
            default_factory=list,
            advanced=False,
        )

        setup_code: str = SchemaField(
            description="Code to execute in the sandbox",
            placeholder="print('Hello, World!')",
            default="",
            advanced=False,
        )

        language: ProgrammingLanguage = SchemaField(
            description="Programming language to execute",
            default=ProgrammingLanguage.PYTHON,
            advanced=False,
        )

        timeout: int = SchemaField(
            description="Execution timeout in seconds", default=300
        )

        template_id: str = SchemaField(
            description=(
                "You can use an E2B sandbox template by entering its ID here. "
                "Check out the E2B docs for more details: "
                "[E2B - Sandbox template](https://e2b.dev/docs/sandbox-template)"
            ),
            default="",
            advanced=True,
        )

    class Output(BlockSchema):
        sandbox_id: str = SchemaField(description="ID of the sandbox instance")
        response: str = SchemaField(description="Response from code execution")
        stdout_logs: str = SchemaField(
            description="Standard output logs from execution"
        )
        stderr_logs: str = SchemaField(description="Standard error logs from execution")
        error: str = SchemaField(description="Error message if execution failed")

    def __init__(self):
        super().__init__(
            id="ff0861c9-1726-4aec-9e5b-bf53f3622112",
            description="Instantiate an isolated sandbox environment with internet access where to execute code in.",
            categories={BlockCategory.DEVELOPER_TOOLS},
            input_schema=InstantiationBlock.Input,
            output_schema=InstantiationBlock.Output,
            test_credentials=TEST_CREDENTIALS,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "setup_code": "print('Hello World')",
                "language": ProgrammingLanguage.PYTHON.value,
                "setup_commands": [],
                "timeout": 300,
                "template_id": "",
            },
            test_output=[
                ("sandbox_id", str),
                ("response", "Hello World"),
                ("stdout_logs", "Hello World\n"),
            ],
            test_mock={
                "execute_code": lambda setup_code, language, setup_commands, timeout, api_key, template_id: (
                    "sandbox_id",
                    "Hello World",
                    "Hello World\n",
                    "",
                ),
            },
        )

    def run(
        self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
    ) -> BlockOutput:
        try:
            sandbox_id, response, stdout_logs, stderr_logs = self.execute_code(
                input_data.setup_code,
                input_data.language,
                input_data.setup_commands,
                input_data.timeout,
                credentials.api_key.get_secret_value(),
                input_data.template_id,
            )
            if sandbox_id:
                yield "sandbox_id", sandbox_id
            else:
                yield "error", "Sandbox ID not found"
            if response:
                yield "response", response
            if stdout_logs:
                yield "stdout_logs", stdout_logs
            if stderr_logs:
                yield "stderr_logs", stderr_logs
        except Exception as e:
            yield "error", str(e)

    def execute_code(
        self,
        code: str,
        language: ProgrammingLanguage,
        setup_commands: list[str],
        timeout: int,
        api_key: str,
        template_id: str,
    ):
        try:
            sandbox = None
            if template_id:
                sandbox = Sandbox(
                    template=template_id, api_key=api_key, timeout=timeout
                )
            else:
                sandbox = Sandbox(api_key=api_key, timeout=timeout)

            if not sandbox:
                raise Exception("Sandbox not created")

            # Running setup commands
            for cmd in setup_commands:
                sandbox.commands.run(cmd)

            # Executing the code
            execution = sandbox.run_code(
                code,
                language=language.value,
                on_error=lambda e: sandbox.kill(),  # Kill the sandbox if there is an error
            )

            if execution.error:
                raise Exception(execution.error)

            response = execution.text
            stdout_logs = "".join(execution.logs.stdout)
            stderr_logs = "".join(execution.logs.stderr)

            return sandbox.sandbox_id, response, stdout_logs, stderr_logs

        except Exception as e:
            raise e
class StepExecutionBlock(Block):
    class Input(BlockSchema):
        credentials: CredentialsMetaInput[
            Literal[ProviderName.E2B], Literal["api_key"]
        ] = CredentialsField(
            description="Enter your api key for the E2B Sandbox. You can get it in here - https://e2b.dev/docs",
        )

        sandbox_id: str = SchemaField(
            description="ID of the sandbox instance to execute the code in",
            advanced=False,
        )

        step_code: str = SchemaField(
            description="Code to execute in the sandbox",
            placeholder="print('Hello, World!')",
            default="",
            advanced=False,
        )

        language: ProgrammingLanguage = SchemaField(
            description="Programming language to execute",
            default=ProgrammingLanguage.PYTHON,
            advanced=False,
        )

    class Output(BlockSchema):
        response: str = SchemaField(description="Response from code execution")
        stdout_logs: str = SchemaField(
            description="Standard output logs from execution"
        )
        stderr_logs: str = SchemaField(description="Standard error logs from execution")
        error: str = SchemaField(description="Error message if execution failed")

    def __init__(self):
        super().__init__(
            id="82b59b8e-ea10-4d57-9161-8b169b0adba6",
            description="Execute code in a previously instantiated sandbox environment.",
            categories={BlockCategory.DEVELOPER_TOOLS},
            input_schema=StepExecutionBlock.Input,
            output_schema=StepExecutionBlock.Output,
            test_credentials=TEST_CREDENTIALS,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "sandbox_id": "sandbox_id",
                "step_code": "print('Hello World')",
                "language": ProgrammingLanguage.PYTHON.value,
            },
            test_output=[
                ("response", "Hello World"),
                ("stdout_logs", "Hello World\n"),
            ],
            test_mock={
                "execute_step_code": lambda sandbox_id, step_code, language, api_key: (
                    "Hello World",
                    "Hello World\n",
                    "",
                ),
            },
        )

    def execute_step_code(
        self,
        sandbox_id: str,
        code: str,
        language: ProgrammingLanguage,
        api_key: str,
    ):
        try:
            sandbox = Sandbox.connect(sandbox_id=sandbox_id, api_key=api_key)
            if not sandbox:
                raise Exception("Sandbox not found")

            # Executing the code
            execution = sandbox.run_code(code, language=language.value)

            if execution.error:
                raise Exception(execution.error)

            response = execution.text
            stdout_logs = "".join(execution.logs.stdout)
            stderr_logs = "".join(execution.logs.stderr)

            return response, stdout_logs, stderr_logs

        except Exception as e:
            raise e

    def run(
        self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
    ) -> BlockOutput:
        try:
            response, stdout_logs, stderr_logs = self.execute_step_code(
                input_data.sandbox_id,
                input_data.step_code,
                input_data.language,
                credentials.api_key.get_secret_value(),
            )

            if response:
                yield "response", response
            if stdout_logs:
                yield "stdout_logs", stdout_logs
            if stderr_logs:
                yield "stderr_logs", stderr_logs
        except Exception as e:
            yield "error", str(e)
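# Sketch (the credential wiring is an assumption outside the test harness):
# chaining the two E2B blocks above. InstantiationBlock yields a sandbox_id;
# StepExecutionBlock reconnects to the same sandbox, so state created in
# setup_code should persist into the step.
init_outputs = dict(
    InstantiationBlock().run(
        InstantiationBlock.Input(
            credentials=TEST_CREDENTIALS_INPUT, setup_code="x = 41"
        ),
        credentials=TEST_CREDENTIALS,
    )
)
step_outputs = dict(
    StepExecutionBlock().run(
        StepExecutionBlock.Input(
            credentials=TEST_CREDENTIALS_INPUT,
            sandbox_id=init_outputs["sandbox_id"],
            step_code="print(x + 1)",
        ),
        credentials=TEST_CREDENTIALS,
    )
)
# step_outputs.get("stdout_logs") should be "42\n" if the sandbox kept `x`.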
@@ -8,7 +8,6 @@ from backend.data.block import (
    BlockSchema,
)
from backend.data.model import SchemaField
from backend.integrations.providers import ProviderName
from backend.integrations.webhooks.compass import CompassWebhookType


@@ -43,7 +42,7 @@ class CompassAITriggerBlock(Block):
            input_schema=CompassAITriggerBlock.Input,
            output_schema=CompassAITriggerBlock.Output,
            webhook_config=BlockManualWebhookConfig(
                provider=ProviderName.COMPASS,
                provider="compass",
                webhook_type=CompassWebhookType.TRANSCRIPTION,
            ),
            test_input=[

@@ -34,7 +34,7 @@ class ReadCsvBlock(Block):
        )
        skip_columns: list[str] = SchemaField(
            description="The columns to skip from the start of the row",
            default_factory=list,
            default=[],
        )

    class Output(BlockSchema):
@@ -1,53 +1,22 @@
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from typing import Literal

from pydantic import BaseModel, ConfigDict, SecretStr
from pydantic import BaseModel, ConfigDict

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import (
    CredentialsField,
    CredentialsMetaInput,
    SchemaField,
    UserPasswordCredentials,
)
from backend.integrations.providers import ProviderName

TEST_CREDENTIALS = UserPasswordCredentials(
    id="01234567-89ab-cdef-0123-456789abcdef",
    provider="smtp",
    username=SecretStr("mock-smtp-username"),
    password=SecretStr("mock-smtp-password"),
    title="Mock SMTP credentials",
)

TEST_CREDENTIALS_INPUT = {
    "provider": TEST_CREDENTIALS.provider,
    "id": TEST_CREDENTIALS.id,
    "type": TEST_CREDENTIALS.type,
    "title": TEST_CREDENTIALS.title,
}
SMTPCredentials = UserPasswordCredentials
SMTPCredentialsInput = CredentialsMetaInput[
    Literal[ProviderName.SMTP],
    Literal["user_password"],
]
from backend.data.model import BlockSecret, SchemaField, SecretField


def SMTPCredentialsField() -> SMTPCredentialsInput:
    return CredentialsField(
        description="The SMTP integration requires a username and password.",
    )


class SMTPConfig(BaseModel):
class EmailCredentials(BaseModel):
    smtp_server: str = SchemaField(
        default="smtp.example.com", description="SMTP server address"
        default="smtp.gmail.com", description="SMTP server address"
    )
    smtp_port: int = SchemaField(default=25, description="SMTP port number")
    smtp_username: BlockSecret = SecretField(key="smtp_username")
    smtp_password: BlockSecret = SecretField(key="smtp_password")

    model_config = ConfigDict(title="SMTP Config")
    model_config = ConfigDict(title="Email Credentials")


class SendEmailBlock(Block):
@@ -61,11 +30,10 @@ class SendEmailBlock(Block):
        body: str = SchemaField(
            description="Body of the email", placeholder="Enter the email body"
        )
        config: SMTPConfig = SchemaField(
            description="SMTP Config",
            default=SMTPConfig(),
        creds: EmailCredentials = SchemaField(
            description="SMTP credentials",
            default=EmailCredentials(),
        )
        credentials: SMTPCredentialsInput = SMTPCredentialsField()

    class Output(BlockSchema):
        status: str = SchemaField(description="Status of the email sending operation")
@@ -75,6 +43,7 @@ class SendEmailBlock(Block):

    def __init__(self):
        super().__init__(
            disabled=True,
            id="4335878a-394e-4e67-adf2-919877ff49ae",
            description="This block sends an email using the provided SMTP credentials.",
            categories={BlockCategory.OUTPUT},
@@ -84,29 +53,25 @@ class SendEmailBlock(Block):
                "to_email": "recipient@example.com",
                "subject": "Test Email",
                "body": "This is a test email.",
                "config": {
                "creds": {
                    "smtp_server": "smtp.gmail.com",
                    "smtp_port": 25,
                    "smtp_username": "your-email@gmail.com",
                    "smtp_password": "your-gmail-password",
                },
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[("status", "Email sent successfully")],
            test_mock={"send_email": lambda *args, **kwargs: "Email sent successfully"},
        )

    @staticmethod
    def send_email(
        config: SMTPConfig,
        to_email: str,
        subject: str,
        body: str,
        credentials: SMTPCredentials,
        creds: EmailCredentials, to_email: str, subject: str, body: str
    ) -> str:
        smtp_server = config.smtp_server
        smtp_port = config.smtp_port
        smtp_username = credentials.username.get_secret_value()
        smtp_password = credentials.password.get_secret_value()
        smtp_server = creds.smtp_server
        smtp_port = creds.smtp_port
        smtp_username = creds.smtp_username.get_secret_value()
        smtp_password = creds.smtp_password.get_secret_value()

        msg = MIMEMultipart()
        msg["From"] = smtp_username
@@ -121,13 +86,10 @@ class SendEmailBlock(Block):

        return "Email sent successfully"

    def run(
        self, input_data: Input, *, credentials: SMTPCredentials, **kwargs
    ) -> BlockOutput:
    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        yield "status", self.send_email(
            config=input_data.config,
            to_email=input_data.to_email,
            subject=input_data.subject,
            body=input_data.body,
            credentials=credentials,
            input_data.creds,
            input_data.to_email,
            input_data.subject,
            input_data.body,
        )
@@ -1,4 +1,4 @@
from typing import List
from typing import List, Optional

from pydantic import BaseModel

@@ -13,12 +13,12 @@ from backend.util.request import requests


class ContentRetrievalSettings(BaseModel):
    text: dict = SchemaField(
    text: Optional[dict] = SchemaField(
        description="Text content settings",
        default={"maxCharacters": 1000, "includeHtmlTags": False},
        advanced=True,
    )
    highlights: dict = SchemaField(
    highlights: Optional[dict] = SchemaField(
        description="Highlight settings",
        default={
            "numSentences": 3,
@@ -27,7 +27,7 @@ class ContentRetrievalSettings(BaseModel):
        },
        advanced=True,
    )
    summary: dict = SchemaField(
    summary: Optional[dict] = SchemaField(
        description="Summary settings",
        default={"query": ""},
        advanced=True,
@@ -49,9 +49,8 @@ class ExaContentsBlock(Block):
    class Output(BlockSchema):
        results: list = SchemaField(
            description="List of document contents",
            default_factory=list,
            default=[],
        )
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(

@@ -38,11 +38,11 @@ class ExaSearchBlock(Block):
        )
        include_domains: List[str] = SchemaField(
            description="Domains to include in search",
            default_factory=list,
            default=[],
        )
        exclude_domains: List[str] = SchemaField(
            description="Domains to exclude from search",
            default_factory=list,
            default=[],
            advanced=True,
        )
        start_crawl_date: datetime = SchemaField(
@@ -59,12 +59,12 @@ class ExaSearchBlock(Block):
        )
        include_text: List[str] = SchemaField(
            description="Text patterns to include",
            default_factory=list,
            default=[],
            advanced=True,
        )
        exclude_text: List[str] = SchemaField(
            description="Text patterns to exclude",
            default_factory=list,
            default=[],
            advanced=True,
        )
        contents: ContentSettings = SchemaField(
@@ -76,7 +76,7 @@ class ExaSearchBlock(Block):
    class Output(BlockSchema):
        results: list = SchemaField(
            description="List of search results",
            default_factory=list,
            default=[],
        )

    def __init__(self):

@@ -26,12 +26,12 @@ class ExaFindSimilarBlock(Block):
        )
        include_domains: List[str] = SchemaField(
            description="Domains to include in search",
            default_factory=list,
            default=[],
            advanced=True,
        )
        exclude_domains: List[str] = SchemaField(
            description="Domains to exclude from search",
            default_factory=list,
            default=[],
            advanced=True,
        )
        start_crawl_date: datetime = SchemaField(
@@ -48,12 +48,12 @@ class ExaFindSimilarBlock(Block):
        )
        include_text: List[str] = SchemaField(
            description="Text patterns to include (max 1 string, up to 5 words)",
            default_factory=list,
            default=[],
            advanced=True,
        )
        exclude_text: List[str] = SchemaField(
            description="Text patterns to exclude (max 1 string, up to 5 words)",
            default_factory=list,
            default=[],
            advanced=True,
        )
        contents: ContentSettings = SchemaField(
@@ -65,7 +65,7 @@ class ExaFindSimilarBlock(Block):
    class Output(BlockSchema):
        results: List[Any] = SchemaField(
            description="List of similar documents with title, URL, published date, author, and score",
            default_factory=list,
            default=[],
        )

    def __init__(self):
@@ -42,7 +42,7 @@ class AIVideoGeneratorBlock(Block):
            description="Error message if video generation failed."
        )
        logs: list[str] = SchemaField(
            description="Generation progress logs.",
            description="Generation progress logs.", optional=True
        )

    def __init__(self):
@@ -1,51 +0,0 @@
from backend.data.block import (
    Block,
    BlockCategory,
    BlockManualWebhookConfig,
    BlockOutput,
    BlockSchema,
)
from backend.data.model import SchemaField
from backend.integrations.providers import ProviderName
from backend.integrations.webhooks.generic import GenericWebhookType


class GenericWebhookTriggerBlock(Block):
    class Input(BlockSchema):
        payload: dict = SchemaField(hidden=True, default_factory=dict)
        constants: dict = SchemaField(
            description="The constants to be set when the block is put on the graph",
            default_factory=dict,
        )

    class Output(BlockSchema):
        payload: dict = SchemaField(
            description="The complete webhook payload that was received from the generic webhook."
        )
        constants: dict = SchemaField(
            description="The constants to be set when the block is put on the graph"
        )

    example_payload = {"message": "Hello, World!"}

    def __init__(self):
        super().__init__(
            id="8fa8c167-2002-47ce-aba8-97572fc5d387",
            description="This block will output the contents of the generic input for the webhook.",
            categories={BlockCategory.INPUT},
            input_schema=GenericWebhookTriggerBlock.Input,
            output_schema=GenericWebhookTriggerBlock.Output,
            webhook_config=BlockManualWebhookConfig(
                provider=ProviderName.GENERIC_WEBHOOK,
                webhook_type=GenericWebhookType.PLAIN,
            ),
            test_input={"constants": {"key": "value"}, "payload": self.example_payload},
            test_output=[
                ("constants", {"key": "value"}),
                ("payload", self.example_payload),
            ],
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        yield "constants", input_data.constants
        yield "payload", input_data.payload
@@ -1,9 +1,6 @@
from urllib.parse import urlparse

from backend.blocks.github._auth import (
    GithubCredentials,
    GithubFineGrainedAPICredentials,
)
from backend.blocks.github._auth import GithubCredentials
from backend.util.request import Requests


@@ -33,68 +30,12 @@ def _convert_to_api_url(url: str) -> str:

def _get_headers(credentials: GithubCredentials) -> dict[str, str]:
    return {
        "Authorization": credentials.auth_header(),
        "Authorization": credentials.bearer(),
        "Accept": "application/vnd.github.v3+json",
    }


def convert_comment_url_to_api_endpoint(comment_url: str) -> str:
    """
    Converts a GitHub comment URL (web interface) to the appropriate API endpoint URL.

    Handles:
    1. Issue/PR comments: #issuecomment-{id}
    2. PR review comments: #discussion_r{id}

    Returns the appropriate API endpoint path for the comment.
    """
    # First, check if this is already an API URL
    parsed_url = urlparse(comment_url)
    if parsed_url.hostname == "api.github.com":
        return comment_url

    # Replace pull with issues for comment endpoints
    if "/pull/" in comment_url:
        comment_url = comment_url.replace("/pull/", "/issues/")

    # Handle issue/PR comments (#issuecomment-xxx)
    if "#issuecomment-" in comment_url:
        base_url, comment_part = comment_url.split("#issuecomment-")
        comment_id = comment_part

        # Extract repo information from base URL
        parsed_url = urlparse(base_url)
        path_parts = parsed_url.path.strip("/").split("/")
        owner, repo = path_parts[0], path_parts[1]

        # Construct API URL for issue comments
        return (
            f"https://api.github.com/repos/{owner}/{repo}/issues/comments/{comment_id}"
        )

    # Handle PR review comments (#discussion_r)
    elif "#discussion_r" in comment_url:
        base_url, comment_part = comment_url.split("#discussion_r")
        comment_id = comment_part

        # Extract repo information from base URL
        parsed_url = urlparse(base_url)
        path_parts = parsed_url.path.strip("/").split("/")
        owner, repo = path_parts[0], path_parts[1]

        # Construct API URL for PR review comments
        return (
            f"https://api.github.com/repos/{owner}/{repo}/pulls/comments/{comment_id}"
        )

    # If no specific comment identifiers are found, use the general URL conversion
    return _convert_to_api_url(comment_url)


def get_api(
    credentials: GithubCredentials | GithubFineGrainedAPICredentials,
    convert_urls: bool = True,
) -> Requests:
def get_api(credentials: GithubCredentials, convert_urls: bool = True) -> Requests:
    return Requests(
        trusted_origins=["https://api.github.com", "https://github.com"],
        extra_url_validator=_convert_to_api_url if convert_urls else None,
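# Worked examples of convert_comment_url_to_api_endpoint above (owner, repo,
# and comment IDs are placeholders); both follow directly from the branches
# of the function as written:
assert convert_comment_url_to_api_endpoint(
    "https://github.com/owner/repo/pull/7#issuecomment-99"
) == "https://api.github.com/repos/owner/repo/issues/comments/99"
assert convert_comment_url_to_api_endpoint(
    "https://github.com/owner/repo/pull/7#discussion_r123"
) == "https://api.github.com/repos/owner/repo/pulls/comments/123"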
@@ -22,11 +22,6 @@ GithubCredentialsInput = CredentialsMetaInput[
    Literal["api_key", "oauth2"] if GITHUB_OAUTH_IS_CONFIGURED else Literal["api_key"],
]

GithubFineGrainedAPICredentials = APIKeyCredentials
GithubFineGrainedAPICredentialsInput = CredentialsMetaInput[
    Literal[ProviderName.GITHUB], Literal["api_key"]
]


def GithubCredentialsField(scope: str) -> GithubCredentialsInput:
    """
@@ -42,16 +37,6 @@ def GithubCredentialsField(scope: str) -> GithubCredentialsInput:
    )


def GithubFineGrainedAPICredentialsField(
    scope: str,
) -> GithubFineGrainedAPICredentialsInput:
    return CredentialsField(
        required_scopes={scope},
        description="The GitHub integration can be used with OAuth, "
        "or any API key with sufficient permissions for the blocks it is used on.",
    )


TEST_CREDENTIALS = APIKeyCredentials(
    id="01234567-89ab-cdef-0123-456789abcdef",
    provider="github",
@@ -65,18 +50,3 @@ TEST_CREDENTIALS_INPUT = {
    "type": TEST_CREDENTIALS.type,
    "title": TEST_CREDENTIALS.type,
}

TEST_FINE_GRAINED_CREDENTIALS = APIKeyCredentials(
    id="01234567-89ab-cdef-0123-456789abcdef",
    provider="github",
    api_key=SecretStr("mock-github-api-key"),
    title="Mock GitHub API key",
    expires_at=None,
)

TEST_FINE_GRAINED_CREDENTIALS_INPUT = {
    "provider": TEST_FINE_GRAINED_CREDENTIALS.provider,
    "id": TEST_FINE_GRAINED_CREDENTIALS.id,
    "type": TEST_FINE_GRAINED_CREDENTIALS.type,
    "title": TEST_FINE_GRAINED_CREDENTIALS.type,
}
@@ -1,360 +0,0 @@
|
||||
from enum import Enum
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
|
||||
from ._api import get_api
|
||||
from ._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
GithubCredentials,
|
||||
GithubCredentialsField,
|
||||
GithubCredentialsInput,
|
||||
)
|
||||
|
||||
|
||||
# queued, in_progress, completed, waiting, requested, pending
|
||||
class ChecksStatus(Enum):
|
||||
QUEUED = "queued"
|
||||
IN_PROGRESS = "in_progress"
|
||||
COMPLETED = "completed"
|
||||
WAITING = "waiting"
|
||||
REQUESTED = "requested"
|
||||
PENDING = "pending"
|
||||
|
||||
|
||||
class ChecksConclusion(Enum):
|
||||
SUCCESS = "success"
|
||||
FAILURE = "failure"
|
||||
NEUTRAL = "neutral"
|
||||
CANCELLED = "cancelled"
|
||||
TIMED_OUT = "timed_out"
|
||||
ACTION_REQUIRED = "action_required"
|
||||
SKIPPED = "skipped"
|
||||
|
||||
|
||||
class GithubCreateCheckRunBlock(Block):
|
||||
"""Block for creating a new check run on a GitHub repository."""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: GithubCredentialsInput = GithubCredentialsField("repo:status")
|
||||
repo_url: str = SchemaField(
|
||||
description="URL of the GitHub repository",
|
||||
placeholder="https://github.com/owner/repo",
|
||||
)
|
||||
name: str = SchemaField(
|
||||
description="The name of the check run (e.g., 'code-coverage')",
|
||||
)
|
||||
head_sha: str = SchemaField(
|
||||
description="The SHA of the commit to check",
|
||||
)
|
||||
status: ChecksStatus = SchemaField(
|
||||
description="Current status of the check run",
|
||||
default=ChecksStatus.QUEUED,
|
||||
)
|
||||
conclusion: Optional[ChecksConclusion] = SchemaField(
|
||||
description="The final conclusion of the check (required if status is completed)",
|
||||
default=None,
|
||||
)
|
||||
details_url: str = SchemaField(
|
||||
description="The URL for the full details of the check",
|
||||
default="",
|
||||
)
|
||||
output_title: str = SchemaField(
|
||||
description="Title of the check run output",
|
||||
default="",
|
||||
)
|
||||
output_summary: str = SchemaField(
|
||||
description="Summary of the check run output",
|
||||
default="",
|
||||
)
|
||||
output_text: str = SchemaField(
|
||||
description="Detailed text of the check run output",
|
||||
default="",
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
class CheckRunResult(BaseModel):
|
||||
id: int
|
||||
html_url: str
|
||||
status: str
|
||||
|
||||
check_run: CheckRunResult = SchemaField(
|
||||
description="Details of the created check run"
|
||||
)
|
||||
error: str = SchemaField(
|
||||
description="Error message if check run creation failed"
|
||||
)
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="2f45e89a-3b7d-4f22-b89e-6c4f5c7e1234",
|
||||
description="Creates a new check run for a specific commit in a GitHub repository",
|
||||
categories={BlockCategory.DEVELOPER_TOOLS},
|
||||
input_schema=GithubCreateCheckRunBlock.Input,
|
||||
output_schema=GithubCreateCheckRunBlock.Output,
|
||||
test_input={
|
||||
"repo_url": "https://github.com/owner/repo",
|
||||
"name": "test-check",
|
||||
"head_sha": "ce587453ced02b1526dfb4cb910479d431683101",
|
||||
"status": ChecksStatus.COMPLETED.value,
|
||||
"conclusion": ChecksConclusion.SUCCESS.value,
|
||||
"output_title": "Test Results",
|
||||
"output_summary": "All tests passed",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
},
|
||||
# requires a github app not available to oauth in our current system
|
||||
disabled=True,
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_output=[
|
||||
(
|
||||
"check_run",
|
||||
{
|
||||
"id": 4,
|
||||
"html_url": "https://github.com/owner/repo/runs/4",
|
||||
"status": "completed",
|
||||
},
|
||||
),
|
||||
],
|
||||
test_mock={
|
||||
"create_check_run": lambda *args, **kwargs: {
|
||||
"id": 4,
|
||||
"html_url": "https://github.com/owner/repo/runs/4",
|
||||
"status": "completed",
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def create_check_run(
|
||||
credentials: GithubCredentials,
|
||||
repo_url: str,
|
||||
name: str,
|
||||
head_sha: str,
|
||||
status: ChecksStatus,
|
||||
conclusion: Optional[ChecksConclusion] = None,
|
||||
details_url: Optional[str] = None,
|
||||
output_title: Optional[str] = None,
|
||||
output_summary: Optional[str] = None,
|
||||
output_text: Optional[str] = None,
|
||||
) -> dict:
|
||||
api = get_api(credentials)
|
||||
|
||||
class CheckRunData(BaseModel):
|
||||
name: str
|
||||
head_sha: str
|
||||
status: str
|
||||
conclusion: Optional[str] = None
|
||||
details_url: Optional[str] = None
|
||||
output: Optional[dict[str, str]] = None
|
||||
|
||||
data = CheckRunData(
|
||||
name=name,
|
||||
head_sha=head_sha,
|
||||
status=status.value,
|
||||
)
|
||||
|
||||
if conclusion:
|
||||
data.conclusion = conclusion.value
|
||||
|
||||
if details_url:
|
||||
data.details_url = details_url
|
||||
|
||||
if output_title or output_summary or output_text:
|
||||
output_data = {
|
||||
"title": output_title or "",
|
||||
"summary": output_summary or "",
|
||||
"text": output_text or "",
|
||||
}
|
||||
data.output = output_data
|
||||
|
||||
check_runs_url = f"{repo_url}/check-runs"
|
||||
response = api.post(
|
||||
check_runs_url, data=data.model_dump_json(exclude_none=True)
|
||||
)
|
||||
result = response.json()
|
||||
|
||||
return {
|
||||
"id": result["id"],
|
||||
"html_url": result["html_url"],
|
||||
"status": result["status"],
|
||||
}
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: GithubCredentials,
|
||||
**kwargs,
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
result = self.create_check_run(
|
||||
credentials=credentials,
|
||||
repo_url=input_data.repo_url,
|
||||
name=input_data.name,
|
||||
head_sha=input_data.head_sha,
|
||||
status=input_data.status,
                conclusion=input_data.conclusion,
                details_url=input_data.details_url,
                output_title=input_data.output_title,
                output_summary=input_data.output_summary,
                output_text=input_data.output_text,
            )
            yield "check_run", result
        except Exception as e:
            yield "error", str(e)


class GithubUpdateCheckRunBlock(Block):
    """Block for updating an existing check run on a GitHub repository."""

    class Input(BlockSchema):
        credentials: GithubCredentialsInput = GithubCredentialsField("repo:status")
        repo_url: str = SchemaField(
            description="URL of the GitHub repository",
            placeholder="https://github.com/owner/repo",
        )
        check_run_id: int = SchemaField(
            description="The ID of the check run to update",
        )
        status: ChecksStatus = SchemaField(
            description="New status of the check run",
        )
        conclusion: ChecksConclusion = SchemaField(
            description="The final conclusion of the check (required if status is completed)",
        )
        output_title: Optional[str] = SchemaField(
            description="New title of the check run output",
            default=None,
        )
        output_summary: Optional[str] = SchemaField(
            description="New summary of the check run output",
            default=None,
        )
        output_text: Optional[str] = SchemaField(
            description="New detailed text of the check run output",
            default=None,
        )

    class Output(BlockSchema):
        class CheckRunResult(BaseModel):
            id: int
            html_url: str
            status: str
            conclusion: Optional[str]

        check_run: CheckRunResult = SchemaField(
            description="Details of the updated check run"
        )
        error: str = SchemaField(description="Error message if check run update failed")

    def __init__(self):
        super().__init__(
            id="8a23c567-9d01-4e56-b789-0c12d3e45678",  # Generated UUID
            description="Updates an existing check run in a GitHub repository",
            categories={BlockCategory.DEVELOPER_TOOLS},
            input_schema=GithubUpdateCheckRunBlock.Input,
            output_schema=GithubUpdateCheckRunBlock.Output,
            # requires a github app not available to oauth in our current system
            disabled=True,
            test_input={
                "repo_url": "https://github.com/owner/repo",
                "check_run_id": 4,
                "status": ChecksStatus.COMPLETED.value,
                "conclusion": ChecksConclusion.SUCCESS.value,
                "output_title": "Updated Results",
                "output_summary": "All tests passed after retry",
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                (
                    "check_run",
                    {
                        "id": 4,
                        "html_url": "https://github.com/owner/repo/runs/4",
                        "status": "completed",
                        "conclusion": "success",
                    },
                ),
            ],
            test_mock={
                "update_check_run": lambda *args, **kwargs: {
                    "id": 4,
                    "html_url": "https://github.com/owner/repo/runs/4",
                    "status": "completed",
                    "conclusion": "success",
                }
            },
        )

    @staticmethod
    def update_check_run(
        credentials: GithubCredentials,
        repo_url: str,
        check_run_id: int,
        status: ChecksStatus,
        conclusion: Optional[ChecksConclusion] = None,
        output_title: Optional[str] = None,
        output_summary: Optional[str] = None,
        output_text: Optional[str] = None,
    ) -> dict:
        api = get_api(credentials)

        class UpdateCheckRunData(BaseModel):
            status: str
            conclusion: Optional[str] = None
            output: Optional[dict[str, str]] = None

        data = UpdateCheckRunData(
            status=status.value,
        )

        if conclusion:
            data.conclusion = conclusion.value

        if output_title or output_summary or output_text:
            output_data = {
                "title": output_title or "",
                "summary": output_summary or "",
                "text": output_text or "",
            }
            data.output = output_data

        check_run_url = f"{repo_url}/check-runs/{check_run_id}"
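        # get_api() rewrites the github.com repo URL into the matching
        # api.github.com endpoint, so this resolves to
        # PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}.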
        response = api.patch(
            check_run_url, data=data.model_dump_json(exclude_none=True)
        )
        result = response.json()

        return {
            "id": result["id"],
            "html_url": result["html_url"],
            "status": result["status"],
            "conclusion": result.get("conclusion"),
        }

    def run(
        self,
        input_data: Input,
        *,
        credentials: GithubCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            result = self.update_check_run(
                credentials=credentials,
                repo_url=input_data.repo_url,
                check_run_id=input_data.check_run_id,
                status=input_data.status,
                conclusion=input_data.conclusion,
                output_title=input_data.output_title,
                output_summary=input_data.output_summary,
                output_text=input_data.output_text,
            )
            yield "check_run", result
        except Exception as e:
            yield "error", str(e)
@@ -1,4 +1,3 @@
import logging
from urllib.parse import urlparse

from typing_extensions import TypedDict
@@ -6,7 +5,7 @@ from typing_extensions import TypedDict
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField

from ._api import convert_comment_url_to_api_endpoint, get_api
from ._api import get_api
from ._auth import (
    TEST_CREDENTIALS,
    TEST_CREDENTIALS_INPUT,
@@ -15,8 +14,6 @@ from ._auth import (
    GithubCredentialsInput,
)

logger = logging.getLogger(__name__)


def is_github_url(url: str) -> bool:
    return urlparse(url).netloc == "github.com"
@@ -111,228 +108,6 @@ class GithubCommentBlock(Block):
    # --8<-- [end:GithubCommentBlockExample]


class GithubUpdateCommentBlock(Block):
    class Input(BlockSchema):
        credentials: GithubCredentialsInput = GithubCredentialsField("repo")
        comment_url: str = SchemaField(
            description="URL of the GitHub comment",
            placeholder="https://github.com/owner/repo/issues/1#issuecomment-123456789",
            default="",
            advanced=False,
        )
        issue_url: str = SchemaField(
            description="URL of the GitHub issue or pull request",
            placeholder="https://github.com/owner/repo/issues/1",
            default="",
        )
        comment_id: str = SchemaField(
            description="ID of the GitHub comment",
            placeholder="123456789",
            default="",
        )
        comment: str = SchemaField(
            description="Comment to update",
            placeholder="Enter your comment",
        )

    class Output(BlockSchema):
        id: int = SchemaField(description="ID of the updated comment")
        url: str = SchemaField(description="URL to the comment on GitHub")
        error: str = SchemaField(
            description="Error message if the comment update failed"
        )

    def __init__(self):
        super().__init__(
            id="b3f4d747-10e3-4e69-8c51-f2be1d99c9a7",
            description="This block updates a comment on a specified GitHub issue or pull request.",
            categories={BlockCategory.DEVELOPER_TOOLS},
            input_schema=GithubUpdateCommentBlock.Input,
            output_schema=GithubUpdateCommentBlock.Output,
            test_input={
                "comment_url": "https://github.com/owner/repo/issues/1#issuecomment-123456789",
                "comment": "This is an updated comment.",
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("id", 123456789),
                (
                    "url",
                    "https://github.com/owner/repo/issues/1#issuecomment-123456789",
                ),
            ],
            test_mock={
                "update_comment": lambda *args, **kwargs: (
                    123456789,
                    "https://github.com/owner/repo/issues/1#issuecomment-123456789",
                )
            },
        )

    @staticmethod
    def update_comment(
        credentials: GithubCredentials, comment_url: str, body_text: str
    ) -> tuple[int, str]:
        api = get_api(credentials, convert_urls=False)
        data = {"body": body_text}
        url = convert_comment_url_to_api_endpoint(comment_url)

        logger.info(url)
        response = api.patch(url, json=data)
        comment = response.json()
        return comment["id"], comment["html_url"]

    def run(
        self,
        input_data: Input,
        *,
        credentials: GithubCredentials,
        **kwargs,
    ) -> BlockOutput:
        if (
            not input_data.comment_url
            and input_data.comment_id
            and input_data.issue_url
        ):
            parsed_url = urlparse(input_data.issue_url)
            path_parts = parsed_url.path.strip("/").split("/")
            owner, repo = path_parts[0], path_parts[1]

            input_data.comment_url = f"https://api.github.com/repos/{owner}/{repo}/issues/comments/{input_data.comment_id}"
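            # GitHub serves issue and PR comments from the same
            # /issues/comments endpoint, so this URL works for both.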

        elif (
            not input_data.comment_url
            and not input_data.comment_id
            and input_data.issue_url
        ):
            raise ValueError(
                "Must provide either comment_url or comment_id and issue_url"
            )
        id, url = self.update_comment(
            credentials,
            input_data.comment_url,
            input_data.comment,
        )
        yield "id", id
        yield "url", url


class GithubListCommentsBlock(Block):
    class Input(BlockSchema):
        credentials: GithubCredentialsInput = GithubCredentialsField("repo")
        issue_url: str = SchemaField(
            description="URL of the GitHub issue or pull request",
            placeholder="https://github.com/owner/repo/issues/1",
        )

    class Output(BlockSchema):
        class CommentItem(TypedDict):
            id: int
            body: str
            user: str
            url: str

        comment: CommentItem = SchemaField(
            title="Comment", description="Comments with their ID, body, user, and URL"
        )
        comments: list[CommentItem] = SchemaField(
            description="List of comments with their ID, body, user, and URL"
        )
        error: str = SchemaField(description="Error message if listing comments failed")

    def __init__(self):
        super().__init__(
            id="c4b5fb63-0005-4a11-b35a-0c2467bd6b59",
            description="This block lists all comments for a specified GitHub issue or pull request.",
            categories={BlockCategory.DEVELOPER_TOOLS},
            input_schema=GithubListCommentsBlock.Input,
            output_schema=GithubListCommentsBlock.Output,
            test_input={
                "issue_url": "https://github.com/owner/repo/issues/1",
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                (
                    "comment",
                    {
                        "id": 123456789,
                        "body": "This is a test comment.",
                        "user": "test_user",
                        "url": "https://github.com/owner/repo/issues/1#issuecomment-123456789",
                    },
                ),
                (
                    "comments",
                    [
                        {
                            "id": 123456789,
                            "body": "This is a test comment.",
                            "user": "test_user",
                            "url": "https://github.com/owner/repo/issues/1#issuecomment-123456789",
                        }
                    ],
                ),
            ],
            test_mock={
                "list_comments": lambda *args, **kwargs: [
                    {
                        "id": 123456789,
                        "body": "This is a test comment.",
                        "user": "test_user",
                        "url": "https://github.com/owner/repo/issues/1#issuecomment-123456789",
                    }
                ]
            },
        )

    @staticmethod
    def list_comments(
        credentials: GithubCredentials, issue_url: str
    ) -> list[Output.CommentItem]:
        parsed_url = urlparse(issue_url)
        path_parts = parsed_url.path.strip("/").split("/")

        owner = path_parts[0]
        repo = path_parts[1]

        # GitHub API uses 'issues' for both issues and pull requests when it comes to comments
        issue_number = path_parts[3]  # Whether 'issues/123' or 'pull/123'

        # Construct the proper API URL directly
        api_url = f"https://api.github.com/repos/{owner}/{repo}/issues/{issue_number}/comments"

        # Set convert_urls=False since we're already providing an API URL
        api = get_api(credentials, convert_urls=False)
        response = api.get(api_url)
        comments = response.json()
        parsed_comments: list[GithubListCommentsBlock.Output.CommentItem] = [
            {
                "id": comment["id"],
                "body": comment["body"],
                "user": comment["user"]["login"],
                "url": comment["html_url"],
            }
            for comment in comments
        ]
        return parsed_comments

    def run(
        self,
        input_data: Input,
        *,
        credentials: GithubCredentials,
        **kwargs,
    ) -> BlockOutput:
        comments = self.list_comments(
            credentials,
            input_data.issue_url,
        )
        yield from (("comment", comment) for comment in comments)
        yield "comments", comments


class GithubMakeIssueBlock(Block):
    class Input(BlockSchema):
        credentials: GithubCredentialsInput = GithubCredentialsField("repo")

@@ -200,7 +200,6 @@ class GithubReadPullRequestBlock(Block):
        include_pr_changes: bool = SchemaField(
            description="Whether to include the changes made in the pull request",
            default=False,
            advanced=False,
        )

    class Output(BlockSchema):

@@ -1,180 +0,0 @@
from enum import Enum
from typing import Optional

from pydantic import BaseModel

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField

from ._api import get_api
from ._auth import (
    TEST_CREDENTIALS,
    TEST_CREDENTIALS_INPUT,
    GithubFineGrainedAPICredentials,
    GithubFineGrainedAPICredentialsField,
    GithubFineGrainedAPICredentialsInput,
)


class StatusState(Enum):
    ERROR = "error"
    FAILURE = "failure"
    PENDING = "pending"
    SUCCESS = "success"


class GithubCreateStatusBlock(Block):
    """Block for creating a commit status on a GitHub repository."""

    class Input(BlockSchema):
        credentials: GithubFineGrainedAPICredentialsInput = (
            GithubFineGrainedAPICredentialsField("repo:status")
        )
        repo_url: str = SchemaField(
            description="URL of the GitHub repository",
            placeholder="https://github.com/owner/repo",
        )
        sha: str = SchemaField(
            description="The SHA of the commit to set status for",
        )
        state: StatusState = SchemaField(
            description="The state of the status (error, failure, pending, success)",
        )
        target_url: Optional[str] = SchemaField(
            description="URL with additional details about this status",
            default=None,
        )
        description: Optional[str] = SchemaField(
            description="Short description of the status",
            default=None,
        )
        check_name: Optional[str] = SchemaField(
            description="Label to differentiate this status from others",
            default="AutoGPT Platform Checks",
            advanced=False,
        )

    class Output(BlockSchema):
        class StatusResult(BaseModel):
            id: int
            url: str
            state: str
            context: str
            description: Optional[str]
            target_url: Optional[str]
            created_at: str
            updated_at: str

        status: StatusResult = SchemaField(description="Details of the created status")
        error: str = SchemaField(description="Error message if status creation failed")

    def __init__(self):
        super().__init__(
            id="3d67f123-a4b5-4c89-9d01-2e34f5c67890",  # Generated UUID
            description="Creates a new commit status in a GitHub repository",
            categories={BlockCategory.DEVELOPER_TOOLS},
            input_schema=GithubCreateStatusBlock.Input,
            output_schema=GithubCreateStatusBlock.Output,
            test_input={
                "repo_url": "https://github.com/owner/repo",
                "sha": "ce587453ced02b1526dfb4cb910479d431683101",
                "state": StatusState.SUCCESS.value,
                "target_url": "https://example.com/build/status",
                "description": "The build succeeded!",
                "check_name": "continuous-integration/jenkins",
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                (
                    "status",
                    {
                        "id": 1234567890,
                        "url": "https://api.github.com/repos/owner/repo/statuses/ce587453ced02b1526dfb4cb910479d431683101",
                        "state": "success",
                        "context": "continuous-integration/jenkins",
                        "description": "The build succeeded!",
                        "target_url": "https://example.com/build/status",
                        "created_at": "2024-01-21T10:00:00Z",
                        "updated_at": "2024-01-21T10:00:00Z",
                    },
                ),
            ],
            test_mock={
                "create_status": lambda *args, **kwargs: {
                    "id": 1234567890,
                    "url": "https://api.github.com/repos/owner/repo/statuses/ce587453ced02b1526dfb4cb910479d431683101",
                    "state": "success",
                    "context": "continuous-integration/jenkins",
                    "description": "The build succeeded!",
                    "target_url": "https://example.com/build/status",
                    "created_at": "2024-01-21T10:00:00Z",
                    "updated_at": "2024-01-21T10:00:00Z",
                }
            },
        )

    @staticmethod
    def create_status(
        credentials: GithubFineGrainedAPICredentials,
        repo_url: str,
        sha: str,
        state: StatusState,
        target_url: Optional[str] = None,
        description: Optional[str] = None,
        context: str = "default",
    ) -> dict:
        api = get_api(credentials)

        class StatusData(BaseModel):
            state: str
            target_url: Optional[str] = None
            description: Optional[str] = None
            context: str

        data = StatusData(
            state=state.value,
            context=context,
        )

        if target_url:
            data.target_url = target_url

        if description:
            data.description = description

        status_url = f"{repo_url}/statuses/{sha}"
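        # Resolves to POST /repos/{owner}/{repo}/statuses/{sha}. Commit
        # statuses are append-only; GitHub surfaces the newest one per context.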
        response = api.post(status_url, data=data.model_dump_json(exclude_none=True))
        result = response.json()

        return {
            "id": result["id"],
            "url": result["url"],
            "state": result["state"],
            "context": result["context"],
            "description": result.get("description"),
            "target_url": result.get("target_url"),
            "created_at": result["created_at"],
            "updated_at": result["updated_at"],
        }

    def run(
        self,
        input_data: Input,
        *,
        credentials: GithubFineGrainedAPICredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            result = self.create_status(
                credentials=credentials,
                repo_url=input_data.repo_url,
                sha=input_data.sha,
                state=input_data.state,
                target_url=input_data.target_url,
                description=input_data.description,
                context=input_data.check_name or "AutoGPT Platform Checks",
            )
            yield "status", result
        except Exception as e:
            yield "error", str(e)
@@ -12,7 +12,6 @@ from backend.data.block import (
    BlockWebhookConfig,
)
from backend.data.model import SchemaField
from backend.integrations.providers import ProviderName

from ._auth import (
    TEST_CREDENTIALS,
@@ -37,7 +36,7 @@ class GitHubTriggerBase:
            placeholder="{owner}/{repo}",
        )
        # --8<-- [start:example-payload-field]
        payload: dict = SchemaField(hidden=True, default_factory=dict)
        payload: dict = SchemaField(hidden=True, default={})
        # --8<-- [end:example-payload-field]
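        # default_factory=dict gives each schema instance its own dict,
        # rather than one shared mutable default.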

    class Output(BlockSchema):
@@ -124,7 +123,7 @@ class GithubPullRequestTriggerBlock(GitHubTriggerBase, Block):
            output_schema=GithubPullRequestTriggerBlock.Output,
            # --8<-- [start:example-webhook_config]
            webhook_config=BlockWebhookConfig(
                provider=ProviderName.GITHUB,
                provider="github",
                webhook_type=GithubWebhookType.REPO,
                resource_format="{repo}",
                event_filter_input="events",

@@ -1,598 +0,0 @@
import enum
import uuid
from datetime import datetime, timedelta, timezone
from typing import Literal

from google.oauth2.credentials import Credentials
from googleapiclient.discovery import build
from pydantic import BaseModel

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
from backend.util.settings import AppEnvironment, Settings

from ._auth import (
    GOOGLE_OAUTH_IS_CONFIGURED,
    TEST_CREDENTIALS,
    TEST_CREDENTIALS_INPUT,
    GoogleCredentials,
    GoogleCredentialsField,
    GoogleCredentialsInput,
)


class CalendarEvent(BaseModel):
    """Structured representation of a Google Calendar event."""

    id: str
    title: str
    start_time: str
    end_time: str
    is_all_day: bool
    location: str | None
    description: str | None
    organizer: str | None
    attendees: list[str]
    has_video_call: bool
    video_link: str | None
    calendar_link: str
    is_recurring: bool


class GoogleCalendarReadEventsBlock(Block):
    class Input(BlockSchema):
        credentials: GoogleCredentialsInput = GoogleCredentialsField(
            ["https://www.googleapis.com/auth/calendar.readonly"]
        )
        calendar_id: str = SchemaField(
            description="Calendar ID (use 'primary' for your main calendar)",
            default="primary",
        )
        max_events: int = SchemaField(
            description="Maximum number of events to retrieve", default=10
        )
        start_time: datetime = SchemaField(
            description="Retrieve events starting from this time",
            default_factory=lambda: datetime.now(tz=timezone.utc),
        )
        time_range_days: int = SchemaField(
            description="Number of days to look ahead for events", default=30
        )
        search_term: str | None = SchemaField(
            description="Optional search term to filter events by", default=None
        )

        page_token: str | None = SchemaField(
            description="Page token from previous request to get the next batch of events. You can use this if you have lots of events you want to process in a loop",
            default=None,
        )
        include_declined_events: bool = SchemaField(
            description="Include events you've declined", default=False
        )

    class Output(BlockSchema):
        events: list[CalendarEvent] = SchemaField(
            description="List of calendar events in the requested time range",
            default_factory=list,
        )
        event: CalendarEvent = SchemaField(
            description="One of the calendar events in the requested time range"
        )
        next_page_token: str | None = SchemaField(
            description="Token for retrieving the next page of events if more exist",
            default=None,
        )
        error: str = SchemaField(
            description="Error message if the request failed",
        )

    def __init__(self):
        settings = Settings()

        # Create realistic test data for events
        test_now = datetime.now(tz=timezone.utc)
        test_tomorrow = test_now + timedelta(days=1)

        test_event_dict = {
            "id": "event1id",
            "title": "Team Meeting",
            "start_time": test_tomorrow.strftime("%Y-%m-%d %H:%M"),
            "end_time": (test_tomorrow + timedelta(hours=1)).strftime("%Y-%m-%d %H:%M"),
            "is_all_day": False,
            "location": "Conference Room A",
            "description": "Weekly team sync",
            "organizer": "manager@example.com",
            "attendees": ["colleague1@example.com", "colleague2@example.com"],
            "has_video_call": True,
            "video_link": "https://meet.google.com/abc-defg-hij",
            "calendar_link": "https://calendar.google.com/calendar/event?eid=event1id",
            "is_recurring": True,
        }

        super().__init__(
            id="80bc3ed1-e9a4-449e-8163-a8fc86f74f6a",
            description="Retrieves upcoming events from a Google Calendar with filtering options",
            categories={BlockCategory.PRODUCTIVITY, BlockCategory.DATA},
            input_schema=GoogleCalendarReadEventsBlock.Input,
            output_schema=GoogleCalendarReadEventsBlock.Output,
            disabled=not GOOGLE_OAUTH_IS_CONFIGURED
            or settings.config.app_env == AppEnvironment.PRODUCTION,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "calendar_id": "primary",
                "max_events": 5,
                "start_time": test_now.isoformat(),
                "time_range_days": 7,
                "search_term": None,
                "include_declined_events": False,
                "page_token": None,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("event", test_event_dict),
                ("events", [test_event_dict]),
            ],
            test_mock={
                "_read_calendar": lambda *args, **kwargs: {
                    "items": [
                        {
                            "id": "event1id",
                            "summary": "Team Meeting",
                            "start": {
                                "dateTime": test_tomorrow.isoformat(),
                                "timeZone": "UTC",
                            },
                            "end": {
                                "dateTime": (
                                    test_tomorrow + timedelta(hours=1)
                                ).isoformat(),
                                "timeZone": "UTC",
                            },
                            "location": "Conference Room A",
                            "description": "Weekly team sync",
                            "organizer": {"email": "manager@example.com"},
                            "attendees": [
                                {"email": "colleague1@example.com"},
                                {"email": "colleague2@example.com"},
                            ],
                            "conferenceData": {
                                "conferenceUrl": "https://meet.google.com/abc-defg-hij"
                            },
                            "htmlLink": "https://calendar.google.com/calendar/event?eid=event1id",
                            "recurrence": ["RRULE:FREQ=WEEKLY;COUNT=10"],
                        }
                    ],
                    "nextPageToken": None,
                },
                "_format_events": lambda *args, **kwargs: [test_event_dict],
            },
        )

    def run(
        self, input_data: Input, *, credentials: GoogleCredentials, **kwargs
    ) -> BlockOutput:
        try:
            service = self._build_service(credentials, **kwargs)

            # Calculate end time based on start time and time range
            end_time = input_data.start_time + timedelta(
                days=input_data.time_range_days
            )

            # Call Google Calendar API
            result = self._read_calendar(
                service=service,
                calendarId=input_data.calendar_id,
                time_min=input_data.start_time.isoformat(),
                time_max=end_time.isoformat(),
                max_results=input_data.max_events,
                single_events=True,
                search_term=input_data.search_term,
                show_deleted=False,
                show_hidden=input_data.include_declined_events,
                page_token=input_data.page_token,
            )

            # Format events into a user-friendly structure
            formatted_events = self._format_events(result.get("items", []))

            # Include next page token if available
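            # Feed it back in as `page_token` on a later run to continue
            # paging through the remaining events.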
            if next_page_token := result.get("nextPageToken"):
                yield "next_page_token", next_page_token

            for event in formatted_events:
                yield "event", event

            yield "events", formatted_events

        except Exception as e:
            yield "error", str(e)

    @staticmethod
    def _build_service(credentials: GoogleCredentials, **kwargs):
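        # Unwrap the SecretStr tokens and build a google-auth Credentials
        # object that the discovery client can use (and refresh) on our behalf.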
        creds = Credentials(
            token=(
                credentials.access_token.get_secret_value()
                if credentials.access_token
                else None
            ),
            refresh_token=(
                credentials.refresh_token.get_secret_value()
                if credentials.refresh_token
                else None
            ),
            token_uri="https://oauth2.googleapis.com/token",
            client_id=Settings().secrets.google_client_id,
            client_secret=Settings().secrets.google_client_secret,
            scopes=credentials.scopes,
        )
        return build("calendar", "v3", credentials=creds)

    def _read_calendar(
        self,
        service,
        calendarId: str,
        time_min: str,
        time_max: str,
        max_results: int,
        single_events: bool,
        search_term: str | None = None,
        show_deleted: bool = False,
        show_hidden: bool = False,
        page_token: str | None = None,
    ) -> dict:
        """Read calendar events with optional filtering."""
        calendar = service.events()

        # Build query parameters
        params = {
            "calendarId": calendarId,
            "timeMin": time_min,
            "timeMax": time_max,
            "maxResults": max_results,
            "singleEvents": single_events,
            "orderBy": "startTime",
            "showDeleted": show_deleted,
            "showHiddenInvitations": show_hidden,
            **({"pageToken": page_token} if page_token else {}),
        }
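        # Note: the Calendar API only accepts orderBy="startTime" when
        # singleEvents is True (recurring events expanded into instances).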

        # Add search term if provided
        if search_term:
            params["q"] = search_term

        result = calendar.list(**params).execute()
        return result

    def _format_events(self, events: list[dict]) -> list[CalendarEvent]:
        """Format Google Calendar API events into user-friendly structure."""
        formatted_events = []

        for event in events:
            # Determine if all-day event
            is_all_day = "date" in event.get("start", {})

            # Format start and end times
            if is_all_day:
                start_time = event.get("start", {}).get("date", "")
                end_time = event.get("end", {}).get("date", "")
            else:
                # Convert ISO format to more readable format
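                # .replace("Z", "+00:00") below works around
                # datetime.fromisoformat() rejecting a trailing "Z"
                # on Python < 3.11.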
                start_datetime = datetime.fromisoformat(
                    event.get("start", {}).get("dateTime", "").replace("Z", "+00:00")
                )
                end_datetime = datetime.fromisoformat(
                    event.get("end", {}).get("dateTime", "").replace("Z", "+00:00")
                )
                start_time = start_datetime.strftime("%Y-%m-%d %H:%M")
                end_time = end_datetime.strftime("%Y-%m-%d %H:%M")

            # Extract attendees
            attendees = []
            for attendee in event.get("attendees", []):
                if email := attendee.get("email"):
                    attendees.append(email)

            # Check for video call link
            has_video_call = False
            video_link = None
            if conf_data := event.get("conferenceData"):
                if conf_url := conf_data.get("conferenceUrl"):
                    has_video_call = True
                    video_link = conf_url
                elif entry_points := conf_data.get("entryPoints", []):
                    for entry in entry_points:
                        if entry.get("entryPointType") == "video":
                            has_video_call = True
                            video_link = entry.get("uri")
                            break

            # Create formatted event
            formatted_event = CalendarEvent(
                id=event.get("id", ""),
                title=event.get("summary", "Untitled Event"),
                start_time=start_time,
                end_time=end_time,
                is_all_day=is_all_day,
                location=event.get("location"),
                description=event.get("description"),
                organizer=event.get("organizer", {}).get("email"),
                attendees=attendees,
                has_video_call=has_video_call,
                video_link=video_link,
                calendar_link=event.get("htmlLink", ""),
                is_recurring=bool(event.get("recurrence")),
            )

            formatted_events.append(formatted_event)

        return formatted_events


class ReminderPreset(enum.Enum):
    """Common reminder times before an event."""

    TEN_MINUTES = 10
    THIRTY_MINUTES = 30
    ONE_HOUR = 60
    ONE_DAY = 1440  # 24 hours in minutes


class RecurrenceFrequency(enum.Enum):
    """Frequency options for recurring events."""

    DAILY = "DAILY"
    WEEKLY = "WEEKLY"
    MONTHLY = "MONTHLY"
    YEARLY = "YEARLY"


class ExactTiming(BaseModel):
    """Model for specifying start and end times."""

    discriminator: Literal["exact_timing"]
    start_datetime: datetime
    end_datetime: datetime


class DurationTiming(BaseModel):
    """Model for specifying start time and duration."""

    discriminator: Literal["duration_timing"]
    start_datetime: datetime
    duration_minutes: int


class OneTimeEvent(BaseModel):
    """Model for a one-time event."""

    discriminator: Literal["one_time"]


class RecurringEvent(BaseModel):
    """Model for a recurring event."""

    discriminator: Literal["recurring"]
    frequency: RecurrenceFrequency
    count: int


class GoogleCalendarCreateEventBlock(Block):
    class Input(BlockSchema):
        credentials: GoogleCredentialsInput = GoogleCredentialsField(
            ["https://www.googleapis.com/auth/calendar"]
        )
        # Event Details
        event_title: str = SchemaField(description="Title of the event")
        location: str | None = SchemaField(
            description="Location of the event", default=None
        )
        description: str | None = SchemaField(
            description="Description of the event", default=None
        )

        # Timing
        timing: ExactTiming | DurationTiming = SchemaField(
            discriminator="discriminator",
            advanced=False,
            description="Specify when the event starts and ends",
            default_factory=lambda: DurationTiming(
                discriminator="duration_timing",
                start_datetime=datetime.now().replace(microsecond=0, second=0, minute=0)
                + timedelta(hours=1),
                duration_minutes=60,
            ),
        )

        # Calendar selection
        calendar_id: str = SchemaField(
            description="Calendar ID (use 'primary' for your main calendar)",
            default="primary",
        )

        # Guests
        guest_emails: list[str] = SchemaField(
            description="Email addresses of guests to invite", default_factory=list
        )
        send_notifications: bool = SchemaField(
            description="Send email notifications to guests", default=True
        )

        # Extras
        add_google_meet: bool = SchemaField(
            description="Include a Google Meet video conference link", default=False
        )
        recurrence: OneTimeEvent | RecurringEvent = SchemaField(
            discriminator="discriminator",
            description="Whether the event repeats",
            default_factory=lambda: OneTimeEvent(discriminator="one_time"),
        )
        reminder_minutes: list[ReminderPreset] = SchemaField(
            description="When to send reminders before the event",
            default_factory=lambda: [ReminderPreset.TEN_MINUTES],
        )

    class Output(BlockSchema):
        event_id: str = SchemaField(description="ID of the created event")
        event_link: str = SchemaField(
            description="Link to view the event in Google Calendar"
        )
        error: str = SchemaField(description="Error message if event creation failed")

    def __init__(self):
        settings = Settings()

        super().__init__(
            id="ed2ec950-fbff-4204-94c0-023fb1d625e0",
            description="This block creates a new event in Google Calendar with customizable parameters.",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=GoogleCalendarCreateEventBlock.Input,
            output_schema=GoogleCalendarCreateEventBlock.Output,
            disabled=not GOOGLE_OAUTH_IS_CONFIGURED
            or settings.config.app_env == AppEnvironment.PRODUCTION,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "event_title": "Team Meeting",
                "location": "Conference Room A",
                "description": "Weekly team sync-up",
                "calendar_id": "primary",
                "guest_emails": ["colleague1@example.com", "colleague2@example.com"],
                "add_google_meet": True,
                "send_notifications": True,
                "reminder_minutes": [
                    ReminderPreset.TEN_MINUTES.value,
                    ReminderPreset.ONE_HOUR.value,
                ],
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("event_id", "abc123event_id"),
                ("event_link", "https://calendar.google.com/calendar/event?eid=abc123"),
            ],
            test_mock={
                "_create_event": lambda *args, **kwargs: {
                    "id": "abc123event_id",
                    "htmlLink": "https://calendar.google.com/calendar/event?eid=abc123",
                }
            },
        )

    def run(
        self, input_data: Input, *, credentials: GoogleCredentials, **kwargs
    ) -> BlockOutput:
        try:
            service = self._build_service(credentials, **kwargs)

            # Get start and end times based on the timing option
            if input_data.timing.discriminator == "exact_timing":
                start_datetime = input_data.timing.start_datetime
                end_datetime = input_data.timing.end_datetime
            else:  # duration_timing
                start_datetime = input_data.timing.start_datetime
                end_datetime = start_datetime + timedelta(
                    minutes=input_data.timing.duration_minutes
                )

            # Format datetimes for Google Calendar API
            start_time_str = start_datetime.isoformat()
            end_time_str = end_datetime.isoformat()

            # Build the event body
            event_body = {
                "summary": input_data.event_title,
                "start": {"dateTime": start_time_str},
                "end": {"dateTime": end_time_str},
            }

            # Add optional fields
            if input_data.location:
                event_body["location"] = input_data.location

            if input_data.description:
                event_body["description"] = input_data.description

            # Add guests
            if input_data.guest_emails:
                event_body["attendees"] = [
                    {"email": email} for email in input_data.guest_emails
                ]

            # Add reminders
            if input_data.reminder_minutes:
                event_body["reminders"] = {
                    "useDefault": False,
                    "overrides": [
                        {"method": "popup", "minutes": reminder.value}
                        for reminder in input_data.reminder_minutes
                    ],
                }

            # Add Google Meet
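            # The Calendar API only honors conferenceData.createRequest when
            # the insert call sets conferenceDataVersion=1 (passed below).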
            if input_data.add_google_meet:
                event_body["conferenceData"] = {
                    "createRequest": {
                        "requestId": f"meet-{uuid.uuid4()}",
                        "conferenceSolutionKey": {"type": "hangoutsMeet"},
                    }
                }

            # Add recurrence
            if input_data.recurrence.discriminator == "recurring":
                rule = f"RRULE:FREQ={input_data.recurrence.frequency.value}"
                rule += f";COUNT={input_data.recurrence.count}"
                event_body["recurrence"] = [rule]

            # Create the event
            result = self._create_event(
                service=service,
                calendar_id=input_data.calendar_id,
                event_body=event_body,
                send_notifications=input_data.send_notifications,
                conference_data_version=1 if input_data.add_google_meet else 0,
            )

            yield "event_id", result.get("id", "")
            yield "event_link", result.get("htmlLink", "")
        except Exception as e:
            yield "error", str(e)

    @staticmethod
    def _build_service(credentials: GoogleCredentials, **kwargs):
        creds = Credentials(
            token=(
                credentials.access_token.get_secret_value()
                if credentials.access_token
                else None
            ),
            refresh_token=(
                credentials.refresh_token.get_secret_value()
                if credentials.refresh_token
                else None
            ),
            token_uri="https://oauth2.googleapis.com/token",
            client_id=Settings().secrets.google_client_id,
            client_secret=Settings().secrets.google_client_secret,
            scopes=credentials.scopes,
        )
        return build("calendar", "v3", credentials=creds)

    def _create_event(
        self,
        service,
        calendar_id: str,
        event_body: dict,
        send_notifications: bool = False,
        conference_data_version: int = 0,
    ) -> dict:
        """Create a new event in Google Calendar."""
        calendar = service.events()

        # Make the API call
        result = calendar.insert(
            calendarId=calendar_id,
            body=event_body,
            sendNotifications=send_notifications,
            conferenceDataVersion=conference_data_version,
        ).execute()

        return result
@@ -8,7 +8,6 @@ from pydantic import BaseModel

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
from backend.util.settings import Settings

from ._auth import (
    GOOGLE_OAUTH_IS_CONFIGURED,
@@ -151,8 +150,8 @@ class GmailReadBlock(Block):
                else None
            ),
            token_uri="https://oauth2.googleapis.com/token",
            client_id=Settings().secrets.google_client_id,
            client_secret=Settings().secrets.google_client_secret,
            client_id=kwargs.get("client_id"),
            client_secret=kwargs.get("client_secret"),
            scopes=credentials.scopes,
        )
        return build("gmail", "v1", credentials=creds)

@@ -3,7 +3,6 @@ from googleapiclient.discovery import build

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
from backend.util.settings import AppEnvironment, Settings

from ._auth import (
    GOOGLE_OAUTH_IS_CONFIGURED,
@@ -36,15 +35,13 @@ class GoogleSheetsReadBlock(Block):
        )

    def __init__(self):
        settings = Settings()
        super().__init__(
            id="5724e902-3635-47e9-a108-aaa0263a4988",
            description="This block reads data from a Google Sheets spreadsheet.",
            categories={BlockCategory.DATA},
            input_schema=GoogleSheetsReadBlock.Input,
            output_schema=GoogleSheetsReadBlock.Output,
            disabled=not GOOGLE_OAUTH_IS_CONFIGURED
            or settings.config.app_env == AppEnvironment.PRODUCTION,
            disabled=not GOOGLE_OAUTH_IS_CONFIGURED,
            test_input={
                "spreadsheet_id": "1BxiMVs0XRA5nFMdKvBdBZjgmUUqptlbs74OgvE2upms",
                "range": "Sheet1!A1:B2",
@@ -89,8 +86,8 @@ class GoogleSheetsReadBlock(Block):
                else None
            ),
            token_uri="https://oauth2.googleapis.com/token",
            client_id=Settings().secrets.google_client_id,
            client_secret=Settings().secrets.google_client_secret,
            client_id=kwargs.get("client_id"),
            client_secret=kwargs.get("client_secret"),
            scopes=credentials.scopes,
        )
        return build("sheets", "v4", credentials=creds)

@@ -1,18 +1,11 @@
import json
import logging
from enum import Enum
from io import BufferedReader
from typing import Any

from requests.exceptions import HTTPError, RequestException

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
from backend.util.file import MediaFileType, get_exec_file_path, store_media_file
from backend.util.request import requests

logger = logging.getLogger(name=__name__)


class HttpMethod(Enum):
    GET = "GET"
@@ -36,7 +29,7 @@ class SendWebRequestBlock(Block):
        )
        headers: dict[str, str] = SchemaField(
            description="The headers to include in the request",
            default_factory=dict,
            default={},
        )
        json_format: bool = SchemaField(
            title="JSON format",
@@ -47,16 +40,11 @@ class SendWebRequestBlock(Block):
            description="The body of the request",
            default=None,
        )
        files: dict[str, MediaFileType] = SchemaField(
            description="File fields mapping to MediaFileType for multipart upload",
            default_factory=dict,
        )

    class Output(BlockSchema):
        response: object = SchemaField(description="The response from the server")
        client_error: object = SchemaField(description="Errors on 4xx status codes")
        server_error: object = SchemaField(description="Errors on 5xx status codes")
        error: str = SchemaField(description="Errors for all other exceptions")
        client_error: object = SchemaField(description="The error on 4xx status codes")
        server_error: object = SchemaField(description="The error on 5xx status codes")

    def __init__(self):
        super().__init__(
@@ -67,7 +55,7 @@ class SendWebRequestBlock(Block):
            output_schema=SendWebRequestBlock.Output,
        )

    def run(self, input_data: Input, *, graph_exec_id: str, **kwargs) -> BlockOutput:
    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        body = input_data.body

        if input_data.json_format:
@@ -80,76 +68,20 @@ class SendWebRequestBlock(Block):
                    # we should send it as plain text instead
                    input_data.json_format = False

        # Prepare files for multipart upload using store_media_file
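        # Each MediaFileType is materialized in this execution's file area,
        # opened in binary mode, and closed again in the finally block below.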
        files: dict[str, BufferedReader] = {}
        if input_data.files:
            for field_name, media in input_data.files.items():
                try:
                    rel_path = store_media_file(
                        graph_exec_id, media, return_content=False
                    )
                    abs_path = get_exec_file_path(graph_exec_id, rel_path)
                    files[field_name] = open(abs_path, "rb")
                except Exception as e:
                    yield "error", f"Failed to prepare file '{field_name}': {e}"
                    for f in files.values():
                        try:
                            f.close()
                        except Exception:
                            pass
                    return

        try:
            response = requests.request(
                input_data.method.value,
                input_data.url,
                headers=input_data.headers,
                files=files if files else None,
                json=body if input_data.json_format else None,
                data=body if not input_data.json_format else None,
            )

            if input_data.json_format:
                if response.status_code == 204 or not response.content.strip():
                    result = None
                else:
                    result = response.json()
            else:
                result = response.text
            response = requests.request(
                input_data.method.value,
                input_data.url,
                headers=input_data.headers,
                json=body if input_data.json_format else None,
                data=body if not input_data.json_format else None,
            )
            result = response.json() if input_data.json_format else response.text
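            # Integer-dividing the status code by 100 buckets it into its
            # class (2xx success, 4xx client error, 5xx server error).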

            if response.status_code // 100 == 2:
                yield "response", result

        except HTTPError as e:
            # Handle error responses
            try:
                result = e.response.json() if input_data.json_format else str(e)
            except json.JSONDecodeError:
                result = str(e)

            if 400 <= e.response.status_code < 500:
                yield "client_error", result
            elif 500 <= e.response.status_code < 600:
                yield "server_error", result
            else:
                error_msg = (
                    "Unexpected status code "
                    f"{e.response.status_code} '{e.response.reason}'"
                )
                logger.warning(error_msg)
                yield "error", error_msg

        except RequestException as e:
            # Handle other request-related exceptions
            yield "error", str(e)

        except Exception as e:
            # Catch any other unexpected exceptions
            yield "error", str(e)

        finally:
            # ensure cleanup of file handles
            for f in files.values():
                try:
                    f.close()
                except Exception:
                    pass
            elif response.status_code // 100 == 4:
                yield "client_error", result
            elif response.status_code // 100 == 5:
                yield "server_error", result
            else:
                raise ValueError(f"Unexpected status code: {response.status_code}")

@@ -15,8 +15,7 @@ class HubSpotCompanyBlock(Block):
            description="Operation to perform (create, update, get)", default="get"
        )
        company_data: dict = SchemaField(
            description="Company data for create/update operations",
            default_factory=dict,
            description="Company data for create/update operations", default={}
        )
        domain: str = SchemaField(
            description="Company domain for get/update operations", default=""

@@ -15,8 +15,7 @@ class HubSpotContactBlock(Block):
            description="Operation to perform (create, update, get)", default="get"
        )
        contact_data: dict = SchemaField(
            description="Contact data for create/update operations",
            default_factory=dict,
            description="Contact data for create/update operations", default={}
        )
        email: str = SchemaField(
            description="Email address for get/update operations", default=""

@@ -19,7 +19,7 @@ class HubSpotEngagementBlock(Block):
        )
        email_data: dict = SchemaField(
            description="Email data including recipient, subject, content",
            default_factory=dict,
            default={},
        )
        contact_id: str = SchemaField(
            description="Contact ID for engagement tracking", default=""
@@ -27,6 +27,7 @@ class HubSpotEngagementBlock(Block):
        timeframe_days: int = SchemaField(
            description="Number of days to look back for engagement",
            default=30,
            optional=True,
        )

    class Output(BlockSchema):

@@ -142,16 +142,6 @@ class IdeogramModelBlock(Block):
            title="Color Palette Preset",
            advanced=True,
        )
        custom_color_palette: Optional[list[str]] = SchemaField(
            description=(
                "Only available for model version V_2 or V_2_TURBO. Provide one or more color hex codes "
                "(e.g., ['#000030', '#1C0C47', '#9900FF', '#4285F4', '#FFFFFF']) to define a custom color "
                "palette. Only used if 'color_palette_name' is 'NONE'."
            ),
            default=None,
            title="Custom Color Palette",
            advanced=True,
        )

    class Output(BlockSchema):
        result: str = SchemaField(description="Generated image URL")
@@ -161,7 +151,7 @@ class IdeogramModelBlock(Block):
        super().__init__(
            id="6ab085e2-20b3-4055-bc3e-08036e01eca6",
            description="This block runs Ideogram models with both simple and advanced settings.",
            categories={BlockCategory.AI, BlockCategory.MULTIMEDIA},
            categories={BlockCategory.AI},
            input_schema=IdeogramModelBlock.Input,
            output_schema=IdeogramModelBlock.Output,
            test_input={
@@ -174,13 +164,6 @@ class IdeogramModelBlock(Block):
                "style_type": StyleType.AUTO,
                "negative_prompt": None,
                "color_palette_name": ColorPalettePreset.NONE,
                "custom_color_palette": [
                    "#000030",
                    "#1C0C47",
                    "#9900FF",
                    "#4285F4",
                    "#FFFFFF",
                ],
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_output=[
@@ -190,7 +173,7 @@ class IdeogramModelBlock(Block):
                ),
            ],
            test_mock={
                "run_model": lambda api_key, model_name, prompt, seed, aspect_ratio, magic_prompt_option, style_type, negative_prompt, color_palette_name, custom_colors: "https://ideogram.ai/api/images/test-generated-image-url.png",
                "run_model": lambda api_key, model_name, prompt, seed, aspect_ratio, magic_prompt_option, style_type, negative_prompt, color_palette_name: "https://ideogram.ai/api/images/test-generated-image-url.png",
                "upscale_image": lambda api_key, image_url: "https://ideogram.ai/api/images/test-upscaled-image-url.png",
            },
            test_credentials=TEST_CREDENTIALS,
@@ -212,7 +195,6 @@ class IdeogramModelBlock(Block):
                style_type=input_data.style_type.value,
                negative_prompt=input_data.negative_prompt,
                color_palette_name=input_data.color_palette_name.value,
                custom_colors=input_data.custom_color_palette,
            )

            # Step 2: Upscale the image if requested
@@ -235,7 +217,6 @@ class IdeogramModelBlock(Block):
        style_type: str,
        negative_prompt: Optional[str],
        color_palette_name: str,
        custom_colors: Optional[list[str]],
    ):
        url = "https://api.ideogram.ai/generate"
        headers = {
@@ -260,11 +241,7 @@ class IdeogramModelBlock(Block):
        data["image_request"]["negative_prompt"] = negative_prompt

        if color_palette_name != "NONE":
            data["color_palette"] = {"name": color_palette_name}
        elif custom_colors:
            data["color_palette"] = {
                "members": [{"color_hex": color} for color in custom_colors]
            }
        data["image_request"]["color_palette"] = {"name": color_palette_name}

        try:
            response = requests.post(url, json=data, headers=headers)
@@ -290,7 +267,9 @@ class IdeogramModelBlock(Block):
        response = requests.post(
            url,
            headers=headers,
            data={"image_request": "{}"},
            data={
                "image_request": "{}",  # Empty JSON object
            },
            files=files,
        )


@@ -1,556 +0,0 @@
import copy
from datetime import date, time
from typing import Any, Optional

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema, BlockType
from backend.data.model import SchemaField
from backend.util.file import store_media_file
from backend.util.mock import MockObject
from backend.util.settings import Config
from backend.util.text import TextFormatter
from backend.util.type import LongTextType, MediaFileType, ShortTextType

formatter = TextFormatter()
config = Config()


class AgentInputBlock(Block):
    """
    This block is used to provide input to the graph.

    It takes in a value, name, description, a list of default values, and a
    bool to limit selection to those default values.

    It outputs the value passed as input.
    """

    class Input(BlockSchema):
        name: str = SchemaField(description="The name of the input.")
        value: Any = SchemaField(
            description="The value to be passed as input.",
            default=None,
        )
        title: str | None = SchemaField(
            description="The title of the input.", default=None, advanced=True
        )
        description: str | None = SchemaField(
            description="The description of the input.",
            default=None,
            advanced=True,
        )
        placeholder_values: list = SchemaField(
            description="The placeholder values to be passed as input.",
            default_factory=list,
            advanced=True,
            hidden=True,
        )
        advanced: bool = SchemaField(
            description="Whether to show the input in the advanced section, if the field is not required.",
            default=False,
            advanced=True,
        )
        secret: bool = SchemaField(
            description="Whether the input should be treated as a secret.",
            default=False,
            advanced=True,
        )

    def generate_schema(self):
        schema = copy.deepcopy(self.get_field_schema("value"))
        if possible_values := self.placeholder_values:
            schema["enum"] = possible_values
        return schema

    class Output(BlockSchema):
        result: Any = SchemaField(description="The value passed as input.")

    def __init__(self, **kwargs):
        super().__init__(
            **{
                "id": "c0a8e994-ebf1-4a9c-a4d8-89d09c86741b",
                "description": "Base block for user inputs.",
                "input_schema": AgentInputBlock.Input,
                "output_schema": AgentInputBlock.Output,
                "test_input": [
                    {
                        "value": "Hello, World!",
                        "name": "input_1",
                        "description": "Example test input.",
                        "placeholder_values": [],
                    },
                    {
                        "value": "Hello, World!",
                        "name": "input_2",
                        "description": "Example test input with placeholders.",
                        "placeholder_values": ["Hello, World!"],
                    },
                ],
                "test_output": [
                    ("result", "Hello, World!"),
                    ("result", "Hello, World!"),
                ],
                "categories": {BlockCategory.INPUT, BlockCategory.BASIC},
                "block_type": BlockType.INPUT,
                "static_output": True,
                **kwargs,
            }
        )

    def run(self, input_data: Input, *args, **kwargs) -> BlockOutput:
        if input_data.value is not None:
            yield "result", input_data.value


class AgentOutputBlock(Block):
    """
    Records the output of the graph for users to see.

    Behavior:
        If `format` is provided and the `value` is of a type that can be formatted,
        the block attempts to format the recorded_value using the `format`.
        If formatting fails or no `format` is provided, the raw `value` is output.
    """

    class Input(BlockSchema):
        value: Any = SchemaField(
            description="The value to be recorded as output.",
            default=None,
            advanced=False,
        )
        name: str = SchemaField(description="The name of the output.")
        title: str | None = SchemaField(
            description="The title of the output.",
            default=None,
            advanced=True,
        )
        description: str | None = SchemaField(
            description="The description of the output.",
            default=None,
            advanced=True,
        )
        format: str = SchemaField(
            description="The format string to be used to format the recorded_value. Use Jinja2 syntax.",
            default="",
            advanced=True,
        )
        advanced: bool = SchemaField(
            description="Whether to treat the output as advanced.",
            default=False,
            advanced=True,
        )
        secret: bool = SchemaField(
            description="Whether the output should be treated as a secret.",
            default=False,
            advanced=True,
        )

    def generate_schema(self):
        return self.get_field_schema("value")

    class Output(BlockSchema):
        output: Any = SchemaField(description="The value recorded as output.")
        name: Any = SchemaField(description="The name of the value recorded as output.")

    def __init__(self):
        super().__init__(
            id="363ae599-353e-4804-937e-b2ee3cef3da4",
            description="Stores the output of the graph for users to see.",
            input_schema=AgentOutputBlock.Input,
            output_schema=AgentOutputBlock.Output,
            test_input=[
                {
                    "value": "Hello, World!",
                    "name": "output_1",
                    "description": "This is a test output.",
                    "format": "{{ output_1 }}!!",
                },
                {
                    "value": "42",
                    "name": "output_2",
                    "description": "This is another test output.",
                    "format": "{{ output_2 }}",
                },
                {
                    "value": MockObject(value="!!", key="key"),
                    "name": "output_3",
                    "description": "This is a test output with a mock object.",
                    "format": "{{ output_3 }}",
                },
            ],
            test_output=[
                ("output", "Hello, World!!!"),
                ("output", "42"),
                ("output", MockObject(value="!!", key="key")),
            ],
            categories={BlockCategory.OUTPUT, BlockCategory.BASIC},
            block_type=BlockType.OUTPUT,
            static_output=True,
        )

    def run(self, input_data: Input, *args, **kwargs) -> BlockOutput:
        """
        Attempts to format the recorded_value using the fmt_string if provided.
        If formatting fails or no fmt_string is given, returns the original recorded_value.
        """
|
||||
if input_data.format:
|
||||
try:
|
||||
yield "output", formatter.format_string(
|
||||
input_data.format, {input_data.name: input_data.value}
|
||||
)
|
||||
except Exception as e:
|
||||
yield "output", f"Error: {e}, {input_data.value}"
|
||||
else:
|
||||
yield "output", input_data.value
|
||||
yield "name", input_data.name
|
||||
|
||||
|
||||
class AgentShortTextInputBlock(AgentInputBlock):
    class Input(AgentInputBlock.Input):
        value: Optional[ShortTextType] = SchemaField(
            description="Short text input.",
            default=None,
            advanced=False,
            title="Default Value",
        )

    class Output(AgentInputBlock.Output):
        result: str = SchemaField(description="Short text result.")

    def __init__(self):
        super().__init__(
            id="7fcd3bcb-8e1b-4e69-903d-32d3d4a92158",
            description="Block for short text input (single-line).",
            disabled=not config.enable_agent_input_subtype_blocks,
            input_schema=AgentShortTextInputBlock.Input,
            output_schema=AgentShortTextInputBlock.Output,
            test_input=[
                {
                    "value": "Hello",
                    "name": "short_text_1",
                    "description": "Short text example 1",
                    "placeholder_values": [],
                },
                {
                    "value": "Quick test",
                    "name": "short_text_2",
                    "description": "Short text example 2",
                    "placeholder_values": ["Quick test", "Another option"],
                },
            ],
            test_output=[
                ("result", "Hello"),
                ("result", "Quick test"),
            ],
        )


class AgentLongTextInputBlock(AgentInputBlock):
    class Input(AgentInputBlock.Input):
        value: Optional[LongTextType] = SchemaField(
            description="Long text input (potentially multi-line).",
            default=None,
            advanced=False,
            title="Default Value",
        )

    class Output(AgentInputBlock.Output):
        result: str = SchemaField(description="Long text result.")

    def __init__(self):
        super().__init__(
            id="90a56ffb-7024-4b2b-ab50-e26c5e5ab8ba",
            description="Block for long text input (multi-line).",
            disabled=not config.enable_agent_input_subtype_blocks,
            input_schema=AgentLongTextInputBlock.Input,
            output_schema=AgentLongTextInputBlock.Output,
            test_input=[
                {
                    "value": "Lorem ipsum dolor sit amet...",
                    "name": "long_text_1",
                    "description": "Long text example 1",
                    "placeholder_values": [],
                },
                {
                    "value": "Another multiline text input.",
                    "name": "long_text_2",
                    "description": "Long text example 2",
                    "placeholder_values": ["Another multiline text input."],
                },
            ],
            test_output=[
                ("result", "Lorem ipsum dolor sit amet..."),
                ("result", "Another multiline text input."),
            ],
        )


class AgentNumberInputBlock(AgentInputBlock):
    class Input(AgentInputBlock.Input):
        value: Optional[int] = SchemaField(
            description="Number input.",
            default=None,
            advanced=False,
            title="Default Value",
        )

    class Output(AgentInputBlock.Output):
        result: int = SchemaField(description="Number result.")

    def __init__(self):
        super().__init__(
            id="96dae2bb-97a2-41c2-bd2f-13a3b5a8ea98",
            description="Block for number input.",
            disabled=not config.enable_agent_input_subtype_blocks,
            input_schema=AgentNumberInputBlock.Input,
            output_schema=AgentNumberInputBlock.Output,
            test_input=[
                {
                    "value": 42,
                    "name": "number_input_1",
                    "description": "Number example 1",
                    "placeholder_values": [],
                },
                {
                    "value": 314,
                    "name": "number_input_2",
                    "description": "Number example 2",
                    "placeholder_values": [314, 2718],
                },
            ],
            test_output=[
                ("result", 42),
                ("result", 314),
            ],
        )


class AgentDateInputBlock(AgentInputBlock):
    class Input(AgentInputBlock.Input):
        value: Optional[date] = SchemaField(
            description="Date input (YYYY-MM-DD).",
            default=None,
            advanced=False,
            title="Default Value",
        )

    class Output(AgentInputBlock.Output):
        result: date = SchemaField(description="Date result.")

    def __init__(self):
        super().__init__(
            id="7e198b09-4994-47db-8b4d-952d98241817",
            description="Block for date input.",
            disabled=not config.enable_agent_input_subtype_blocks,
            input_schema=AgentDateInputBlock.Input,
            output_schema=AgentDateInputBlock.Output,
            test_input=[
                {
                    # If your system can parse JSON date strings to date objects
                    "value": str(date(2025, 3, 19)),
                    "name": "date_input_1",
                    "description": "Example date input 1",
                },
                {
                    "value": str(date(2023, 12, 31)),
                    "name": "date_input_2",
                    "description": "Example date input 2",
                },
            ],
            test_output=[
                ("result", date(2025, 3, 19)),
                ("result", date(2023, 12, 31)),
            ],
        )


class AgentTimeInputBlock(AgentInputBlock):
    class Input(AgentInputBlock.Input):
        value: Optional[time] = SchemaField(
            description="Time input (HH:MM:SS).",
            default=None,
            advanced=False,
            title="Default Value",
        )

    class Output(AgentInputBlock.Output):
        result: time = SchemaField(description="Time result.")

    def __init__(self):
        super().__init__(
            id="2a1c757e-86cf-4c7e-aacf-060dc382e434",
            description="Block for time input.",
            disabled=not config.enable_agent_input_subtype_blocks,
            input_schema=AgentTimeInputBlock.Input,
            output_schema=AgentTimeInputBlock.Output,
            test_input=[
                {
                    "value": str(time(9, 30, 0)),
                    "name": "time_input_1",
                    "description": "Time example 1",
                },
                {
                    "value": str(time(23, 59, 59)),
                    "name": "time_input_2",
                    "description": "Time example 2",
                },
            ],
            test_output=[
                ("result", time(9, 30, 0)),
                ("result", time(23, 59, 59)),
            ],
        )


class AgentFileInputBlock(AgentInputBlock):
    """
    A simplified file-upload block. In real usage, you might have a custom
    file type or handle binary data. Here, we'll store a string path as the example.
    """

    class Input(AgentInputBlock.Input):
        value: Optional[MediaFileType] = SchemaField(
            description="Path or reference to an uploaded file.",
            default=None,
            advanced=False,
            title="Default Value",
        )

    class Output(AgentInputBlock.Output):
        result: str = SchemaField(description="File reference/path result.")

    def __init__(self):
        super().__init__(
            id="95ead23f-8283-4654-aef3-10c053b74a31",
            description="Block for file upload input (string path for example).",
            disabled=not config.enable_agent_input_subtype_blocks,
            input_schema=AgentFileInputBlock.Input,
            output_schema=AgentFileInputBlock.Output,
            test_input=[
                {
                    "value": "data:image/png;base64,MQ==",
                    "name": "file_upload_1",
                    "description": "Example file upload 1",
                },
            ],
            test_output=[
                ("result", str),
            ],
        )

    def run(
        self,
        input_data: Input,
        *,
        graph_exec_id: str,
        **kwargs,
    ) -> BlockOutput:
        if not input_data.value:
            return

        file_path = store_media_file(
            graph_exec_id=graph_exec_id,
            file=input_data.value,
            return_content=False,
        )
        yield "result", file_path


class AgentDropdownInputBlock(AgentInputBlock):
    """
    A specialized text input block that relies on placeholder_values to present a dropdown.
    """

    class Input(AgentInputBlock.Input):
        value: Optional[str] = SchemaField(
            description="Text selected from a dropdown.",
            default=None,
            advanced=False,
            title="Default Value",
        )
        placeholder_values: list = SchemaField(
            description="Possible values for the dropdown.",
            default_factory=list,
            advanced=False,
            title="Dropdown Options",
        )

    class Output(AgentInputBlock.Output):
        result: str = SchemaField(description="Selected dropdown value.")

    def __init__(self):
        super().__init__(
            id="655d6fdf-a334-421c-b733-520549c07cd1",
            description="Block for dropdown text selection.",
            disabled=not config.enable_agent_input_subtype_blocks,
            input_schema=AgentDropdownInputBlock.Input,
            output_schema=AgentDropdownInputBlock.Output,
            test_input=[
                {
                    "value": "Option A",
                    "name": "dropdown_1",
                    "placeholder_values": ["Option A", "Option B", "Option C"],
                    "description": "Dropdown example 1",
                },
                {
                    "value": "Option C",
                    "name": "dropdown_2",
                    "placeholder_values": ["Option A", "Option B", "Option C"],
                    "description": "Dropdown example 2",
                },
            ],
            test_output=[
                ("result", "Option A"),
                ("result", "Option C"),
            ],
        )


class AgentToggleInputBlock(AgentInputBlock):
    class Input(AgentInputBlock.Input):
        value: bool = SchemaField(
            description="Boolean toggle input.",
            default=False,
            advanced=False,
            title="Default Value",
        )

    class Output(AgentInputBlock.Output):
        result: bool = SchemaField(description="Boolean toggle result.")

    def __init__(self):
        super().__init__(
            id="cbf36ab5-df4a-43b6-8a7f-f7ed8652116e",
            description="Block for boolean toggle input.",
            disabled=not config.enable_agent_input_subtype_blocks,
            input_schema=AgentToggleInputBlock.Input,
            output_schema=AgentToggleInputBlock.Output,
            test_input=[
                {
                    "value": True,
                    "name": "toggle_1",
                    "description": "Toggle example 1",
                },
                {
                    "value": False,
                    "name": "toggle_2",
                    "description": "Toggle example 2",
                },
            ],
            test_output=[
                ("result", True),
                ("result", False),
            ],
        )


IO_BLOCK_IDs = [
    AgentInputBlock().id,
    AgentOutputBlock().id,
    AgentShortTextInputBlock().id,
    AgentLongTextInputBlock().id,
    AgentNumberInputBlock().id,
    AgentDateInputBlock().id,
    AgentTimeInputBlock().id,
    AgentFileInputBlock().id,
    AgentDropdownInputBlock().id,
    AgentToggleInputBlock().id,
]
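
For orientation, here is a minimal standalone sketch of the Jinja2-style formatting behavior that AgentOutputBlock's `format` field describes. It uses the `jinja2` package directly; the real block delegates to the project's `formatter.format_string` helper, so the function and template names below are illustrative only.

# Minimal sketch of the Jinja2 formatting behavior described above.
# Assumes the `jinja2` package; the actual block uses the project's
# `formatter.format_string` helper instead, so this is illustrative only.
from jinja2 import BaseLoader, Environment


def format_output(format_str: str, name: str, value) -> str:
    # The output value is exposed to the template under its output name,
    # e.g. "{{ output_1 }}!!" with name="output_1" and value="Hello, World!".
    template = Environment(loader=BaseLoader()).from_string(format_str)
    return template.render(**{name: value})


assert format_output("{{ output_1 }}!!", "output_1", "Hello, World!") == "Hello, World!!!"
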
@@ -11,13 +11,13 @@ class StepThroughItemsBlock(Block):
             advanced=False,
             description="The list or dictionary of items to iterate over",
             placeholder="[1, 2, 3, 4, 5] or {'key1': 'value1', 'key2': 'value2'}",
-            default_factory=list,
+            default=[],
         )
         items_object: dict = SchemaField(
             advanced=False,
             description="The list or dictionary of items to iterate over",
             placeholder="[1, 2, 3, 4, 5] or {'key1': 'value1', 'key2': 'value2'}",
-            default_factory=dict,
+            default={},
         )
         items_str: str = SchemaField(
             advanced=False,
@@ -23,7 +23,7 @@ class JinaChunkingBlock(Block):
     class Output(BlockSchema):
         chunks: list = SchemaField(description="List of chunked texts")
         tokens: list = SchemaField(
-            description="List of token information for each chunk",
+            description="List of token information for each chunk", optional=True
         )

     def __init__(self):
@@ -1,4 +1,4 @@
-from urllib.parse import quote
+from groq._utils._utils import quote

 from backend.blocks.jina._auth import (
     TEST_CREDENTIALS,
@@ -1,272 +0,0 @@
from __future__ import annotations

import json
from typing import Any, Dict, Optional

from backend.blocks.linear._auth import LinearCredentials
from backend.blocks.linear.models import (
    CreateCommentResponse,
    CreateIssueResponse,
    Issue,
    Project,
)
from backend.util.request import Requests


class LinearAPIException(Exception):
    def __init__(self, message: str, status_code: int):
        super().__init__(message)
        self.status_code = status_code


class LinearClient:
    """Client for the Linear API

    If you're looking for the schema: https://studio.apollographql.com/public/Linear-API/variant/current/schema
    """

    API_URL = "https://api.linear.app/graphql"

    def __init__(
        self,
        credentials: LinearCredentials | None = None,
        custom_requests: Optional[Requests] = None,
    ):
        if custom_requests:
            self._requests = custom_requests
        else:
            headers: Dict[str, str] = {
                "Content-Type": "application/json",
            }
            if credentials:
                headers["Authorization"] = credentials.auth_header()

            self._requests = Requests(
                extra_headers=headers,
                trusted_origins=["https://api.linear.app"],
                raise_for_status=False,
            )

    def _execute_graphql_request(
        self, query: str, variables: dict | None = None
    ) -> Any:
        """
        Executes a GraphQL request against the Linear API and returns the response data.

        Args:
            query: The GraphQL query string.
            variables (optional): Any GraphQL query variables

        Returns:
            The parsed JSON response data, or raises a LinearAPIException on error.
        """
        payload: Dict[str, Any] = {"query": query}
        if variables:
            payload["variables"] = variables

        response = self._requests.post(self.API_URL, json=payload)

        if not response.ok:
            try:
                error_data = response.json()
                error_message = error_data.get("errors", [{}])[0].get("message", "")
            except json.JSONDecodeError:
                error_message = response.text

            raise LinearAPIException(
                f"Linear API request failed ({response.status_code}): {error_message}",
                response.status_code,
            )

        response_data = response.json()
        if "errors" in response_data:
            error_messages = [
                error.get("message", "") for error in response_data["errors"]
            ]
            raise LinearAPIException(
                f"Linear API returned errors: {', '.join(error_messages)}",
                response.status_code,
            )

        return response_data["data"]

    def query(self, query: str, variables: Optional[dict] = None) -> dict:
        """Executes a GraphQL query.

        Args:
            query: The GraphQL query string.
            variables: Query variables, if any.

        Returns:
            The response data.
        """
        return self._execute_graphql_request(query, variables)

    def mutate(self, mutation: str, variables: Optional[dict] = None) -> dict:
        """Executes a GraphQL mutation.

        Args:
            mutation: The GraphQL mutation string.
            variables: Query variables, if any.

        Returns:
            The response data.
        """
        return self._execute_graphql_request(mutation, variables)

    def try_create_comment(self, issue_id: str, comment: str) -> CreateCommentResponse:
        try:
            mutation = """
                mutation CommentCreate($input: CommentCreateInput!) {
                    commentCreate(input: $input) {
                        success
                        comment {
                            id
                            body
                        }
                    }
                }
            """

            variables = {
                "input": {
                    "body": comment,
                    "issueId": issue_id,
                }
            }

            added_comment = self.mutate(mutation, variables)
            # Select the commentCreate field from the mutation response
            return CreateCommentResponse(**added_comment["commentCreate"])
        except LinearAPIException as e:
            raise e

    def try_get_team_by_name(self, team_name: str) -> str:
        try:
            query = """
                query GetTeamId($searchTerm: String!) {
                    teams(filter: {
                        or: [
                            { name: { eqIgnoreCase: $searchTerm } },
                            { key: { eqIgnoreCase: $searchTerm } }
                        ]
                    }) {
                        nodes {
                            id
                            name
                            key
                        }
                    }
                }
            """

            variables: dict[str, Any] = {
                "searchTerm": team_name,
            }

            team_id = self.query(query, variables)
            return team_id["teams"]["nodes"][0]["id"]
        except LinearAPIException as e:
            raise e

    def try_create_issue(
        self,
        team_id: str,
        title: str,
        description: str | None = None,
        priority: int | None = None,
        project_id: str | None = None,
    ) -> CreateIssueResponse:
        try:
            mutation = """
                mutation IssueCreate($input: IssueCreateInput!) {
                    issueCreate(input: $input) {
                        issue {
                            title
                            description
                            id
                            identifier
                            priority
                        }
                    }
                }
            """

            variables: dict[str, Any] = {
                "input": {
                    "teamId": team_id,
                    "title": title,
                }
            }

            if project_id:
                variables["input"]["projectId"] = project_id

            if description:
                variables["input"]["description"] = description

            if priority:
                variables["input"]["priority"] = priority

            added_issue = self.mutate(mutation, variables)
            return CreateIssueResponse(**added_issue["issueCreate"])
        except LinearAPIException as e:
            raise e

    def try_search_projects(self, term: str) -> list[Project]:
        try:
            query = """
                query SearchProjects($term: String!, $includeComments: Boolean!) {
                    searchProjects(term: $term, includeComments: $includeComments) {
                        nodes {
                            id
                            name
                            description
                            priority
                            progress
                            content
                        }
                    }
                }
            """

            variables: dict[str, Any] = {
                "term": term,
                "includeComments": True,
            }

            projects = self.query(query, variables)
            return [
                Project(**project) for project in projects["searchProjects"]["nodes"]
            ]
        except LinearAPIException as e:
            raise e

    def try_search_issues(self, term: str) -> list[Issue]:
        try:
            query = """
                query SearchIssues($term: String!, $includeComments: Boolean!) {
                    searchIssues(term: $term, includeComments: $includeComments) {
                        nodes {
                            id
                            identifier
                            title
                            description
                            priority
                        }
                    }
                }
            """

            variables: dict[str, Any] = {
                "term": term,
                "includeComments": True,
            }

            issues = self.query(query, variables)
            return [Issue(**issue) for issue in issues["searchIssues"]["nodes"]]
        except LinearAPIException as e:
            raise e
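
A hedged usage sketch of the client above. The API key value and search term are placeholders; `APIKeyCredentials` comes from `backend.data.model` as imported by the `_auth` module that follows.

# Hypothetical usage of the LinearClient defined above; the key is a placeholder.
from pydantic import SecretStr

from backend.data.model import APIKeyCredentials

credentials = APIKeyCredentials(
    id="00000000-0000-0000-0000-000000000000",  # placeholder id
    provider="linear",
    title="Example Linear API key",
    api_key=SecretStr("lin_api_..."),  # placeholder key
    expires_at=None,
)
client = LinearClient(credentials=credentials)
issues = client.try_search_issues(term="onboarding bug")  # illustrative term
for issue in issues:
    print(issue.identifier, issue.title)
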
@@ -1,101 +0,0 @@
from enum import Enum
from typing import Literal

from pydantic import SecretStr

from backend.data.model import (
    APIKeyCredentials,
    CredentialsField,
    CredentialsMetaInput,
    OAuth2Credentials,
)
from backend.integrations.providers import ProviderName
from backend.util.settings import Secrets

secrets = Secrets()
LINEAR_OAUTH_IS_CONFIGURED = bool(
    secrets.linear_client_id and secrets.linear_client_secret
)

LinearCredentials = OAuth2Credentials | APIKeyCredentials
# LinearCredentialsInput = CredentialsMetaInput[
#     Literal[ProviderName.LINEAR],
#     Literal["oauth2", "api_key"] if LINEAR_OAUTH_IS_CONFIGURED else Literal["oauth2"],
# ]
LinearCredentialsInput = CredentialsMetaInput[
    Literal[ProviderName.LINEAR], Literal["oauth2"]
]


# (required) Comma separated list of scopes:

# read - (Default) Read access for the user's account. This scope will always be present.

# write - Write access for the user's account. If your application only needs to create comments, use a more targeted scope

# issues:create - Allows creating new issues and their attachments

# comments:create - Allows creating new issue comments

# timeSchedule:write - Allows creating and modifying time schedules


# admin - Full access to admin level endpoints. You should never ask for this permission unless it's absolutely needed
class LinearScope(str, Enum):
    READ = "read"
    WRITE = "write"
    ISSUES_CREATE = "issues:create"
    COMMENTS_CREATE = "comments:create"
    TIME_SCHEDULE_WRITE = "timeSchedule:write"
    ADMIN = "admin"


def LinearCredentialsField(scopes: list[LinearScope]) -> LinearCredentialsInput:
    """
    Creates a Linear credentials input on a block.

    Params:
        scope: The authorization scope needed for the block to work. ([list of available scopes](https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/scopes-for-oauth-apps#available-scopes))
    """  # noqa
    return CredentialsField(
        required_scopes=set([LinearScope.READ.value]).union(
            set([scope.value for scope in scopes])
        ),
        description="The Linear integration can be used with OAuth, "
        "or any API key with sufficient permissions for the blocks it is used on.",
    )


TEST_CREDENTIALS_OAUTH = OAuth2Credentials(
    id="01234567-89ab-cdef-0123-456789abcdef",
    provider="linear",
    title="Mock Linear API key",
    username="mock-linear-username",
    access_token=SecretStr("mock-linear-access-token"),
    access_token_expires_at=None,
    refresh_token=SecretStr("mock-linear-refresh-token"),
    refresh_token_expires_at=None,
    scopes=["mock-linear-scopes"],
)

TEST_CREDENTIALS_API_KEY = APIKeyCredentials(
    id="01234567-89ab-cdef-0123-456789abcdef",
    provider="linear",
    title="Mock Linear API key",
    api_key=SecretStr("mock-linear-api-key"),
    expires_at=None,
)

TEST_CREDENTIALS_INPUT_OAUTH = {
    "provider": TEST_CREDENTIALS_OAUTH.provider,
    "id": TEST_CREDENTIALS_OAUTH.id,
    "type": TEST_CREDENTIALS_OAUTH.type,
    "title": TEST_CREDENTIALS_OAUTH.type,
}

TEST_CREDENTIALS_INPUT_API_KEY = {
    "provider": TEST_CREDENTIALS_API_KEY.provider,
    "id": TEST_CREDENTIALS_API_KEY.id,
    "type": TEST_CREDENTIALS_API_KEY.type,
    "title": TEST_CREDENTIALS_API_KEY.type,
}
@@ -1,83 +0,0 @@
from backend.blocks.linear._api import LinearAPIException, LinearClient
from backend.blocks.linear._auth import (
    LINEAR_OAUTH_IS_CONFIGURED,
    TEST_CREDENTIALS_INPUT_OAUTH,
    TEST_CREDENTIALS_OAUTH,
    LinearCredentials,
    LinearCredentialsField,
    LinearCredentialsInput,
    LinearScope,
)
from backend.blocks.linear.models import CreateCommentResponse
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField


class LinearCreateCommentBlock(Block):
    """Block for creating comments on Linear issues"""

    class Input(BlockSchema):
        credentials: LinearCredentialsInput = LinearCredentialsField(
            scopes=[LinearScope.COMMENTS_CREATE],
        )
        issue_id: str = SchemaField(description="ID of the issue to comment on")
        comment: str = SchemaField(description="Comment text to add to the issue")

    class Output(BlockSchema):
        comment_id: str = SchemaField(description="ID of the created comment")
        comment_body: str = SchemaField(
            description="Text content of the created comment"
        )
        error: str = SchemaField(description="Error message if comment creation failed")

    def __init__(self):
        super().__init__(
            id="8f7d3a2e-9b5c-4c6a-8f1d-7c8b3e4a5d6c",
            description="Creates a new comment on a Linear issue",
            input_schema=self.Input,
            output_schema=self.Output,
            categories={BlockCategory.PRODUCTIVITY, BlockCategory.ISSUE_TRACKING},
            test_input={
                "issue_id": "TEST-123",
                "comment": "Test comment",
                "credentials": TEST_CREDENTIALS_INPUT_OAUTH,
            },
            disabled=not LINEAR_OAUTH_IS_CONFIGURED,
            test_credentials=TEST_CREDENTIALS_OAUTH,
            test_output=[("comment_id", "abc123"), ("comment_body", "Test comment")],
            test_mock={
                "create_comment": lambda *args, **kwargs: (
                    "abc123",
                    "Test comment",
                )
            },
        )

    @staticmethod
    def create_comment(
        credentials: LinearCredentials, issue_id: str, comment: str
    ) -> tuple[str, str]:
        client = LinearClient(credentials=credentials)
        response: CreateCommentResponse = client.try_create_comment(
            issue_id=issue_id, comment=comment
        )
        return response.comment.id, response.comment.body

    def run(
        self, input_data: Input, *, credentials: LinearCredentials, **kwargs
    ) -> BlockOutput:
        """Execute the comment creation"""
        try:
            comment_id, comment_body = self.create_comment(
                credentials=credentials,
                issue_id=input_data.issue_id,
                comment=input_data.comment,
            )

            yield "comment_id", comment_id
            yield "comment_body", comment_body

        except LinearAPIException as e:
            yield "error", str(e)
        except Exception as e:
            yield "error", f"Unexpected error: {str(e)}"
@@ -1,189 +0,0 @@
from backend.blocks.linear._api import LinearAPIException, LinearClient
from backend.blocks.linear._auth import (
    LINEAR_OAUTH_IS_CONFIGURED,
    TEST_CREDENTIALS_INPUT_OAUTH,
    TEST_CREDENTIALS_OAUTH,
    LinearCredentials,
    LinearCredentialsField,
    LinearCredentialsInput,
    LinearScope,
)
from backend.blocks.linear.models import CreateIssueResponse, Issue
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField


class LinearCreateIssueBlock(Block):
    """Block for creating issues on Linear"""

    class Input(BlockSchema):
        credentials: LinearCredentialsInput = LinearCredentialsField(
            scopes=[LinearScope.ISSUES_CREATE],
        )
        title: str = SchemaField(description="Title of the issue")
        description: str | None = SchemaField(description="Description of the issue")
        team_name: str = SchemaField(
            description="Name of the team to create the issue on"
        )
        priority: int | None = SchemaField(
            description="Priority of the issue",
            default=None,
            ge=0,
            le=4,
        )
        project_name: str | None = SchemaField(
            description="Name of the project to create the issue on",
            default=None,
        )

    class Output(BlockSchema):
        issue_id: str = SchemaField(description="ID of the created issue")
        issue_title: str = SchemaField(description="Title of the created issue")
        error: str = SchemaField(description="Error message if issue creation failed")

    def __init__(self):
        super().__init__(
            id="f9c68f55-dcca-40a8-8771-abf9601680aa",
            description="Creates a new issue on Linear",
            disabled=not LINEAR_OAUTH_IS_CONFIGURED,
            input_schema=self.Input,
            output_schema=self.Output,
            categories={BlockCategory.PRODUCTIVITY, BlockCategory.ISSUE_TRACKING},
            test_input={
                "title": "Test issue",
                "description": "Test description",
                "team_name": "Test team",
                "project_name": "Test project",
                "credentials": TEST_CREDENTIALS_INPUT_OAUTH,
            },
            test_credentials=TEST_CREDENTIALS_OAUTH,
            test_output=[("issue_id", "abc123"), ("issue_title", "Test issue")],
            test_mock={
                "create_issue": lambda *args, **kwargs: (
                    "abc123",
                    "Test issue",
                )
            },
        )

    @staticmethod
    def create_issue(
        credentials: LinearCredentials,
        team_name: str,
        title: str,
        description: str | None = None,
        priority: int | None = None,
        project_name: str | None = None,
    ) -> tuple[str, str]:
        client = LinearClient(credentials=credentials)
        team_id = client.try_get_team_by_name(team_name=team_name)
        project_id: str | None = None
        if project_name:
            projects = client.try_search_projects(term=project_name)
            if projects:
                project_id = projects[0].id
            else:
                raise LinearAPIException("Project not found", status_code=404)
        response: CreateIssueResponse = client.try_create_issue(
            team_id=team_id,
            title=title,
            description=description,
            priority=priority,
            project_id=project_id,
        )
        return response.issue.identifier, response.issue.title

    def run(
        self, input_data: Input, *, credentials: LinearCredentials, **kwargs
    ) -> BlockOutput:
        """Execute the issue creation"""
        try:
            issue_id, issue_title = self.create_issue(
                credentials=credentials,
                team_name=input_data.team_name,
                title=input_data.title,
                description=input_data.description,
                priority=input_data.priority,
                project_name=input_data.project_name,
            )

            yield "issue_id", issue_id
            yield "issue_title", issue_title

        except LinearAPIException as e:
            yield "error", str(e)
        except Exception as e:
            yield "error", f"Unexpected error: {str(e)}"


class LinearSearchIssuesBlock(Block):
    """Block for searching issues on Linear"""

    class Input(BlockSchema):
        term: str = SchemaField(description="Term to search for issues")
        credentials: LinearCredentialsInput = LinearCredentialsField(
            scopes=[LinearScope.READ],
        )

    class Output(BlockSchema):
        issues: list[Issue] = SchemaField(description="List of issues")

    def __init__(self):
        super().__init__(
            id="b5a2a0e6-26b4-4c5b-8a42-bc79e9cb65c2",
            description="Searches for issues on Linear",
            input_schema=self.Input,
            output_schema=self.Output,
            disabled=not LINEAR_OAUTH_IS_CONFIGURED,
            test_input={
                "term": "Test issue",
                "credentials": TEST_CREDENTIALS_INPUT_OAUTH,
            },
            test_credentials=TEST_CREDENTIALS_OAUTH,
            test_output=[
                (
                    "issues",
                    [
                        Issue(
                            id="abc123",
                            identifier="abc123",
                            title="Test issue",
                            description="Test description",
                            priority=1,
                        )
                    ],
                )
            ],
            test_mock={
                "search_issues": lambda *args, **kwargs: [
                    Issue(
                        id="abc123",
                        identifier="abc123",
                        title="Test issue",
                        description="Test description",
                        priority=1,
                    )
                ]
            },
        )

    @staticmethod
    def search_issues(
        credentials: LinearCredentials,
        term: str,
    ) -> list[Issue]:
        client = LinearClient(credentials=credentials)
        response: list[Issue] = client.try_search_issues(term=term)
        return response

    def run(
        self, input_data: Input, *, credentials: LinearCredentials, **kwargs
    ) -> BlockOutput:
        """Execute the issue search"""
        try:
            issues = self.search_issues(credentials=credentials, term=input_data.term)
            yield "issues", issues
        except LinearAPIException as e:
            yield "error", str(e)
        except Exception as e:
            yield "error", f"Unexpected error: {str(e)}"
@@ -1,41 +0,0 @@
from pydantic import BaseModel


class Comment(BaseModel):
    id: str
    body: str


class CreateCommentInput(BaseModel):
    body: str
    issueId: str


class CreateCommentResponse(BaseModel):
    success: bool
    comment: Comment


class CreateCommentResponseWrapper(BaseModel):
    commentCreate: CreateCommentResponse


class Issue(BaseModel):
    id: str
    identifier: str
    title: str
    description: str | None
    priority: int


class CreateIssueResponse(BaseModel):
    issue: Issue


class Project(BaseModel):
    id: str
    name: str
    description: str
    priority: int
    progress: int
    content: str
@@ -1,95 +0,0 @@
from backend.blocks.linear._api import LinearAPIException, LinearClient
from backend.blocks.linear._auth import (
    LINEAR_OAUTH_IS_CONFIGURED,
    TEST_CREDENTIALS_INPUT_OAUTH,
    TEST_CREDENTIALS_OAUTH,
    LinearCredentials,
    LinearCredentialsField,
    LinearCredentialsInput,
    LinearScope,
)
from backend.blocks.linear.models import Project
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField


class LinearSearchProjectsBlock(Block):
    """Block for searching projects on Linear"""

    class Input(BlockSchema):
        credentials: LinearCredentialsInput = LinearCredentialsField(
            scopes=[LinearScope.READ],
        )
        term: str = SchemaField(description="Term to search for projects")

    class Output(BlockSchema):
        projects: list[Project] = SchemaField(description="List of projects")
        error: str = SchemaField(description="Error message if the project search failed")

    def __init__(self):
        super().__init__(
            id="446a1d35-9d8f-4ac5-83ea-7684ec50e6af",
            description="Searches for projects on Linear",
            input_schema=self.Input,
            output_schema=self.Output,
            categories={BlockCategory.PRODUCTIVITY, BlockCategory.ISSUE_TRACKING},
            test_input={
                "term": "Test project",
                "credentials": TEST_CREDENTIALS_INPUT_OAUTH,
            },
            disabled=not LINEAR_OAUTH_IS_CONFIGURED,
            test_credentials=TEST_CREDENTIALS_OAUTH,
            test_output=[
                (
                    "projects",
                    [
                        Project(
                            id="abc123",
                            name="Test project",
                            description="Test description",
                            priority=1,
                            progress=1,
                            content="Test content",
                        )
                    ],
                )
            ],
            test_mock={
                "search_projects": lambda *args, **kwargs: [
                    Project(
                        id="abc123",
                        name="Test project",
                        description="Test description",
                        priority=1,
                        progress=1,
                        content="Test content",
                    )
                ]
            },
        )

    @staticmethod
    def search_projects(
        credentials: LinearCredentials,
        term: str,
    ) -> list[Project]:
        client = LinearClient(credentials=credentials)
        response: list[Project] = client.try_search_projects(term=term)
        return response

    def run(
        self, input_data: Input, *, credentials: LinearCredentials, **kwargs
    ) -> BlockOutput:
        """Execute the project search"""
        try:
            projects = self.search_projects(
                credentials=credentials,
                term=input_data.term,
            )

            yield "projects", projects

        except LinearAPIException as e:
            yield "error", str(e)
        except Exception as e:
            yield "error", f"Unexpected error: {str(e)}"
File diff suppressed because it is too large
@@ -1,245 +0,0 @@
import os
import tempfile
from typing import Literal, Optional

from moviepy.audio.io.AudioFileClip import AudioFileClip
from moviepy.video.fx.Loop import Loop
from moviepy.video.io.VideoFileClip import VideoFileClip

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
from backend.util.file import MediaFileType, get_exec_file_path, store_media_file


class MediaDurationBlock(Block):
    class Input(BlockSchema):
        media_in: MediaFileType = SchemaField(
            description="Media input (URL, data URI, or local path)."
        )
        is_video: bool = SchemaField(
            description="Whether the media is a video (True) or audio (False).",
            default=True,
        )

    class Output(BlockSchema):
        duration: float = SchemaField(
            description="Duration of the media file (in seconds)."
        )
        error: str = SchemaField(
            description="Error message if something fails.", default=""
        )

    def __init__(self):
        super().__init__(
            id="d8b91fd4-da26-42d4-8ecb-8b196c6d84b6",
            description="Block to get the duration of a media file.",
            categories={BlockCategory.MULTIMEDIA},
            input_schema=MediaDurationBlock.Input,
            output_schema=MediaDurationBlock.Output,
        )

    def run(
        self,
        input_data: Input,
        *,
        graph_exec_id: str,
        **kwargs,
    ) -> BlockOutput:
        # 1) Store the input media locally
        local_media_path = store_media_file(
            graph_exec_id=graph_exec_id,
            file=input_data.media_in,
            return_content=False,
        )
        media_abspath = get_exec_file_path(graph_exec_id, local_media_path)

        # 2) Load the clip
        if input_data.is_video:
            clip = VideoFileClip(media_abspath)
        else:
            clip = AudioFileClip(media_abspath)

        yield "duration", clip.duration


class LoopVideoBlock(Block):
    """
    Block for looping (repeating) a video clip until a given duration or number of loops.
    """

    class Input(BlockSchema):
        video_in: MediaFileType = SchemaField(
            description="The input video (can be a URL, data URI, or local path)."
        )
        # Provide EITHER a `duration` or `n_loops` or both. We'll demonstrate `duration`.
        duration: Optional[float] = SchemaField(
            description="Target duration (in seconds) to loop the video to. If omitted, defaults to no looping.",
            default=None,
            ge=0.0,
        )
        n_loops: Optional[int] = SchemaField(
            description="Number of times to repeat the video. If omitted, defaults to 1 (no repeat).",
            default=None,
            ge=1,
        )
        output_return_type: Literal["file_path", "data_uri"] = SchemaField(
            description="How to return the output video. Either a relative path or base64 data URI.",
            default="file_path",
        )

    class Output(BlockSchema):
        video_out: str = SchemaField(
            description="Looped video returned either as a relative path or a data URI."
        )
        error: str = SchemaField(
            description="Error message if something fails.", default=""
        )

    def __init__(self):
        super().__init__(
            id="8bf9eef6-5451-4213-b265-25306446e94b",
            description="Block to loop a video to a given duration or number of repeats.",
            categories={BlockCategory.MULTIMEDIA},
            input_schema=LoopVideoBlock.Input,
            output_schema=LoopVideoBlock.Output,
        )

    def run(
        self,
        input_data: Input,
        *,
        node_exec_id: str,
        graph_exec_id: str,
        **kwargs,
    ) -> BlockOutput:
        # 1) Store the input video locally
        local_video_path = store_media_file(
            graph_exec_id=graph_exec_id,
            file=input_data.video_in,
            return_content=False,
        )
        input_abspath = get_exec_file_path(graph_exec_id, local_video_path)

        # 2) Load the clip
        clip = VideoFileClip(input_abspath)

        # 3) Apply the loop effect
        looped_clip = clip
        if input_data.duration:
            # Loop until we reach the specified duration
            looped_clip = looped_clip.with_effects([Loop(duration=input_data.duration)])
        elif input_data.n_loops:
            looped_clip = looped_clip.with_effects([Loop(n=input_data.n_loops)])
        else:
            raise ValueError("Either 'duration' or 'n_loops' must be provided.")

        assert isinstance(looped_clip, VideoFileClip)

        # 4) Save the looped output
        output_filename = MediaFileType(
            f"{node_exec_id}_looped_{os.path.basename(local_video_path)}"
        )
        output_abspath = get_exec_file_path(graph_exec_id, output_filename)

        looped_clip = looped_clip.with_audio(clip.audio)
        looped_clip.write_videofile(output_abspath, codec="libx264", audio_codec="aac")

        # Return as data URI
        video_out = store_media_file(
            graph_exec_id=graph_exec_id,
            file=output_filename,
            return_content=input_data.output_return_type == "data_uri",
        )

        yield "video_out", video_out


class AddAudioToVideoBlock(Block):
    """
    Block that adds (attaches) an audio track to an existing video.
    Optionally scale the volume of the new track.
    """

    class Input(BlockSchema):
        video_in: MediaFileType = SchemaField(
            description="Video input (URL, data URI, or local path)."
        )
        audio_in: MediaFileType = SchemaField(
            description="Audio input (URL, data URI, or local path)."
        )
        volume: float = SchemaField(
            description="Volume scale for the newly attached audio track (1.0 = original).",
            default=1.0,
        )
        output_return_type: Literal["file_path", "data_uri"] = SchemaField(
            description="Return the final output as a relative path or base64 data URI.",
            default="file_path",
        )

    class Output(BlockSchema):
        video_out: MediaFileType = SchemaField(
            description="Final video (with attached audio), as a path or data URI."
        )
        error: str = SchemaField(
            description="Error message if something fails.", default=""
        )

    def __init__(self):
        super().__init__(
            id="3503748d-62b6-4425-91d6-725b064af509",
            description="Block to attach an audio file to a video file using moviepy.",
            categories={BlockCategory.MULTIMEDIA},
            input_schema=AddAudioToVideoBlock.Input,
            output_schema=AddAudioToVideoBlock.Output,
        )

    def run(
        self,
        input_data: Input,
        *,
        node_exec_id: str,
        graph_exec_id: str,
        **kwargs,
    ) -> BlockOutput:
        # 1) Store the inputs locally
        local_video_path = store_media_file(
            graph_exec_id=graph_exec_id,
            file=input_data.video_in,
            return_content=False,
        )
        local_audio_path = store_media_file(
            graph_exec_id=graph_exec_id,
            file=input_data.audio_in,
            return_content=False,
        )

        abs_temp_dir = os.path.join(tempfile.gettempdir(), "exec_file", graph_exec_id)
        video_abspath = os.path.join(abs_temp_dir, local_video_path)
        audio_abspath = os.path.join(abs_temp_dir, local_audio_path)

        # 2) Load video + audio with moviepy
        video_clip = VideoFileClip(video_abspath)
        audio_clip = AudioFileClip(audio_abspath)
        # Optionally scale volume
        if input_data.volume != 1.0:
            audio_clip = audio_clip.with_volume_scaled(input_data.volume)

        # 3) Attach the new audio track
        final_clip = video_clip.with_audio(audio_clip)

        # 4) Write to output file
        output_filename = MediaFileType(
            f"{node_exec_id}_audio_attached_{os.path.basename(local_video_path)}"
        )
        output_abspath = os.path.join(abs_temp_dir, output_filename)
        final_clip.write_videofile(output_abspath, codec="libx264", audio_codec="aac")

        # 5) Return either path or data URI
        video_out = store_media_file(
            graph_exec_id=graph_exec_id,
            file=output_filename,
            return_content=input_data.output_return_type == "data_uri",
        )

        yield "video_out", video_out
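
For reference, a minimal standalone sketch of the moviepy v2 loop-and-export pattern that LoopVideoBlock wraps; the input and output paths are placeholders.

# Standalone sketch of the moviepy v2 looping pattern used by LoopVideoBlock.
# "input.mp4" and "output.mp4" are placeholder paths.
from moviepy.video.fx.Loop import Loop
from moviepy.video.io.VideoFileClip import VideoFileClip

clip = VideoFileClip("input.mp4")
looped = clip.with_effects([Loop(duration=30.0)])  # repeat until 30 seconds
looped = looped.with_audio(clip.audio)  # keep the original audio track
looped.write_videofile("output.mp4", codec="libx264", audio_codec="aac")
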
@@ -1,338 +0,0 @@
from typing import Any, Literal, Optional, Union

from mem0 import MemoryClient
from pydantic import BaseModel, SecretStr

from backend.data.block import Block, BlockOutput, BlockSchema
from backend.data.model import (
    APIKeyCredentials,
    CredentialsField,
    CredentialsMetaInput,
    SchemaField,
)
from backend.integrations.providers import ProviderName

TEST_CREDENTIALS = APIKeyCredentials(
    id="ed55ac19-356e-4243-a6cb-bc599e9b716f",
    provider="mem0",
    api_key=SecretStr("mock-mem0-api-key"),
    title="Mock Mem0 API key",
    expires_at=None,
)

TEST_CREDENTIALS_INPUT = {
    "provider": TEST_CREDENTIALS.provider,
    "id": TEST_CREDENTIALS.id,
    "type": TEST_CREDENTIALS.type,
    "title": TEST_CREDENTIALS.title,
}


class Mem0Base:
    """Base class with shared utilities for Mem0 blocks"""

    @staticmethod
    def _get_client(credentials: APIKeyCredentials) -> MemoryClient:
        """Get initialized Mem0 client"""
        return MemoryClient(api_key=credentials.api_key.get_secret_value())


Filter = dict[str, list[dict[str, str | dict[str, list[str]]]]]


class Conversation(BaseModel):
    discriminator: Literal["conversation"]
    messages: list[dict[str, str]]


class Content(BaseModel):
    discriminator: Literal["content"]
    content: str


class AddMemoryBlock(Block, Mem0Base):
    """Block for adding memories to Mem0

    Always limited by user_id and optional graph_id and graph_exec_id"""

    class Input(BlockSchema):
        credentials: CredentialsMetaInput[
            Literal[ProviderName.MEM0], Literal["api_key"]
        ] = CredentialsField(description="Mem0 API key credentials")
        content: Union[Content, Conversation] = SchemaField(
            discriminator="discriminator",
            description="Content to add - either a string or list of message objects as output from an AI block",
            default=Content(discriminator="content", content="I'm a vegetarian"),
        )
        metadata: dict[str, Any] = SchemaField(
            description="Optional metadata for the memory", default_factory=dict
        )
        limit_memory_to_run: bool = SchemaField(
            description="Limit the memory to the run", default=False
        )
        limit_memory_to_agent: bool = SchemaField(
            description="Limit the memory to the agent", default=False
        )

    class Output(BlockSchema):
        action: str = SchemaField(description="Action of the operation")
        memory: str = SchemaField(description="Memory created")
        error: str = SchemaField(description="Error message if operation fails")

    def __init__(self):
        super().__init__(
            id="dce97578-86be-45a4-ae50-f6de33fc935a",
            description="Add new memories to Mem0 with user segmentation",
            input_schema=AddMemoryBlock.Input,
            output_schema=AddMemoryBlock.Output,
            test_input=[
                {
                    "content": {
                        "discriminator": "conversation",
                        "messages": [{"role": "user", "content": "I'm a vegetarian"}],
                    },
                    "metadata": {"food": "vegetarian"},
                    "credentials": TEST_CREDENTIALS_INPUT,
                },
                {
                    "content": {
                        "discriminator": "content",
                        "content": "I am a vegetarian",
                    },
                    "metadata": {"food": "vegetarian"},
                    "credentials": TEST_CREDENTIALS_INPUT,
                },
            ],
            test_output=[("action", "NO_CHANGE"), ("action", "NO_CHANGE")],
            test_credentials=TEST_CREDENTIALS,
            test_mock={"_get_client": lambda credentials: MockMemoryClient()},
        )

    def run(
        self,
        input_data: Input,
        *,
        credentials: APIKeyCredentials,
        user_id: str,
        graph_id: str,
        graph_exec_id: str,
        **kwargs,
    ) -> BlockOutput:
        try:
            client = self._get_client(credentials)

            if isinstance(input_data.content, Conversation):
                messages = input_data.content.messages
            else:
                # `content` is a Content model here, so unwrap its string payload
                # rather than passing the model object itself.
                messages = [{"role": "user", "content": input_data.content.content}]

            params = {
                "user_id": user_id,
                "output_format": "v1.1",
                "metadata": input_data.metadata,
            }

            if input_data.limit_memory_to_run:
                params["run_id"] = graph_exec_id
            if input_data.limit_memory_to_agent:
                params["agent_id"] = graph_id

            # Use the client to add memory
            response = client.add(
                messages,
                **params,
            )

            results = response.get("results", [])
            if results:
                for result in results:
                    yield "action", result["event"]
                    yield "memory", result["memory"]
            else:
                yield "action", "NO_CHANGE"

        except Exception as e:
            yield "error", str(e)


class SearchMemoryBlock(Block, Mem0Base):
    """Block for searching memories in Mem0"""

    class Input(BlockSchema):
        credentials: CredentialsMetaInput[
            Literal[ProviderName.MEM0], Literal["api_key"]
        ] = CredentialsField(description="Mem0 API key credentials")
        query: str = SchemaField(
            description="Search query",
            advanced=False,
        )
        trigger: bool = SchemaField(
            description="An unused field that is used to (re-)trigger the block when you have no other inputs",
            default=False,
            advanced=False,
        )
        categories_filter: list[str] = SchemaField(
            description="Categories to filter by",
            default_factory=list,
            advanced=True,
        )
        limit_memory_to_run: bool = SchemaField(
            description="Limit the memory to the run", default=False
        )
        limit_memory_to_agent: bool = SchemaField(
            description="Limit the memory to the agent", default=True
        )

    class Output(BlockSchema):
        memories: Any = SchemaField(description="List of matching memories")
        error: str = SchemaField(description="Error message if operation fails")

    def __init__(self):
        super().__init__(
            id="bd7c84e3-e073-4b75-810c-600886ec8a5b",
            description="Search memories in Mem0 by user",
            input_schema=SearchMemoryBlock.Input,
            output_schema=SearchMemoryBlock.Output,
            test_input={
                "query": "vegetarian preferences",
                "credentials": TEST_CREDENTIALS_INPUT,
                "top_k": 10,
                "rerank": True,
            },
            test_output=[
                ("memories", [{"id": "test-memory", "content": "test content"}])
            ],
            test_credentials=TEST_CREDENTIALS,
            test_mock={"_get_client": lambda credentials: MockMemoryClient()},
        )

    def run(
        self,
        input_data: Input,
        *,
        credentials: APIKeyCredentials,
        user_id: str,
        graph_id: str,
        graph_exec_id: str,
        **kwargs,
    ) -> BlockOutput:
        try:
            client = self._get_client(credentials)

            filters: Filter = {
                # This works with only one filter, so we can allow others to add on later
                "AND": [
                    {"user_id": user_id},
                ]
            }
            if input_data.categories_filter:
                filters["AND"].append(
                    {"categories": {"contains": input_data.categories_filter}}
                )
            if input_data.limit_memory_to_run:
                filters["AND"].append({"run_id": graph_exec_id})
            if input_data.limit_memory_to_agent:
                filters["AND"].append({"agent_id": graph_id})

            result: list[dict[str, Any]] = client.search(
                input_data.query, version="v2", filters=filters
            )
            yield "memories", result

        except Exception as e:
            yield "error", str(e)


class GetAllMemoriesBlock(Block, Mem0Base):
    """Block for retrieving all memories from Mem0"""

    class Input(BlockSchema):
        credentials: CredentialsMetaInput[
            Literal[ProviderName.MEM0], Literal["api_key"]
        ] = CredentialsField(description="Mem0 API key credentials")
        trigger: bool = SchemaField(
            description="An unused field that is used to trigger the block when you have no other inputs",
            default=False,
            advanced=False,
        )
        categories: Optional[list[str]] = SchemaField(
            description="Filter by categories", default=None
        )
        limit_memory_to_run: bool = SchemaField(
            description="Limit the memory to the run", default=False
        )
        limit_memory_to_agent: bool = SchemaField(
            description="Limit the memory to the agent", default=False
        )

    class Output(BlockSchema):
        memories: Any = SchemaField(description="List of memories")
        error: str = SchemaField(description="Error message if operation fails")

    def __init__(self):
        super().__init__(
            id="45aee5bf-4767-45d1-a28b-e01c5aae9fc1",
            description="Retrieve all memories from Mem0 with pagination",
            input_schema=GetAllMemoriesBlock.Input,
            output_schema=GetAllMemoriesBlock.Output,
            test_input={
                "user_id": "test_user",
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_output=[
                ("memories", [{"id": "test-memory", "content": "test content"}]),
            ],
            test_credentials=TEST_CREDENTIALS,
            test_mock={"_get_client": lambda credentials: MockMemoryClient()},
        )

    def run(
        self,
        input_data: Input,
        *,
        credentials: APIKeyCredentials,
        user_id: str,
        graph_id: str,
        graph_exec_id: str,
        **kwargs,
    ) -> BlockOutput:
        try:
            client = self._get_client(credentials)

            filters: Filter = {
                "AND": [
                    {"user_id": user_id},
                ]
            }
            if input_data.limit_memory_to_run:
                filters["AND"].append({"run_id": graph_exec_id})
            if input_data.limit_memory_to_agent:
                filters["AND"].append({"agent_id": graph_id})
            if input_data.categories:
                filters["AND"].append(
                    {"categories": {"contains": input_data.categories}}
                )

            memories: list[dict[str, Any]] = client.get_all(
                filters=filters,
                version="v2",
            )

            yield "memories", memories

        except Exception as e:
            yield "error", str(e)


# Mock client for testing
class MockMemoryClient:
    """Mock Mem0 client for testing"""

    def add(self, *args, **kwargs):
        return {"memory_id": "test-memory-id", "status": "success"}

    def search(self, *args, **kwargs) -> list[dict[str, str]]:
        return [{"id": "test-memory", "content": "test content"}]

    def get_all(self, *args, **kwargs) -> list[dict[str, str]]:
        return [{"id": "test-memory", "content": "test content"}]
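
For clarity, a sketch of the v2 filter structure that the Mem0 blocks above assemble: a single top-level AND whose clauses narrow by user and, optionally, by run, agent, and categories. All values below are illustrative placeholders.

# Illustrative v2 filter as built by SearchMemoryBlock / GetAllMemoriesBlock.
filters = {
    "AND": [
        {"user_id": "user-123"},                 # always present
        {"run_id": "graph-exec-456"},            # added if limit_memory_to_run
        {"agent_id": "graph-789"},               # added if limit_memory_to_agent
        {"categories": {"contains": ["food"]}},  # added if categories given
    ]
}
# client.search("vegetarian preferences", version="v2", filters=filters)
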
@@ -6,14 +6,13 @@ from backend.blocks.nvidia._auth import (
 from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
 from backend.data.model import SchemaField
 from backend.util.request import requests
-from backend.util.type import MediaFileType


 class NvidiaDeepfakeDetectBlock(Block):
     class Input(BlockSchema):
         credentials: NvidiaCredentialsInput = NvidiaCredentialsField()
-        image_base64: MediaFileType = SchemaField(
-            description="Image to analyze for deepfakes",
+        image_base64: str = SchemaField(
+            description="Image to analyze for deepfakes", image_upload=True
         )
         return_image: bool = SchemaField(
             description="Whether to return the processed image with markings",
@@ -23,12 +22,16 @@ class NvidiaDeepfakeDetectBlock(Block):
     class Output(BlockSchema):
         status: str = SchemaField(
             description="Detection status (SUCCESS, ERROR, CONTENT_FILTERED)",
             default="",
         )
-        image: MediaFileType = SchemaField(
+        image: str = SchemaField(
             description="Processed image with detection markings (if return_image=True)",
             default="",
+            image_output=True,
         )
         is_deepfake: float = SchemaField(
             description="Probability that the image is a deepfake (0-1)",
             default=0.0,
         )

     def __init__(self):
@@ -177,8 +177,7 @@ class PineconeInsertBlock(Block):
|
||||
description="Namespace to use in Pinecone", default=""
|
||||
)
|
||||
metadata: dict = SchemaField(
|
||||
description="Additional metadata to store with each vector",
|
||||
default_factory=dict,
|
||||
description="Additional metadata to store with each vector", default={}
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
|
||||
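The Pinecone hunk above trades `default_factory=dict` against a literal `default={}` on the metadata field. Pydantic copies mutable defaults per instance, so both spellings behave the same there, but in plain classes and dataclasses a literal `{}` is one shared object, which is why `default_factory` is usually preferred. A minimal sketch of the distinction, using plain pydantic `Field` rather than AutoGPT's `SchemaField` wrapper:

    from pydantic import BaseModel, Field

    class WithFactory(BaseModel):
        # A fresh dict is created for every instance.
        metadata: dict = Field(default_factory=dict)

    class WithLiteral(BaseModel):
        # Pydantic copies this default per instance; plain classes and
        # dataclasses do not, which invites the shared-mutable-default bug.
        metadata: dict = {}

    a, b = WithFactory(), WithFactory()
    a.metadata["k"] = "v"
    assert b.metadata == {}  # the two instances stay independent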
@@ -1,48 +1,22 @@
from datetime import datetime, timezone
from typing import Iterator, Literal
from typing import Iterator

import praw
from pydantic import BaseModel, SecretStr
from pydantic import BaseModel, ConfigDict

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import (
    CredentialsField,
    CredentialsMetaInput,
    SchemaField,
    UserPasswordCredentials,
)
from backend.integrations.providers import ProviderName
from backend.data.model import BlockSecret, SchemaField, SecretField
from backend.util.mock import MockObject
from backend.util.settings import Settings

RedditCredentials = UserPasswordCredentials
RedditCredentialsInput = CredentialsMetaInput[
    Literal[ProviderName.REDDIT],
    Literal["user_password"],
]


def RedditCredentialsField() -> RedditCredentialsInput:
    """Creates a Reddit credentials input on a block."""
    return CredentialsField(
        description="The Reddit integration requires a username and password.",
    )
class RedditCredentials(BaseModel):
    client_id: BlockSecret = SecretField(key="reddit_client_id")
    client_secret: BlockSecret = SecretField(key="reddit_client_secret")
    username: BlockSecret = SecretField(key="reddit_username")
    password: BlockSecret = SecretField(key="reddit_password")
    user_agent: str = "AutoGPT:1.0 (by /u/autogpt)"


TEST_CREDENTIALS = UserPasswordCredentials(
    id="01234567-89ab-cdef-0123-456789abcdef",
    provider="reddit",
    username=SecretStr("mock-reddit-username"),
    password=SecretStr("mock-reddit-password"),
    title="Mock Reddit credentials",
)

TEST_CREDENTIALS_INPUT = {
    "provider": TEST_CREDENTIALS.provider,
    "id": TEST_CREDENTIALS.id,
    "type": TEST_CREDENTIALS.type,
    "title": TEST_CREDENTIALS.title,
}
    model_config = ConfigDict(title="Reddit Credentials")


class RedditPost(BaseModel):
@@ -57,16 +31,13 @@ class RedditComment(BaseModel):
    comment: str


settings = Settings()


def get_praw(creds: RedditCredentials) -> praw.Reddit:
    client = praw.Reddit(
        client_id=settings.secrets.reddit_client_id,
        client_secret=settings.secrets.reddit_client_secret,
        client_id=creds.client_id.get_secret_value(),
        client_secret=creds.client_secret.get_secret_value(),
        username=creds.username.get_secret_value(),
        password=creds.password.get_secret_value(),
        user_agent=settings.config.reddit_user_agent,
        user_agent=creds.user_agent,
    )
    me = client.user.me()
    if not me:
@@ -77,11 +48,11 @@ def get_praw(creds: RedditCredentials) -> praw.Reddit:

class GetRedditPostsBlock(Block):
    class Input(BlockSchema):
        subreddit: str = SchemaField(
            description="Subreddit name, excluding the /r/ prefix",
            default="writingprompts",
        subreddit: str = SchemaField(description="Subreddit name")
        creds: RedditCredentials = SchemaField(
            description="Reddit credentials",
            default=RedditCredentials(),
        )
        credentials: RedditCredentialsInput = RedditCredentialsField()
        last_minutes: int | None = SchemaField(
            description="Post time to stop minutes ago while fetching posts",
            default=None,
@@ -99,18 +70,20 @@ class GetRedditPostsBlock(Block):

    def __init__(self):
        super().__init__(
            disabled=True,
            id="c6731acb-4285-4ee1-bc9b-03d0766c370f",
            description="This block fetches Reddit posts from a defined subreddit name.",
            categories={BlockCategory.SOCIAL},
            disabled=(
                not settings.secrets.reddit_client_id
                or not settings.secrets.reddit_client_secret
            ),
            input_schema=GetRedditPostsBlock.Input,
            output_schema=GetRedditPostsBlock.Output,
            test_credentials=TEST_CREDENTIALS,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "creds": {
                    "client_id": "client_id",
                    "client_secret": "client_secret",
                    "username": "username",
                    "password": "password",
                    "user_agent": "user_agent",
                },
                "subreddit": "subreddit",
                "last_post": "id3",
                "post_limit": 2,
@@ -130,7 +103,7 @@ class GetRedditPostsBlock(Block):
                ),
            ],
            test_mock={
                "get_posts": lambda input_data, credentials: [
                "get_posts": lambda _: [
                    MockObject(id="id1", title="title1", selftext="body1"),
                    MockObject(id="id2", title="title2", selftext="body2"),
                    MockObject(id="id3", title="title2", selftext="body2"),
@@ -139,18 +112,14 @@ class GetRedditPostsBlock(Block):
        )

    @staticmethod
    def get_posts(
        input_data: Input, *, credentials: RedditCredentials
    ) -> Iterator[praw.reddit.Submission]:
        client = get_praw(credentials)
    def get_posts(input_data: Input) -> Iterator[praw.reddit.Submission]:
        client = get_praw(input_data.creds)
        subreddit = client.subreddit(input_data.subreddit)
        return subreddit.new(limit=input_data.post_limit or 10)

    def run(
        self, input_data: Input, *, credentials: RedditCredentials, **kwargs
    ) -> BlockOutput:
    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        current_time = datetime.now(tz=timezone.utc)
        for post in self.get_posts(input_data=input_data, credentials=credentials):
        for post in self.get_posts(input_data):
            if input_data.last_minutes:
                post_datetime = datetime.fromtimestamp(
                    post.created_utc, tz=timezone.utc
@@ -172,7 +141,9 @@ class GetRedditPostsBlock(Block):

class PostRedditCommentBlock(Block):
    class Input(BlockSchema):
        credentials: RedditCredentialsInput = RedditCredentialsField()
        creds: RedditCredentials = SchemaField(
            description="Reddit credentials", default=RedditCredentials()
        )
        data: RedditComment = SchemaField(description="Reddit comment")

    class Output(BlockSchema):
@@ -185,15 +156,7 @@ class PostRedditCommentBlock(Block):
            categories={BlockCategory.SOCIAL},
            input_schema=PostRedditCommentBlock.Input,
            output_schema=PostRedditCommentBlock.Output,
            disabled=(
                not settings.secrets.reddit_client_id
                or not settings.secrets.reddit_client_secret
            ),
            test_credentials=TEST_CREDENTIALS,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "data": {"post_id": "id", "comment": "comment"},
            },
            test_input={"data": {"post_id": "id", "comment": "comment"}},
            test_output=[("comment_id", "dummy_comment_id")],
            test_mock={"reply_post": lambda creds, comment: "dummy_comment_id"},
        )
@@ -207,7 +170,5 @@ class PostRedditCommentBlock(Block):
            raise ValueError("Failed to post comment.")
        return new_comment.id

    def run(
        self, input_data: Input, *, credentials: RedditCredentials, **kwargs
    ) -> BlockOutput:
        yield "comment_id", self.reply_post(credentials, input_data.data)
    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        yield "comment_id", self.reply_post(input_data.creds, input_data.data)

@@ -2,8 +2,8 @@ import os
from enum import Enum
from typing import Literal

import replicate
from pydantic import SecretStr
from replicate.client import Client as ReplicateClient
from replicate.helpers import FileOutput

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
@@ -131,7 +131,7 @@ class ReplicateFluxAdvancedModelBlock(Block):
        super().__init__(
            id="90f8c45e-e983-4644-aa0b-b4ebe2f531bc",
            description="This block runs Flux models on Replicate with advanced settings.",
            categories={BlockCategory.AI, BlockCategory.MULTIMEDIA},
            categories={BlockCategory.AI},
            input_schema=ReplicateFluxAdvancedModelBlock.Input,
            output_schema=ReplicateFluxAdvancedModelBlock.Output,
            test_input={
@@ -198,7 +198,7 @@ class ReplicateFluxAdvancedModelBlock(Block):
        safety_tolerance,
    ):
        # Initialize Replicate client with the API key
        client = ReplicateClient(api_token=api_key.get_secret_value())
        client = replicate.Client(api_token=api_key.get_secret_value())

        # Run the model with additional parameters
        # The "type: ignore" is because they changed the return type and didn't
        # update the type hint! It should be overloaded depending on the value of
        # `use_file_output` to `FileOutput | list[FileOutput]`,
        # but it's `Any | Iterator[Any]`.
        output: FileOutput | list[FileOutput] = client.run(  # type: ignore
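Since `client.run` can hand back either a single FileOutput or a list of them, downstream code has to normalize before iterating. A minimal sketch of one way to do that; the helper name is ours, not from the diff, and it assumes the `replicate.helpers.FileOutput` class seen in the imports above:

    from replicate.helpers import FileOutput

    def normalize_output(output) -> list[FileOutput]:
        # Wrap the single-result case so callers can always iterate.
        if isinstance(output, FileOutput):
            return [output]
        return list(output)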
@@ -1,176 +0,0 @@
from base64 import b64encode
from enum import Enum
from typing import Literal

from pydantic import SecretStr

from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import (
    APIKeyCredentials,
    CredentialsField,
    CredentialsMetaInput,
    SchemaField,
)
from backend.integrations.providers import ProviderName
from backend.util.file import MediaFileType, store_media_file
from backend.util.request import Requests


class Format(str, Enum):
    PNG = "png"
    JPEG = "jpeg"
    WEBP = "webp"


class ScreenshotWebPageBlock(Block):
    """Block for taking screenshots using ScreenshotOne API"""

    class Input(BlockSchema):
        credentials: CredentialsMetaInput[
            Literal[ProviderName.SCREENSHOTONE], Literal["api_key"]
        ] = CredentialsField(description="The ScreenshotOne API key")
        url: str = SchemaField(
            description="URL of the website to screenshot",
            placeholder="https://example.com",
        )
        viewport_width: int = SchemaField(
            description="Width of the viewport in pixels", default=1920
        )
        viewport_height: int = SchemaField(
            description="Height of the viewport in pixels", default=1080
        )
        full_page: bool = SchemaField(
            description="Whether to capture the full page length", default=False
        )
        format: Format = SchemaField(
            description="Output format (png, jpeg, webp)", default=Format.PNG
        )
        block_ads: bool = SchemaField(description="Whether to block ads", default=True)
        block_cookie_banners: bool = SchemaField(
            description="Whether to block cookie banners", default=True
        )
        block_chats: bool = SchemaField(
            description="Whether to block chat widgets", default=True
        )
        cache: bool = SchemaField(
            description="Whether to enable caching", default=False
        )

    class Output(BlockSchema):
        image: MediaFileType = SchemaField(description="The screenshot image data")
        error: str = SchemaField(description="Error message if the screenshot failed")

    def __init__(self):
        super().__init__(
            id="3a7c4b8d-6e2f-4a5d-b9c1-f8d23c5a9b0e",  # Generated UUID
            description="Takes a screenshot of a specified website using ScreenshotOne API",
            categories={BlockCategory.DATA},
            input_schema=ScreenshotWebPageBlock.Input,
            output_schema=ScreenshotWebPageBlock.Output,
            test_input={
                "url": "https://example.com",
                "viewport_width": 1920,
                "viewport_height": 1080,
                "full_page": False,
                "format": "png",
                "block_ads": True,
                "block_cookie_banners": True,
                "block_chats": True,
                "cache": False,
                "credentials": {
                    "provider": "screenshotone",
                    "type": "api_key",
                    "id": "test-id",
                    "title": "Test API Key",
                },
            },
            test_credentials=APIKeyCredentials(
                id="test-id",
                provider="screenshotone",
                api_key=SecretStr("test-key"),
                title="Test API Key",
                expires_at=None,
            ),
            test_output=[
                (
                    "image",
                    "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAAXNSR0IArs4c6QAAAB5JREFUOE9jZPjP8J+BAsA4agDDaBgwjIYBw7AIAwCV5B/xAsMbygAAAABJRU5ErkJggg==",
                ),
            ],
            test_mock={
                "take_screenshot": lambda *args, **kwargs: {
                    "image": "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAAXNSR0IArs4c6QAAAB5JREFUOE9jZPjP8J+BAsA4agDDaBgwjIYBw7AIAwCV5B/xAsMbygAAAABJRU5ErkJggg==",
                }
            },
        )

    @staticmethod
    def take_screenshot(
        credentials: APIKeyCredentials,
        graph_exec_id: str,
        url: str,
        viewport_width: int,
        viewport_height: int,
        full_page: bool,
        format: Format,
        block_ads: bool,
        block_cookie_banners: bool,
        block_chats: bool,
        cache: bool,
    ) -> dict:
        """
        Takes a screenshot using the ScreenshotOne API
        """
        api = Requests(trusted_origins=["https://api.screenshotone.com"])

        # Build API URL with parameters
        params = {
            "access_key": credentials.api_key.get_secret_value(),
            "url": url,
            "viewport_width": viewport_width,
            "viewport_height": viewport_height,
            "full_page": str(full_page).lower(),
            "format": format.value,
            "block_ads": str(block_ads).lower(),
            "block_cookie_banners": str(block_cookie_banners).lower(),
            "block_chats": str(block_chats).lower(),
            "cache": str(cache).lower(),
        }

        response = api.get("https://api.screenshotone.com/take", params=params)

        return {
            "image": store_media_file(
                graph_exec_id=graph_exec_id,
                file=MediaFileType(
                    f"data:image/{format.value};base64,{b64encode(response.content).decode('utf-8')}"
                ),
                return_content=True,
            )
        }

    def run(
        self,
        input_data: Input,
        *,
        credentials: APIKeyCredentials,
        graph_exec_id: str,
        **kwargs,
    ) -> BlockOutput:
        try:
            screenshot_data = self.take_screenshot(
                credentials=credentials,
                graph_exec_id=graph_exec_id,
                url=input_data.url,
                viewport_width=input_data.viewport_width,
                viewport_height=input_data.viewport_height,
                full_page=input_data.full_page,
                format=input_data.format,
                block_ads=input_data.block_ads,
                block_cookie_banners=input_data.block_cookie_banners,
                block_chats=input_data.block_chats,
                cache=input_data.cache,
            )
            yield "image", screenshot_data["image"]
        except Exception as e:
            yield "error", str(e)
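The block above hands the screenshot downstream as a data URI. For reference, the same round trip in isolation, a small sketch using only the standard library and placeholder bytes:

    from base64 import b64encode

    raw = b"\x89PNG..."  # placeholder for response.content
    data_uri = f"data:image/png;base64,{b64encode(raw).decode('utf-8')}"
    # The URI is then persisted via store_media_file, as take_screenshot does.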
@@ -8,7 +8,6 @@ from backend.data.block import (
    BlockWebhookConfig,
)
from backend.data.model import SchemaField
from backend.integrations.providers import ProviderName
from backend.util import settings
from backend.util.settings import AppEnvironment, BehaveAs

@@ -26,7 +25,7 @@ class Slant3DTriggerBase:
    class Input(BlockSchema):
        credentials: Slant3DCredentialsInput = Slant3DCredentialsField()
        # Webhook URL is handled by the webhook system
        payload: dict = SchemaField(hidden=True, default_factory=dict)
        payload: dict = SchemaField(hidden=True, default={})

    class Output(BlockSchema):
        payload: dict = SchemaField(
@@ -83,7 +82,7 @@ class Slant3DOrderWebhookBlock(Slant3DTriggerBase, Block):
            input_schema=self.Input,
            output_schema=self.Output,
            webhook_config=BlockWebhookConfig(
                provider=ProviderName.SLANT3D,
                provider="slant3d",
                webhook_type="orders",  # Only one type for now
                resource_format="",  # No resource format needed
                event_filter_input="events",

@@ -1,513 +0,0 @@
import logging
import re
from collections import Counter
from typing import TYPE_CHECKING, Any

from autogpt_libs.utils.cache import thread_cached

import backend.blocks.llm as llm
from backend.blocks.agent import AgentExecutorBlock
from backend.data.block import (
    Block,
    BlockCategory,
    BlockInput,
    BlockOutput,
    BlockSchema,
    BlockType,
)
from backend.data.model import SchemaField
from backend.util import json

if TYPE_CHECKING:
    from backend.data.graph import Link, Node

logger = logging.getLogger(__name__)


@thread_cached
def get_database_manager_client():
    from backend.executor import DatabaseManagerClient
    from backend.util.service import get_service_client

    return get_service_client(DatabaseManagerClient)


def _get_tool_requests(entry: dict[str, Any]) -> list[str]:
    """
    Return a list of tool_call_ids if the entry is a tool request.
    Supports both OpenAI and Anthropic formats.
    """
    tool_call_ids = []
    if entry.get("role") != "assistant":
        return tool_call_ids

    # OpenAI: check for tool_calls in the entry.
    calls = entry.get("tool_calls")
    if isinstance(calls, list):
        for call in calls:
            if tool_id := call.get("id"):
                tool_call_ids.append(tool_id)

    # Anthropic: check content items for tool_use type.
    content = entry.get("content")
    if isinstance(content, list):
        for item in content:
            if item.get("type") != "tool_use":
                continue
            if tool_id := item.get("id"):
                tool_call_ids.append(tool_id)

    return tool_call_ids


def _get_tool_responses(entry: dict[str, Any]) -> list[str]:
    """
    Return a list of tool_call_ids if the entry is a tool response.
    Supports both OpenAI and Anthropic formats.
    """
    tool_call_ids: list[str] = []

    # OpenAI: a tool response message with role "tool" and key "tool_call_id".
    if entry.get("role") == "tool":
        if tool_call_id := entry.get("tool_call_id"):
            tool_call_ids.append(str(tool_call_id))

    # Anthropic: check content items for tool_result type.
    if entry.get("role") == "user":
        content = entry.get("content")
        if isinstance(content, list):
            for item in content:
                if item.get("type") != "tool_result":
                    continue
                if tool_call_id := item.get("tool_use_id"):
                    tool_call_ids.append(tool_call_id)

    return tool_call_ids


def _create_tool_response(call_id: str, output: dict[str, Any]) -> dict[str, Any]:
    """
    Create a tool response message for either OpenAI or Anthropic,
    based on the tool_id format.
    """
    content = output if isinstance(output, str) else json.dumps(output)

    # Anthropic format: tool IDs typically start with "toolu_"
    if call_id.startswith("toolu_"):
        return {
            "role": "user",
            "type": "message",
            "content": [
                {"tool_use_id": call_id, "type": "tool_result", "content": content}
            ],
        }

    # OpenAI format: tool IDs typically start with "call_".
    # Or default fallback (if the tool_id doesn't match any known prefix)
    return {"role": "tool", "tool_call_id": call_id, "content": content}
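To make the two wire formats concrete, these are the shapes `_create_tool_response` produces; the IDs and values are illustrative placeholders:

    # Anthropic-style ID ("toolu_..."):
    _create_tool_response("toolu_abc123", {"result": 42})
    # -> {"role": "user", "type": "message",
    #     "content": [{"tool_use_id": "toolu_abc123", "type": "tool_result",
    #                  "content": '{"result": 42}'}]}

    # OpenAI-style ID ("call_...") or any unknown prefix:
    _create_tool_response("call_abc123", {"result": 42})
    # -> {"role": "tool", "tool_call_id": "call_abc123", "content": '{"result": 42}'}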
def get_pending_tool_calls(conversation_history: list[Any]) -> dict[str, int]:
    """
    Every tool-call entry in the conversation history requires a response.
    This function returns the pending tool calls that have not generated an output yet.

    Return: dict[str, int] - A dictionary of pending tool call IDs with their count.
    """
    pending_calls = Counter()
    for history in conversation_history:
        for call_id in _get_tool_requests(history):
            pending_calls[call_id] += 1

        for call_id in _get_tool_responses(history):
            pending_calls[call_id] -= 1

    return {call_id: count for call_id, count in pending_calls.items() if count > 0}
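A quick worked example of the bookkeeping above, using OpenAI-shaped messages with made-up IDs:

    history = [
        {"role": "assistant", "tool_calls": [{"id": "call_1"}, {"id": "call_2"}]},
        {"role": "tool", "tool_call_id": "call_1", "content": "done"},
    ]
    # call_1 was requested and answered; call_2 still awaits an output.
    assert get_pending_tool_calls(history) == {"call_2": 1}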
class SmartDecisionMakerBlock(Block):
    """
    A block that uses a language model to make smart decisions based on a given prompt.
    """

    class Input(BlockSchema):
        prompt: str = SchemaField(
            description="The prompt to send to the language model.",
            placeholder="Enter your prompt here...",
        )
        model: llm.LlmModel = SchemaField(
            title="LLM Model",
            default=llm.LlmModel.GPT4O,
            description="The language model to use for answering the prompt.",
            advanced=False,
        )
        credentials: llm.AICredentials = llm.AICredentialsField()
        sys_prompt: str = SchemaField(
            title="System Prompt",
            default="Thinking carefully step by step decide which function to call. "
            "Always choose a function call from the list of function signatures, "
            "and always provide the complete argument provided with the type "
            "matching the required jsonschema signature, no missing argument is allowed. "
            "If you have already completed the task objective, you can end the task "
            "by providing the end result of your work as a finish message. "
            "Only provide EXACTLY one function call, multiple tool calls is strictly prohibited.",
            description="The system prompt to provide additional context to the model.",
        )
        conversation_history: list[dict] = SchemaField(
            default_factory=list,
            description="The conversation history to provide context for the prompt.",
        )
        last_tool_output: Any = SchemaField(
            default=None,
            description="The output of the last tool that was called.",
        )
        retry: int = SchemaField(
            title="Retry Count",
            default=3,
            description="Number of times to retry the LLM call if the response does not match the expected format.",
        )
        prompt_values: dict[str, str] = SchemaField(
            advanced=False,
            default_factory=dict,
            description="Values used to fill in the prompt. The values can be used in the prompt by putting them in a double curly braces, e.g. {{variable_name}}.",
        )
        max_tokens: int | None = SchemaField(
            advanced=True,
            default=None,
            description="The maximum number of tokens to generate in the chat completion.",
        )
        ollama_host: str = SchemaField(
            advanced=True,
            default="localhost:11434",
            description="Ollama host for local models",
        )

    @classmethod
    def get_missing_links(cls, data: BlockInput, links: list["Link"]) -> set[str]:
        # conversation_history & last_tool_output validation is handled differently
        missing_links = super().get_missing_links(
            data,
            [
                link
                for link in links
                if link.sink_name
                not in ["conversation_history", "last_tool_output"]
            ],
        )

        # Avoid executing the block if the last_tool_output is connected to a static
        # link, like StoreValueBlock or AgentInputBlock.
        if any(link.sink_name == "conversation_history" for link in links) and any(
            link.sink_name == "last_tool_output" and link.is_static
            for link in links
        ):
            raise ValueError(
                "Last Tool Output can't be connected to a static (dashed line) "
                "link like the output of `StoreValue` or `AgentInput` block"
            )

        return missing_links

    @classmethod
    def get_missing_input(cls, data: BlockInput) -> set[str]:
        if missing_input := super().get_missing_input(data):
            return missing_input

        conversation_history = data.get("conversation_history", [])
        pending_tool_calls = get_pending_tool_calls(conversation_history)
        last_tool_output = data.get("last_tool_output")
        if not last_tool_output and pending_tool_calls:
            return {"last_tool_output"}
        return set()

    class Output(BlockSchema):
        error: str = SchemaField(description="Error message if the API call failed.")
        tools: Any = SchemaField(description="The tools that are available to use.")
        finished: str = SchemaField(
            description="The finished message to display to the user."
        )
        conversations: list[Any] = SchemaField(
            description="The conversation history to provide context for the prompt."
        )

    def __init__(self):
        super().__init__(
            id="3b191d9f-356f-482d-8238-ba04b6d18381",
            description="Uses AI to intelligently decide what tool to use.",
            categories={BlockCategory.AI},
            block_type=BlockType.AI,
            input_schema=SmartDecisionMakerBlock.Input,
            output_schema=SmartDecisionMakerBlock.Output,
            test_input={
                "prompt": "Hello, World!",
                "credentials": llm.TEST_CREDENTIALS_INPUT,
            },
            test_output=[],
            test_credentials=llm.TEST_CREDENTIALS,
        )

    @staticmethod
    def cleanup(s: str):
        return re.sub(r"[^a-zA-Z0-9_-]", "_", s).lower()

    @staticmethod
    def _create_block_function_signature(
        sink_node: "Node", links: list["Link"]
    ) -> dict[str, Any]:
        """
        Creates a function signature for a block node.

        Args:
            sink_node: The node for which to create a function signature.
            links: The list of links connected to the sink node.

        Returns:
            A dictionary representing the function signature in the format expected by LLM tools.

        Raises:
            ValueError: If the block specified by sink_node.block_id is not found.
        """
        block = sink_node.block

        tool_function: dict[str, Any] = {
            "name": SmartDecisionMakerBlock.cleanup(block.name),
            "description": block.description,
        }

        properties = {}
        required = []

        for link in links:
            sink_block_input_schema = block.input_schema
            description = (
                sink_block_input_schema.model_fields[link.sink_name].description
                if link.sink_name in sink_block_input_schema.model_fields
                and sink_block_input_schema.model_fields[link.sink_name].description
                else f"The {link.sink_name} of the tool"
            )
            properties[SmartDecisionMakerBlock.cleanup(link.sink_name)] = {
                "type": "string",
                "description": description,
            }

        tool_function["parameters"] = {
            "type": "object",
            "properties": properties,
            "required": required,
            "additionalProperties": False,
            "strict": True,
        }

        return {"type": "function", "function": tool_function}
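For illustration, a signature built this way for a hypothetical block named "Post Reddit Comment" with one linked input would look roughly like this; the block name and field are invented for the example:

    {
        "type": "function",
        "function": {
            "name": "post_reddit_comment",  # cleanup() lowercases and replaces disallowed chars
            "description": "Posts a comment on Reddit.",
            "parameters": {
                "type": "object",
                "properties": {
                    "data": {"type": "string", "description": "The data of the tool"},
                },
                "required": [],
                "additionalProperties": False,
                "strict": True,
            },
        },
    }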
    @staticmethod
    def _create_agent_function_signature(
        sink_node: "Node", links: list["Link"]
    ) -> dict[str, Any]:
        """
        Creates a function signature for an agent node.

        Args:
            sink_node: The agent node for which to create a function signature.
            links: The list of links connected to the sink node.

        Returns:
            A dictionary representing the function signature in the format expected by LLM tools.

        Raises:
            ValueError: If the graph metadata for the specified graph_id and graph_version is not found.
        """
        graph_id = sink_node.input_default.get("graph_id")
        graph_version = sink_node.input_default.get("graph_version")
        if not graph_id or not graph_version:
            raise ValueError("Graph ID or Graph Version not found in sink node.")

        db_client = get_database_manager_client()
        sink_graph_meta = db_client.get_graph_metadata(graph_id, graph_version)
        if not sink_graph_meta:
            raise ValueError(
                f"Sink graph metadata not found: {graph_id} {graph_version}"
            )

        tool_function: dict[str, Any] = {
            "name": SmartDecisionMakerBlock.cleanup(sink_graph_meta.name),
            "description": sink_graph_meta.description,
        }

        properties = {}
        required = []

        for link in links:
            sink_block_input_schema = sink_node.input_default["input_schema"]
            description = (
                sink_block_input_schema["properties"][link.sink_name]["description"]
                if "description"
                in sink_block_input_schema["properties"][link.sink_name]
                else f"The {link.sink_name} of the tool"
            )
            properties[SmartDecisionMakerBlock.cleanup(link.sink_name)] = {
                "type": "string",
                "description": description,
            }

        tool_function["parameters"] = {
            "type": "object",
            "properties": properties,
            "required": required,
            "additionalProperties": False,
            "strict": True,
        }

        return {"type": "function", "function": tool_function}

    @staticmethod
    def _create_function_signature(node_id: str) -> list[dict[str, Any]]:
        """
        Creates function signatures for tools linked to a specified node within a graph.

        This method filters the graph links to identify those that are tools and are
        connected to the given node_id. It then constructs function signatures for each
        tool based on the metadata and input schema of the linked nodes.

        Args:
            node_id: The node_id for which to create function signatures.

        Returns:
            list[dict[str, Any]]: A list of dictionaries, each representing a function signature
                                  for a tool, including its name, description, and parameters.

        Raises:
            ValueError: If no tool links are found for the specified node_id, or if a sink node
                        or its metadata cannot be found.
        """
        db_client = get_database_manager_client()
        tools = [
            (link, node)
            for link, node in db_client.get_connected_output_nodes(node_id)
            if link.source_name.startswith("tools_^_") and link.source_id == node_id
        ]
        if not tools:
            raise ValueError("There is no next node to execute.")

        return_tool_functions = []

        grouped_tool_links: dict[str, tuple["Node", list["Link"]]] = {}
        for link, node in tools:
            if link.sink_id not in grouped_tool_links:
                grouped_tool_links[link.sink_id] = (node, [link])
            else:
                grouped_tool_links[link.sink_id][1].append(link)

        for sink_node, links in grouped_tool_links.values():
            if not sink_node:
                raise ValueError(f"Sink node not found: {links[0].sink_id}")

            if sink_node.block_id == AgentExecutorBlock().id:
                return_tool_functions.append(
                    SmartDecisionMakerBlock._create_agent_function_signature(
                        sink_node, links
                    )
                )
            else:
                return_tool_functions.append(
                    SmartDecisionMakerBlock._create_block_function_signature(
                        sink_node, links
                    )
                )

        return return_tool_functions

    def run(
        self,
        input_data: Input,
        *,
        credentials: llm.APIKeyCredentials,
        graph_id: str,
        node_id: str,
        graph_exec_id: str,
        node_exec_id: str,
        user_id: str,
        **kwargs,
    ) -> BlockOutput:
        tool_functions = self._create_function_signature(node_id)

        input_data.conversation_history = input_data.conversation_history or []
        prompt = [json.to_dict(p) for p in input_data.conversation_history if p]

        pending_tool_calls = get_pending_tool_calls(input_data.conversation_history)
        if pending_tool_calls and not input_data.last_tool_output:
            raise ValueError(f"Tool call requires an output for {pending_tool_calls}")

        # Prefill all missing tool calls with the last tool output.
        # TODO: we need a better way to handle this.
        tool_output = [
            _create_tool_response(pending_call_id, input_data.last_tool_output)
            for pending_call_id, count in pending_tool_calls.items()
            for _ in range(count)
        ]

        # If the SDM block only calls 1 tool at a time, this should not happen.
        if len(tool_output) > 1:
            logger.warning(
                f"[SmartDecisionMakerBlock-node_exec_id={node_exec_id}] "
                f"Multiple pending tool calls are prefilled using a single output. "
                f"Execution may not be accurate."
            )

        # Fallback on adding tool output in the conversation history as user prompt.
        if len(tool_output) == 0 and input_data.last_tool_output:
            logger.warning(
                f"[SmartDecisionMakerBlock-node_exec_id={node_exec_id}] "
                f"No pending tool calls found. This may indicate an issue with the "
                f"conversation history, or an LLM calling two tools at the same time."
            )
            tool_output.append(
                {
                    "role": "user",
                    "content": f"Last tool output: {json.dumps(input_data.last_tool_output)}",
                }
            )

        prompt.extend(tool_output)

        values = input_data.prompt_values
        if values:
            input_data.prompt = llm.fmt.format_string(input_data.prompt, values)
            input_data.sys_prompt = llm.fmt.format_string(input_data.sys_prompt, values)

        prefix = "[Main Objective Prompt]: "

        if input_data.sys_prompt and not any(
            p["role"] == "system" and p["content"].startswith(prefix) for p in prompt
        ):
            prompt.append({"role": "system", "content": prefix + input_data.sys_prompt})

        if input_data.prompt and not any(
            p["role"] == "user" and p["content"].startswith(prefix) for p in prompt
        ):
            prompt.append({"role": "user", "content": prefix + input_data.prompt})

        response = llm.llm_call(
            credentials=credentials,
            llm_model=input_data.model,
            prompt=prompt,
            json_format=False,
            max_tokens=input_data.max_tokens,
            tools=tool_functions,
            ollama_host=input_data.ollama_host,
            parallel_tool_calls=False,
        )

        if not response.tool_calls:
            yield "finished", response.response
            return

        for tool_call in response.tool_calls:
            tool_name = tool_call.function.name
            tool_args = json.loads(tool_call.function.arguments)

            for arg_name, arg_value in tool_args.items():
                yield f"tools_^_{tool_name}_~_{arg_name}", arg_value

        response.prompt.append(response.raw_response)
        yield "conversations", response.prompt

@@ -1,97 +0,0 @@
from backend.blocks.smartlead.models import (
    AddLeadsRequest,
    AddLeadsToCampaignResponse,
    CreateCampaignRequest,
    CreateCampaignResponse,
    SaveSequencesRequest,
    SaveSequencesResponse,
)
from backend.util.request import Requests


class SmartLeadClient:
    """Client for the SmartLead API"""

    # This api is stupid and requires your api key in the url. DO NOT RAISE ERRORS FOR BAD REQUESTS.
    # FILTER OUT THE API KEY FROM THE ERROR MESSAGE.

    API_URL = "https://server.smartlead.ai/api/v1"

    def __init__(self, api_key: str):
        self.api_key = api_key
        self.requests = Requests()

    def _add_auth_to_url(self, url: str) -> str:
        return f"{url}?api_key={self.api_key}"

    def _handle_error(self, e: Exception) -> str:
        return e.__str__().replace(self.api_key, "API KEY")
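Because the key travels in the query string, any exception that echoes the request URL can leak it, which is what `_handle_error` scrubs. A tiny illustration with a made-up key:

    client = SmartLeadClient(api_key="sk-secret")
    err = Exception("404 for https://server.smartlead.ai/api/v1/campaigns/create?api_key=sk-secret")
    print(client._handle_error(err))
    # -> "404 for https://server.smartlead.ai/api/v1/campaigns/create?api_key=API KEY"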
    def create_campaign(self, request: CreateCampaignRequest) -> CreateCampaignResponse:
        try:
            response = self.requests.post(
                self._add_auth_to_url(f"{self.API_URL}/campaigns/create"),
                json=request.model_dump(),
            )
            response_data = response.json()
            return CreateCampaignResponse(**response_data)
        except ValueError as e:
            raise ValueError(f"Invalid response format: {str(e)}")
        except Exception as e:
            raise ValueError(f"Failed to create campaign: {self._handle_error(e)}")

    def add_leads_to_campaign(
        self, request: AddLeadsRequest
    ) -> AddLeadsToCampaignResponse:
        try:
            response = self.requests.post(
                self._add_auth_to_url(
                    f"{self.API_URL}/campaigns/{request.campaign_id}/leads"
                ),
                json=request.model_dump(exclude={"campaign_id"}),
            )
            response_data = response.json()
            response_parsed = AddLeadsToCampaignResponse(**response_data)
            if not response_parsed.ok:
                raise ValueError(
                    f"Failed to add leads to campaign: {response_parsed.error}"
                )
            return response_parsed
        except ValueError as e:
            raise ValueError(f"Invalid response format: {str(e)}")
        except Exception as e:
            raise ValueError(
                f"Failed to add leads to campaign: {self._handle_error(e)}"
            )

    def save_campaign_sequences(
        self, campaign_id: int, request: SaveSequencesRequest
    ) -> SaveSequencesResponse:
        """
        Save sequences within a campaign.

        Args:
            campaign_id: ID of the campaign to save sequences for
            request: SaveSequencesRequest containing the sequences configuration

        Returns:
            SaveSequencesResponse with the result of the operation

        Note:
            For variant_distribution_type:
            - MANUAL_EQUAL: Equally distributes variants across leads
            - AI_EQUAL: Requires winning_metric_property and lead_distribution_percentage
            - MANUAL_PERCENTAGE: Requires variant_distribution_percentage in seq_variants
        """
        try:
            response = self.requests.post(
                self._add_auth_to_url(
                    f"{self.API_URL}/campaigns/{campaign_id}/sequences"
                ),
                json=request.model_dump(exclude_none=True),
            )
            return SaveSequencesResponse(**response.json())
        except Exception as e:
            raise ValueError(
                f"Failed to save campaign sequences: {e.__str__().replace(self.api_key, 'API KEY')}"
            )
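Read together with the models file further below, the Note above translates into payloads like this sketch of a MANUAL_PERCENTAGE A/B split; all field values here are invented for illustration:

    request = SaveSequencesRequest(
        sequences=[
            Sequence(
                seq_number=1,
                seq_delay_details=SequenceDelayDetails(delay_in_days=2),
                variant_distribution_type=VariantDistributionType.MANUAL_PERCENTAGE,
                seq_variants=[
                    SequenceVariant(subject="Hi", email_body="...", variant_label="A",
                                    variant_distribution_percentage=70),
                    SequenceVariant(subject="Hello", email_body="...", variant_label="B",
                                    variant_distribution_percentage=30),
                ],
            )
        ]
    )
    client.save_campaign_sequences(campaign_id=1, request=request)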
@@ -1,35 +0,0 @@
from typing import Literal

from pydantic import SecretStr

from backend.data.model import APIKeyCredentials, CredentialsField, CredentialsMetaInput
from backend.integrations.providers import ProviderName

SmartLeadCredentials = APIKeyCredentials
SmartLeadCredentialsInput = CredentialsMetaInput[
    Literal[ProviderName.SMARTLEAD],
    Literal["api_key"],
]

TEST_CREDENTIALS = APIKeyCredentials(
    id="01234567-89ab-cdef-0123-456789abcdef",
    provider="smartlead",
    api_key=SecretStr("mock-smartlead-api-key"),
    title="Mock SmartLead API key",
    expires_at=None,
)
TEST_CREDENTIALS_INPUT = {
    "provider": TEST_CREDENTIALS.provider,
    "id": TEST_CREDENTIALS.id,
    "type": TEST_CREDENTIALS.type,
    "title": TEST_CREDENTIALS.title,
}


def SmartLeadCredentialsField() -> SmartLeadCredentialsInput:
    """
    Creates a SmartLead credentials input on a block.
    """
    return CredentialsField(
        description="The SmartLead integration can be used with an API Key.",
    )
@@ -1,326 +0,0 @@
from backend.blocks.smartlead._api import SmartLeadClient
from backend.blocks.smartlead._auth import (
    TEST_CREDENTIALS,
    TEST_CREDENTIALS_INPUT,
    SmartLeadCredentials,
    SmartLeadCredentialsInput,
)
from backend.blocks.smartlead.models import (
    AddLeadsRequest,
    AddLeadsToCampaignResponse,
    CreateCampaignRequest,
    CreateCampaignResponse,
    LeadInput,
    LeadUploadSettings,
    SaveSequencesRequest,
    SaveSequencesResponse,
    Sequence,
)
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField


class CreateCampaignBlock(Block):
    """Create a campaign in SmartLead"""

    class Input(BlockSchema):
        name: str = SchemaField(
            description="The name of the campaign",
        )
        credentials: SmartLeadCredentialsInput = SchemaField(
            description="SmartLead credentials",
        )

    class Output(BlockSchema):
        id: int = SchemaField(
            description="The ID of the created campaign",
        )
        name: str = SchemaField(
            description="The name of the created campaign",
        )
        created_at: str = SchemaField(
            description="The date and time the campaign was created",
        )
        error: str = SchemaField(
            description="Error message if the search failed",
            default="",
        )

    def __init__(self):
        super().__init__(
            id="8865699f-9188-43c4-89b0-79c84cfaa03e",
            description="Create a campaign in SmartLead",
            categories={BlockCategory.CRM},
            input_schema=CreateCampaignBlock.Input,
            output_schema=CreateCampaignBlock.Output,
            test_credentials=TEST_CREDENTIALS,
            test_input={"name": "Test Campaign", "credentials": TEST_CREDENTIALS_INPUT},
            test_output=[
                (
                    "id",
                    1,
                ),
                (
                    "name",
                    "Test Campaign",
                ),
                (
                    "created_at",
                    "2024-01-01T00:00:00Z",
                ),
            ],
            test_mock={
                "create_campaign": lambda name, credentials: CreateCampaignResponse(
                    ok=True,
                    id=1,
                    name=name,
                    created_at="2024-01-01T00:00:00Z",
                )
            },
        )

    @staticmethod
    def create_campaign(
        name: str, credentials: SmartLeadCredentials
    ) -> CreateCampaignResponse:
        client = SmartLeadClient(credentials.api_key.get_secret_value())
        return client.create_campaign(CreateCampaignRequest(name=name))

    def run(
        self,
        input_data: Input,
        *,
        credentials: SmartLeadCredentials,
        **kwargs,
    ) -> BlockOutput:
        response = self.create_campaign(input_data.name, credentials)

        yield "id", response.id
        yield "name", response.name
        yield "created_at", response.created_at
        if not response.ok:
            yield "error", "Failed to create campaign"


class AddLeadToCampaignBlock(Block):
    """Add a lead to a campaign in SmartLead"""

    class Input(BlockSchema):
        campaign_id: int = SchemaField(
            description="The ID of the campaign to add the lead to",
        )
        lead_list: list[LeadInput] = SchemaField(
            description="An array of JSON objects, each representing a lead's details. Can hold max 100 leads.",
            max_length=100,
            default_factory=list,
            advanced=False,
        )
        settings: LeadUploadSettings = SchemaField(
            description="Settings for lead upload",
            default=LeadUploadSettings(),
        )
        credentials: SmartLeadCredentialsInput = SchemaField(
            description="SmartLead credentials",
        )

    class Output(BlockSchema):
        campaign_id: int = SchemaField(
            description="The ID of the campaign the lead was added to (passed through)",
        )
        upload_count: int = SchemaField(
            description="The number of leads added to the campaign",
        )
        already_added_to_campaign: int = SchemaField(
            description="The number of leads that were already added to the campaign",
        )
        duplicate_count: int = SchemaField(
            description="The number of emails that were duplicates",
        )
        invalid_email_count: int = SchemaField(
            description="The number of emails that were invalidly formatted",
        )
        is_lead_limit_exhausted: bool = SchemaField(
            description="Whether the lead limit was exhausted",
        )
        lead_import_stopped_count: int = SchemaField(
            description="The number of leads that were not added to the campaign because the lead import was stopped",
        )
        error: str = SchemaField(
            description="Error message if the lead was not added to the campaign",
            default="",
        )

    def __init__(self):
        super().__init__(
            id="fb8106a4-1a8f-42f9-a502-f6d07e6fe0ec",
            description="Add a lead to a campaign in SmartLead",
            categories={BlockCategory.CRM},
            input_schema=AddLeadToCampaignBlock.Input,
            output_schema=AddLeadToCampaignBlock.Output,
            test_credentials=TEST_CREDENTIALS,
            test_input={
                "campaign_id": 1,
                "lead_list": [],
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_output=[
                (
                    "campaign_id",
                    1,
                ),
                (
                    "upload_count",
                    1,
                ),
            ],
            test_mock={
                "add_leads_to_campaign": lambda campaign_id, lead_list, credentials: AddLeadsToCampaignResponse(
                    ok=True,
                    upload_count=1,
                    already_added_to_campaign=0,
                    duplicate_count=0,
                    invalid_email_count=0,
                    is_lead_limit_exhausted=False,
                    lead_import_stopped_count=0,
                    error="",
                    total_leads=1,
                    block_count=0,
                    invalid_emails=[],
                    unsubscribed_leads=[],
                    bounce_count=0,
                )
            },
        )

    @staticmethod
    def add_leads_to_campaign(
        campaign_id: int, lead_list: list[LeadInput], credentials: SmartLeadCredentials
    ) -> AddLeadsToCampaignResponse:
        client = SmartLeadClient(credentials.api_key.get_secret_value())
        return client.add_leads_to_campaign(
            AddLeadsRequest(
                campaign_id=campaign_id,
                lead_list=lead_list,
                settings=LeadUploadSettings(
                    ignore_global_block_list=False,
                    ignore_unsubscribe_list=False,
                    ignore_community_bounce_list=False,
                    ignore_duplicate_leads_in_other_campaign=False,
                ),
            ),
        )

    def run(
        self,
        input_data: Input,
        *,
        credentials: SmartLeadCredentials,
        **kwargs,
    ) -> BlockOutput:
        response = self.add_leads_to_campaign(
            input_data.campaign_id, input_data.lead_list, credentials
        )

        yield "campaign_id", input_data.campaign_id
        yield "upload_count", response.upload_count
        if response.already_added_to_campaign:
            yield "already_added_to_campaign", response.already_added_to_campaign
        if response.duplicate_count:
            yield "duplicate_count", response.duplicate_count
        if response.invalid_email_count:
            yield "invalid_email_count", response.invalid_email_count
        if response.is_lead_limit_exhausted:
            yield "is_lead_limit_exhausted", response.is_lead_limit_exhausted
        if response.lead_import_stopped_count:
            yield "lead_import_stopped_count", response.lead_import_stopped_count
        if response.error:
            yield "error", response.error
        if not response.ok:
            yield "error", "Failed to add leads to campaign"


class SaveCampaignSequencesBlock(Block):
    """Save sequences within a campaign"""

    class Input(BlockSchema):
        campaign_id: int = SchemaField(
            description="The ID of the campaign to save sequences for",
        )
        sequences: list[Sequence] = SchemaField(
            description="The sequences to save",
            default_factory=list,
            advanced=False,
        )
        credentials: SmartLeadCredentialsInput = SchemaField(
            description="SmartLead credentials",
        )

    class Output(BlockSchema):
        data: dict | str | None = SchemaField(
            description="Data from the API",
            default=None,
        )
        message: str = SchemaField(
            description="Message from the API",
            default="",
        )
        error: str = SchemaField(
            description="Error message if the sequences were not saved",
            default="",
        )

    def __init__(self):
        super().__init__(
            id="e7d9f41c-dc10-4f39-98ba-a432abd128c0",
            description="Save sequences within a campaign",
            categories={BlockCategory.CRM},
            input_schema=SaveCampaignSequencesBlock.Input,
            output_schema=SaveCampaignSequencesBlock.Output,
            test_credentials=TEST_CREDENTIALS,
            test_input={
                "campaign_id": 1,
                "sequences": [],
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_output=[
                (
                    "message",
                    "Sequences saved successfully",
                ),
            ],
            test_mock={
                "save_campaign_sequences": lambda campaign_id, sequences, credentials: SaveSequencesResponse(
                    ok=True,
                    message="Sequences saved successfully",
                )
            },
        )

    @staticmethod
    def save_campaign_sequences(
        campaign_id: int, sequences: list[Sequence], credentials: SmartLeadCredentials
    ) -> SaveSequencesResponse:
        client = SmartLeadClient(credentials.api_key.get_secret_value())
        return client.save_campaign_sequences(
            campaign_id=campaign_id, request=SaveSequencesRequest(sequences=sequences)
        )

    def run(
        self,
        input_data: Input,
        *,
        credentials: SmartLeadCredentials,
        **kwargs,
    ) -> BlockOutput:
        response = self.save_campaign_sequences(
            input_data.campaign_id, input_data.sequences, credentials
        )

        if response.data:
            yield "data", response.data
        if response.message:
            yield "message", response.message
        if response.error:
            yield "error", response.error
        if not response.ok:
            yield "error", "Failed to save sequences"
@@ -1,147 +0,0 @@
from enum import Enum

from pydantic import BaseModel

from backend.data.model import SchemaField


class CreateCampaignResponse(BaseModel):
    ok: bool
    id: int
    name: str
    created_at: str


class CreateCampaignRequest(BaseModel):
    name: str
    client_id: str | None = None


class AddLeadsToCampaignResponse(BaseModel):
    ok: bool
    upload_count: int
    total_leads: int
    block_count: int
    duplicate_count: int
    invalid_email_count: int
    invalid_emails: list[str]
    already_added_to_campaign: int
    unsubscribed_leads: list[str]
    is_lead_limit_exhausted: bool
    lead_import_stopped_count: int
    bounce_count: int
    error: str | None = None


class LeadCustomFields(BaseModel):
    """Custom fields for a lead (max 20 fields)"""

    fields: dict[str, str] = SchemaField(
        description="Custom fields for a lead (max 20 fields)",
        max_length=20,
        default_factory=dict,
    )


class LeadInput(BaseModel):
    """Single lead input data"""

    first_name: str
    last_name: str
    email: str
    phone_number: str | None = None  # Changed from int to str for phone numbers
    company_name: str | None = None
    website: str | None = None
    location: str | None = None
    custom_fields: LeadCustomFields | None = None
    linkedin_profile: str | None = None
    company_url: str | None = None


class LeadUploadSettings(BaseModel):
    """Settings for lead upload"""

    ignore_global_block_list: bool = SchemaField(
        description="Ignore the global block list",
        default=False,
    )
    ignore_unsubscribe_list: bool = SchemaField(
        description="Ignore the unsubscribe list",
        default=False,
    )
    ignore_community_bounce_list: bool = SchemaField(
        description="Ignore the community bounce list",
        default=False,
    )
    ignore_duplicate_leads_in_other_campaign: bool = SchemaField(
        description="Ignore duplicate leads in other campaigns",
        default=False,
    )


class AddLeadsRequest(BaseModel):
    """Request body for adding leads to a campaign"""

    lead_list: list[LeadInput] = SchemaField(
        description="List of leads to add to the campaign",
        max_length=100,
        default_factory=list,
    )
    settings: LeadUploadSettings
    campaign_id: int


class VariantDistributionType(str, Enum):
    MANUAL_EQUAL = "MANUAL_EQUAL"
    MANUAL_PERCENTAGE = "MANUAL_PERCENTAGE"
    AI_EQUAL = "AI_EQUAL"


class WinningMetricProperty(str, Enum):
    OPEN_RATE = "OPEN_RATE"
    CLICK_RATE = "CLICK_RATE"
    REPLY_RATE = "REPLY_RATE"
    POSITIVE_REPLY_RATE = "POSITIVE_REPLY_RATE"


class SequenceDelayDetails(BaseModel):
    delay_in_days: int


class SequenceVariant(BaseModel):
    subject: str
    email_body: str
    variant_label: str
    id: int | None = None  # Optional for creation, required for updates
    variant_distribution_percentage: int | None = None


class Sequence(BaseModel):
    seq_number: int = SchemaField(
        description="The sequence number",
        default=1,
    )
    seq_delay_details: SequenceDelayDetails
    id: int | None = None
    variant_distribution_type: VariantDistributionType | None = None
    lead_distribution_percentage: int | None = SchemaField(
        None, ge=20, le=100
    )  # >= 20% for fair calculation
    winning_metric_property: WinningMetricProperty | None = None
    seq_variants: list[SequenceVariant] | None = None
    subject: str = ""  # blank makes the follow up in the same thread
    email_body: str | None = None


class SaveSequencesRequest(BaseModel):
    sequences: list[Sequence]


class SaveSequencesResponse(BaseModel):
    ok: bool
    message: str = SchemaField(
        description="Message from the API",
        default="",
    )
    data: dict | str | None = None
    error: str | None = None
@@ -78,7 +78,7 @@ class CreateTalkingAvatarVideoBlock(Block):
        super().__init__(
            id="98c6f503-8c47-4b1c-a96d-351fc7c87dab",
            description="This block integrates with D-ID to create video clips and retrieve their URLs.",
            categories={BlockCategory.AI, BlockCategory.MULTIMEDIA},
            categories={BlockCategory.AI},
            input_schema=CreateTalkingAvatarVideoBlock.Input,
            output_schema=CreateTalkingAvatarVideoBlock.Output,
            test_input={

@@ -76,8 +76,6 @@ class ExtractTextInformationBlock(Block):
    class Output(BlockSchema):
        positive: str = SchemaField(description="Extracted text")
        negative: str = SchemaField(description="Original text")
        matched_results: list[str] = SchemaField(description="List of matched results")
        matched_count: int = SchemaField(description="Number of matched results")

    def __init__(self):
        super().__init__(
@@ -105,31 +103,13 @@ class ExtractTextInformationBlock(Block):
                },
            ],
            test_output=[
                # Test case 1
                ("positive", "World!"),
                ("matched_results", ["World!"]),
                ("matched_count", 1),
                # Test case 2
                ("positive", "Hello, World!"),
                ("matched_results", ["Hello, World!"]),
                ("matched_count", 1),
                # Test case 3
                ("negative", "Hello, World!"),
                ("matched_results", []),
                ("matched_count", 0),
                # Test case 4
                ("positive", "Hello,"),
                ("matched_results", ["Hello,"]),
                ("matched_count", 1),
                # Test case 5
                ("positive", "World!!"),
                ("matched_results", ["World!!"]),
                ("matched_count", 1),
                # Test case 6
                ("positive", "World!!"),
                ("positive", "Earth!!"),
                ("matched_results", ["World!!", "Earth!!"]),
                ("matched_count", 2),
            ],
        )
@@ -150,16 +130,13 @@ class ExtractTextInformationBlock(Block):
            for match in re.finditer(input_data.pattern, txt, flags)
            if input_data.group <= len(match.groups())
        ]
        if not input_data.find_all:
            matches = matches[:1]
        for match in matches:
            yield "positive", match
            if not input_data.find_all:
                return
        if not matches:
            yield "negative", input_data.text

        yield "matched_results", matches
        yield "matched_count", len(matches)


class FillTextTemplateBlock(Block):
    class Input(BlockSchema):
@@ -235,71 +212,3 @@ class CombineTextsBlock(Block):
    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        combined_text = input_data.delimiter.join(input_data.input)
        yield "output", combined_text


class TextSplitBlock(Block):
    class Input(BlockSchema):
        text: str = SchemaField(description="The text to split.")
        delimiter: str = SchemaField(description="The delimiter to split the text by.")
        strip: bool = SchemaField(
            description="Whether to strip the text.", default=True
        )

    class Output(BlockSchema):
        texts: list[str] = SchemaField(
            description="The text split into a list of strings."
        )

    def __init__(self):
        super().__init__(
            id="d5ea33c8-a575-477a-b42f-2fe3be5055ec",
            description="This block is used to split a text into a list of strings.",
            categories={BlockCategory.TEXT},
            input_schema=TextSplitBlock.Input,
            output_schema=TextSplitBlock.Output,
            test_input=[
                {"text": "Hello, World!", "delimiter": ","},
                {"text": "Hello, World!", "delimiter": ",", "strip": False},
            ],
            test_output=[
                ("texts", ["Hello", "World!"]),
                ("texts", ["Hello", " World!"]),
            ],
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        if len(input_data.text) == 0:
            yield "texts", []
        else:
            texts = input_data.text.split(input_data.delimiter)
            if input_data.strip:
                texts = [text.strip() for text in texts]
            yield "texts", texts


class TextReplaceBlock(Block):
    class Input(BlockSchema):
        text: str = SchemaField(description="The text to replace.")
        old: str = SchemaField(description="The old text to replace.")
        new: str = SchemaField(description="The new text to replace with.")

    class Output(BlockSchema):
        output: str = SchemaField(description="The text with the replaced text.")

    def __init__(self):
        super().__init__(
            id="7e7c87ab-3469-4bcc-9abe-67705091b713",
            description="This block is used to replace a text with a new text.",
            categories={BlockCategory.TEXT},
            input_schema=TextReplaceBlock.Input,
            output_schema=TextReplaceBlock.Output,
            test_input=[
                {"text": "Hello, World!", "old": "Hello", "new": "Hi"},
            ],
            test_output=[
                ("output", "Hi, World!"),
            ],
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        yield "output", input_data.text.replace(input_data.old, input_data.new)
Some files were not shown because too many files have changed in this diff.