Compare commits

..

24 Commits

Author SHA1 Message Date
abhi1992002
6c94676e76 fix yarn.lock 2024-12-30 11:04:03 +05:30
abhi1992002
694b9df9da fix backend tests 2024-12-30 10:53:54 +05:30
abhi1992002
3c264f162c fix expansions in tweet block 2024-12-30 10:53:54 +05:30
abhi1992002
ce591cefda fix tweet blocks expansions 2024-12-30 10:53:53 +05:30
abhi1992002
75ba8c2e83 1. add optional datetime and multi select support 2024-12-30 10:53:53 +05:30
Nicholas Tindle
ebd69374a6 fix: linting came back with a vengence 2024-12-30 10:53:53 +05:30
Nicholas Tindle
ea13d970ab fix: credentials issues 2024-12-30 10:53:53 +05:30
Nicholas Tindle
3cc0be9234 fix: only add tweepy (and down bumb related :() 2024-12-30 10:53:53 +05:30
Nicholas Tindle
36af4a53f8 fix: project problems 2024-12-30 10:53:53 +05:30
Nicholas Tindle
431a87ab6d Discard changes to autogpt_platform/frontend/yarn.lock 2024-12-30 10:53:53 +05:30
Nicholas Tindle
d6297c43fd fix: formatting 2024-12-30 10:53:53 +05:30
abhi
023c5b9a17 fix:oauth2 2024-12-30 10:53:53 +05:30
Abhimanyu Yadav
3344b7f51e Update credentials-input.tsx 2024-12-30 10:53:53 +05:30
Abhimanyu Yadav
159af2b89a Update credentials-provider.tsx 2024-12-30 10:53:53 +05:30
abhi
67b17475b1 fix multiselect 2024-12-30 10:53:53 +05:30
abhi
2968453f18 fix multiselect 2024-12-30 10:53:53 +05:30
abhi
db57cecb1f initial changes 2024-12-30 10:53:53 +05:30
abhi1992002
1678a1c69c refactor: Enhance Supabase integration and Twitter OAuth handling
- Updated `store.py` to improve state token management by adding PKCE support and simplifying code challenge generation.
- Modified environment variable names in `.env.example` for consistency.
- Removed unnecessary `is_multi_select` attributes from various Twitter-related input schemas to streamline the code.
- Cleaned up exception handling in Twitter list management and tweet management blocks by removing redundant error logging.
- Removed debug print statements from various components to clean up the codebase.
- Fixed a minor error message in the Twitter OAuth handler for clarity.
2024-12-30 10:53:53 +05:30
Abhimanyu Yadav
49a24f73b7 Update pyproject.toml 2024-12-30 10:53:53 +05:30
Abhimanyu Yadav
794dd4f59c Update credentials-input.tsx 2024-12-30 10:53:53 +05:30
Abhimanyu Yadav
3fb3099814 Update credentials-provider.tsx 2024-12-30 10:53:53 +05:30
abhi1992002
01185c55a1 fix: test 2024-12-30 10:53:53 +05:30
abhi1992002
e9e7f28cad fix linting 2024-12-30 10:53:53 +05:30
abhi1992002
ca26b298b6 add twitter credentials with some frontend changes
# Conflicts:
#	autogpt_platform/backend/backend/data/model.py
#	autogpt_platform/backend/pyproject.toml
#	autogpt_platform/frontend/src/components/integrations/credentials-input.tsx
2024-12-30 10:53:53 +05:30
164 changed files with 6872 additions and 8077 deletions

View File

@@ -1,61 +1,40 @@
# Ignore everything by default, selectively add things to context
*
classic/run
# Platform - Libs
!autogpt_platform/autogpt_libs/autogpt_libs/
!autogpt_platform/autogpt_libs/pyproject.toml
!autogpt_platform/autogpt_libs/poetry.lock
!autogpt_platform/autogpt_libs/README.md
# Platform - Backend
!autogpt_platform/backend/backend/
!autogpt_platform/backend/migrations/
!autogpt_platform/backend/schema.prisma
!autogpt_platform/backend/pyproject.toml
!autogpt_platform/backend/poetry.lock
!autogpt_platform/backend/README.md
# Platform - Market
!autogpt_platform/market/market/
!autogpt_platform/market/scripts.py
!autogpt_platform/market/schema.prisma
!autogpt_platform/market/pyproject.toml
!autogpt_platform/market/poetry.lock
!autogpt_platform/market/README.md
# Platform - Frontend
!autogpt_platform/frontend/src/
!autogpt_platform/frontend/public/
!autogpt_platform/frontend/package.json
!autogpt_platform/frontend/yarn.lock
!autogpt_platform/frontend/tsconfig.json
!autogpt_platform/frontend/README.md
## config
!autogpt_platform/frontend/*.config.*
!autogpt_platform/frontend/.env.*
# Classic - AutoGPT
# AutoGPT
!classic/original_autogpt/autogpt/
!classic/original_autogpt/pyproject.toml
!classic/original_autogpt/poetry.lock
!classic/original_autogpt/README.md
!classic/original_autogpt/tests/
# Classic - Benchmark
# Benchmark
!classic/benchmark/agbenchmark/
!classic/benchmark/pyproject.toml
!classic/benchmark/poetry.lock
!classic/benchmark/README.md
# Classic - Forge
# Forge
!classic/forge/
!classic/forge/pyproject.toml
!classic/forge/poetry.lock
!classic/forge/README.md
# Classic - Frontend
# Frontend
!classic/frontend/build/web/
# Platform
!autogpt_platform/
# Explicitly re-ignore some folders
.*
**/__pycache__
autogpt_platform/frontend/.next/
autogpt_platform/frontend/node_modules
autogpt_platform/frontend/.env.example
autogpt_platform/frontend/.env.local
autogpt_platform/backend/.env
autogpt_platform/backend/.venv/
autogpt_platform/market/.env

View File

@@ -89,6 +89,28 @@ updates:
- "minor"
- "patch"
# market (Poetry project)
- package-ecosystem: "pip"
directory: "autogpt_platform/market"
schedule:
interval: "weekly"
open-pull-requests-limit: 10
target-branch: "dev"
commit-message:
prefix: "chore(market/deps)"
prefix-development: "chore(market/deps-dev)"
groups:
production-dependencies:
dependency-type: "production"
update-types:
- "minor"
- "patch"
development-dependencies:
dependency-type: "development"
update-types:
- "minor"
- "patch"
# GitHub Actions
- package-ecosystem: "github-actions"

View File

@@ -35,6 +35,12 @@ jobs:
env:
DATABASE_URL: ${{ secrets.BACKEND_DATABASE_URL }}
- name: Run Market Migrations
working-directory: ./autogpt_platform/market
run: |
python -m prisma migrate deploy
env:
DATABASE_URL: ${{ secrets.MARKET_DATABASE_URL }}
trigger:
needs: migrate

View File

@@ -37,6 +37,13 @@ jobs:
env:
DATABASE_URL: ${{ secrets.BACKEND_DATABASE_URL }}
- name: Run Market Migrations
working-directory: ./autogpt_platform/market
run: |
python -m prisma migrate deploy
env:
DATABASE_URL: ${{ secrets.MARKET_DATABASE_URL }}
trigger:
needs: migrate
runs-on: ubuntu-latest

View File

@@ -81,7 +81,7 @@ jobs:
- name: Check poetry.lock
run: |
poetry lock
poetry lock --no-update
if ! git diff --quiet poetry.lock; then
echo "Error: poetry.lock not up to date."

View File

@@ -88,11 +88,6 @@ jobs:
run: |
yarn test --project=${{ matrix.browser }}
- name: Print Docker Compose logs in debug mode
if: runner.debug
run: |
docker compose -f ../docker-compose.yml logs
- uses: actions/upload-artifact@v4
if: ${{ !cancelled() }}
with:

126
.github/workflows/platform-market-ci.yml vendored Normal file
View File

@@ -0,0 +1,126 @@
name: AutoGPT Platform - Backend CI
on:
push:
branches: [master, dev, ci-test*]
paths:
- ".github/workflows/platform-market-ci.yml"
- "autogpt_platform/market/**"
pull_request:
branches: [master, dev, release-*]
paths:
- ".github/workflows/platform-market-ci.yml"
- "autogpt_platform/market/**"
merge_group:
concurrency:
group: ${{ format('backend-ci-{0}', github.head_ref && format('{0}-{1}', github.event_name, github.event.pull_request.number) || github.sha) }}
cancel-in-progress: ${{ startsWith(github.event_name, 'pull_request') }}
defaults:
run:
shell: bash
working-directory: autogpt_platform/market
jobs:
test:
permissions:
contents: read
timeout-minutes: 30
strategy:
fail-fast: false
matrix:
python-version: ["3.10"]
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0
submodules: true
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Setup Supabase
uses: supabase/setup-cli@v1
with:
version: latest
- id: get_date
name: Get date
run: echo "date=$(date +'%Y-%m-%d')" >> $GITHUB_OUTPUT
- name: Set up Python dependency cache
uses: actions/cache@v4
with:
path: ~/.cache/pypoetry
key: poetry-${{ runner.os }}-${{ hashFiles('autogpt_platform/market/poetry.lock') }}
- name: Install Poetry (Unix)
run: |
curl -sSL https://install.python-poetry.org | python3 -
if [ "${{ runner.os }}" = "macOS" ]; then
PATH="$HOME/.local/bin:$PATH"
echo "$HOME/.local/bin" >> $GITHUB_PATH
fi
- name: Install Python dependencies
run: poetry install
- name: Generate Prisma Client
run: poetry run prisma generate
- id: supabase
name: Start Supabase
working-directory: .
run: |
supabase init
supabase start --exclude postgres-meta,realtime,storage-api,imgproxy,inbucket,studio,edge-runtime,logflare,vector,supavisor
supabase status -o env | sed 's/="/=/; s/"$//' >> $GITHUB_OUTPUT
# outputs:
# DB_URL, API_URL, GRAPHQL_URL, ANON_KEY, SERVICE_ROLE_KEY, JWT_SECRET
- name: Run Database Migrations
run: poetry run prisma migrate dev --name updates
env:
DATABASE_URL: ${{ steps.supabase.outputs.DB_URL }}
- id: lint
name: Run Linter
run: poetry run lint
# Tests comment out because they do not work with prisma mock, nor have they been updated since they were created
# - name: Run pytest with coverage
# run: |
# if [[ "${{ runner.debug }}" == "1" ]]; then
# poetry run pytest -s -vv -o log_cli=true -o log_cli_level=DEBUG test
# else
# poetry run pytest -s -vv test
# fi
# if: success() || (failure() && steps.lint.outcome == 'failure')
# env:
# LOG_LEVEL: ${{ runner.debug && 'DEBUG' || 'INFO' }}
# DATABASE_URL: ${{ steps.supabase.outputs.DB_URL }}
# SUPABASE_URL: ${{ steps.supabase.outputs.API_URL }}
# SUPABASE_SERVICE_ROLE_KEY: ${{ steps.supabase.outputs.SERVICE_ROLE_KEY }}
# SUPABASE_JWT_SECRET: ${{ steps.supabase.outputs.JWT_SECRET }}
# REDIS_HOST: 'localhost'
# REDIS_PORT: '6379'
# REDIS_PASSWORD: 'testpassword'
env:
CI: true
PLAIN_OUTPUT: True
RUN_ENV: local
PORT: 8080
# - name: Upload coverage reports to Codecov
# uses: codecov/codecov-action@v4
# with:
# token: ${{ secrets.CODECOV_TOKEN }}
# flags: backend,${{ runner.os }}

View File

@@ -110,7 +110,7 @@ repos:
- id: isort
name: Lint (isort) - AutoGPT Platform - Backend
alias: isort-platform-backend
entry: poetry -P autogpt_platform/backend run isort -p backend
entry: poetry -C autogpt_platform/backend run isort -p backend
files: ^autogpt_platform/backend/
types: [file, python]
language: system
@@ -118,7 +118,7 @@ repos:
- id: isort
name: Lint (isort) - Classic - AutoGPT
alias: isort-classic-autogpt
entry: poetry -P classic/original_autogpt run isort -p autogpt
entry: poetry -C classic/original_autogpt run isort -p autogpt
files: ^classic/original_autogpt/
types: [file, python]
language: system
@@ -126,7 +126,7 @@ repos:
- id: isort
name: Lint (isort) - Classic - Forge
alias: isort-classic-forge
entry: poetry -P classic/forge run isort -p forge
entry: poetry -C classic/forge run isort -p forge
files: ^classic/forge/
types: [file, python]
language: system
@@ -134,7 +134,7 @@ repos:
- id: isort
name: Lint (isort) - Classic - Benchmark
alias: isort-classic-benchmark
entry: poetry -P classic/benchmark run isort -p agbenchmark
entry: poetry -C classic/benchmark run isort -p agbenchmark
files: ^classic/benchmark/
types: [file, python]
language: system
@@ -178,6 +178,7 @@ repos:
name: Typecheck - AutoGPT Platform - Backend
alias: pyright-platform-backend
entry: poetry -C autogpt_platform/backend run pyright
args: [-p, autogpt_platform/backend, autogpt_platform/backend]
# include forge source (since it's a path dependency) but exclude *_test.py files:
files: ^autogpt_platform/(backend/((backend|test)/|(\w+\.py|poetry\.lock)$)|autogpt_libs/(autogpt_libs/.*(?<!_test)\.py|poetry\.lock)$)
types: [file]
@@ -188,6 +189,7 @@ repos:
name: Typecheck - AutoGPT Platform - Libs
alias: pyright-platform-libs
entry: poetry -C autogpt_platform/autogpt_libs run pyright
args: [-p, autogpt_platform/autogpt_libs, autogpt_platform/autogpt_libs]
files: ^autogpt_platform/autogpt_libs/(autogpt_libs/|poetry\.lock$)
types: [file]
language: system
@@ -197,6 +199,7 @@ repos:
name: Typecheck - Classic - AutoGPT
alias: pyright-classic-autogpt
entry: poetry -C classic/original_autogpt run pyright
args: [-p, classic/original_autogpt, classic/original_autogpt]
# include forge source (since it's a path dependency) but exclude *_test.py files:
files: ^(classic/original_autogpt/((autogpt|scripts|tests)/|poetry\.lock$)|classic/forge/(forge/.*(?<!_test)\.py|poetry\.lock)$)
types: [file]
@@ -207,6 +210,7 @@ repos:
name: Typecheck - Classic - Forge
alias: pyright-classic-forge
entry: poetry -C classic/forge run pyright
args: [-p, classic/forge, classic/forge]
files: ^classic/forge/(forge/|poetry\.lock$)
types: [file]
language: system
@@ -216,6 +220,7 @@ repos:
name: Typecheck - Classic - Benchmark
alias: pyright-classic-benchmark
entry: poetry -C classic/benchmark run pyright
args: [-p, classic/benchmark, classic/benchmark]
files: ^classic/benchmark/(agbenchmark/|tests/|poetry\.lock$)
types: [file]
language: system

View File

@@ -31,8 +31,7 @@ class RedisKeyedMutex:
try:
yield
finally:
if lock.locked():
lock.release()
lock.release()
def acquire(self, key: Any) -> "RedisLock":
"""Acquires and returns a lock with the given key"""
@@ -46,7 +45,7 @@ class RedisKeyedMutex:
return lock
def release(self, key: Any):
if (lock := self.locks.get(key)) and lock.locked() and lock.owned():
if lock := self.locks.get(key):
lock.release()
def release_all_locks(self):

View File

@@ -1415,29 +1415,29 @@ pyasn1 = ">=0.1.3"
[[package]]
name = "ruff"
version = "0.8.6"
version = "0.8.3"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
{file = "ruff-0.8.6-py3-none-linux_armv6l.whl", hash = "sha256:defed167955d42c68b407e8f2e6f56ba52520e790aba4ca707a9c88619e580e3"},
{file = "ruff-0.8.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:54799ca3d67ae5e0b7a7ac234baa657a9c1784b48ec954a094da7c206e0365b1"},
{file = "ruff-0.8.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e88b8f6d901477c41559ba540beeb5a671e14cd29ebd5683903572f4b40a9807"},
{file = "ruff-0.8.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0509e8da430228236a18a677fcdb0c1f102dd26d5520f71f79b094963322ed25"},
{file = "ruff-0.8.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:91a7ddb221779871cf226100e677b5ea38c2d54e9e2c8ed847450ebbdf99b32d"},
{file = "ruff-0.8.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:248b1fb3f739d01d528cc50b35ee9c4812aa58cc5935998e776bf8ed5b251e75"},
{file = "ruff-0.8.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:bc3c083c50390cf69e7e1b5a5a7303898966be973664ec0c4a4acea82c1d4315"},
{file = "ruff-0.8.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52d587092ab8df308635762386f45f4638badb0866355b2b86760f6d3c076188"},
{file = "ruff-0.8.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61323159cf21bc3897674e5adb27cd9e7700bab6b84de40d7be28c3d46dc67cf"},
{file = "ruff-0.8.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ae4478b1471fc0c44ed52a6fb787e641a2ac58b1c1f91763bafbc2faddc5117"},
{file = "ruff-0.8.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0c000a471d519b3e6cfc9c6680025d923b4ca140ce3e4612d1a2ef58e11f11fe"},
{file = "ruff-0.8.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9257aa841e9e8d9b727423086f0fa9a86b6b420fbf4bf9e1465d1250ce8e4d8d"},
{file = "ruff-0.8.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:45a56f61b24682f6f6709636949ae8cc82ae229d8d773b4c76c09ec83964a95a"},
{file = "ruff-0.8.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:496dd38a53aa173481a7d8866bcd6451bd934d06976a2505028a50583e001b76"},
{file = "ruff-0.8.6-py3-none-win32.whl", hash = "sha256:e169ea1b9eae61c99b257dc83b9ee6c76f89042752cb2d83486a7d6e48e8f764"},
{file = "ruff-0.8.6-py3-none-win_amd64.whl", hash = "sha256:f1d70bef3d16fdc897ee290d7d20da3cbe4e26349f62e8a0274e7a3f4ce7a905"},
{file = "ruff-0.8.6-py3-none-win_arm64.whl", hash = "sha256:7d7fc2377a04b6e04ffe588caad613d0c460eb2ecba4c0ccbbfe2bc973cbc162"},
{file = "ruff-0.8.6.tar.gz", hash = "sha256:dcad24b81b62650b0eb8814f576fc65cfee8674772a6e24c9b747911801eeaa5"},
{file = "ruff-0.8.3-py3-none-linux_armv6l.whl", hash = "sha256:8d5d273ffffff0acd3db5bf626d4b131aa5a5ada1276126231c4174543ce20d6"},
{file = "ruff-0.8.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e4d66a21de39f15c9757d00c50c8cdd20ac84f55684ca56def7891a025d7e939"},
{file = "ruff-0.8.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c356e770811858bd20832af696ff6c7e884701115094f427b64b25093d6d932d"},
{file = "ruff-0.8.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c0a60a825e3e177116c84009d5ebaa90cf40dfab56e1358d1df4e29a9a14b13"},
{file = "ruff-0.8.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:75fb782f4db39501210ac093c79c3de581d306624575eddd7e4e13747e61ba18"},
{file = "ruff-0.8.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f26bc76a133ecb09a38b7868737eded6941b70a6d34ef53a4027e83913b6502"},
{file = "ruff-0.8.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:01b14b2f72a37390c1b13477c1c02d53184f728be2f3ffc3ace5b44e9e87b90d"},
{file = "ruff-0.8.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53babd6e63e31f4e96ec95ea0d962298f9f0d9cc5990a1bbb023a6baf2503a82"},
{file = "ruff-0.8.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ae441ce4cf925b7f363d33cd6570c51435972d697e3e58928973994e56e1452"},
{file = "ruff-0.8.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7c65bc0cadce32255e93c57d57ecc2cca23149edd52714c0c5d6fa11ec328cd"},
{file = "ruff-0.8.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5be450bb18f23f0edc5a4e5585c17a56ba88920d598f04a06bd9fd76d324cb20"},
{file = "ruff-0.8.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8faeae3827eaa77f5721f09b9472a18c749139c891dbc17f45e72d8f2ca1f8fc"},
{file = "ruff-0.8.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:db503486e1cf074b9808403991663e4277f5c664d3fe237ee0d994d1305bb060"},
{file = "ruff-0.8.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6567be9fb62fbd7a099209257fef4ad2c3153b60579818b31a23c886ed4147ea"},
{file = "ruff-0.8.3-py3-none-win32.whl", hash = "sha256:19048f2f878f3ee4583fc6cb23fb636e48c2635e30fb2022b3a1cd293402f964"},
{file = "ruff-0.8.3-py3-none-win_amd64.whl", hash = "sha256:f7df94f57d7418fa7c3ffb650757e0c2b96cf2501a0b192c18e4fb5571dfada9"},
{file = "ruff-0.8.3-py3-none-win_arm64.whl", hash = "sha256:fe2756edf68ea79707c8d68b78ca9a58ed9af22e430430491ee03e718b5e4936"},
{file = "ruff-0.8.3.tar.gz", hash = "sha256:5e7558304353b84279042fc584a4f4cb8a07ae79b2bf3da1a7551d960b5626d3"},
]
[[package]]
@@ -1852,4 +1852,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<4.0"
content-hash = "bf1b0125759dadb1369fff05ffba64fea3e82b9b7a43d0068e1c80974a4ebc1c"
content-hash = "13a36d3be675cab4a3eb2e6a62a1b08df779bded4c7b9164d8be300dc08748d0"

View File

@@ -21,7 +21,7 @@ supabase = "^2.10.0"
[tool.poetry.group.dev.dependencies]
redis = "^5.2.1"
ruff = "^0.8.6"
ruff = "^0.8.3"
[build-system]
requires = ["poetry-core"]

View File

@@ -58,17 +58,7 @@ GITHUB_CLIENT_SECRET=
GOOGLE_CLIENT_ID=
GOOGLE_CLIENT_SECRET=
# Twitter (X) OAuth 2.0 with PKCE Configuration
# 1. Create a Twitter Developer Account:
# - Visit https://developer.x.com/en and sign up
# 2. Set up your application:
# - Navigate to Developer Portal > Projects > Create Project
# - Add a new app to your project
# 3. Configure app settings:
# - App Permissions: Read + Write + Direct Messages
# - App Type: Web App, Automated App or Bot
# - OAuth 2.0 Callback URL: http://localhost:3000/auth/integrations/oauth_callback
# - Save your Client ID and Client Secret below
# Twitter/X OAuth App server credentials - https://developer.x.com/en/products/x-api
TWITTER_CLIENT_ID=
TWITTER_CLIENT_SECRET=
@@ -121,18 +111,6 @@ REPLICATE_API_KEY=
# Ideogram
IDEOGRAM_API_KEY=
# Fal
FAL_API_KEY=
# Exa
EXA_API_KEY=
# E2B
E2B_API_KEY=
# Nvidia
NVIDIA_API_KEY=
# Logging Configuration
LOG_LEVEL=INFO
ENABLE_CLOUD_LOGGING=false

View File

@@ -17,11 +17,12 @@ RUN apt-get install -y libz-dev
RUN apt-get install -y libssl-dev
RUN apt-get install -y postgresql-client
ENV POETRY_VERSION=1.8.3
ENV POETRY_HOME=/opt/poetry
ENV POETRY_NO_INTERACTION=1
ENV POETRY_VIRTUALENVS_CREATE=false
ENV PATH=/opt/poetry/bin:$PATH
# Upgrade pip and setuptools to fix security vulnerabilities
RUN pip3 install --upgrade pip setuptools
@@ -31,21 +32,25 @@ RUN pip3 install poetry
COPY autogpt_platform/autogpt_libs /app/autogpt_platform/autogpt_libs
COPY autogpt_platform/backend/poetry.lock autogpt_platform/backend/pyproject.toml /app/autogpt_platform/backend/
WORKDIR /app/autogpt_platform/backend
RUN poetry install --no-ansi --no-root
RUN poetry config virtualenvs.create false \
&& poetry install --no-interaction --no-ansi
# Generate Prisma client
COPY autogpt_platform/backend/schema.prisma ./
RUN poetry run prisma generate
RUN poetry config virtualenvs.create false \
&& poetry run prisma generate
FROM python:3.11.10-slim-bookworm AS server_dependencies
WORKDIR /app
ENV POETRY_HOME=/opt/poetry \
POETRY_NO_INTERACTION=1 \
POETRY_VIRTUALENVS_CREATE=false
ENV POETRY_VERSION=1.8.3
ENV POETRY_HOME=/opt/poetry
ENV POETRY_NO_INTERACTION=1
ENV POETRY_VIRTUALENVS_CREATE=false
ENV PATH=/opt/poetry/bin:$PATH
# Upgrade pip and setuptools to fix security vulnerabilities
RUN pip3 install --upgrade pip setuptools
@@ -71,7 +76,6 @@ WORKDIR /app/autogpt_platform/backend
FROM server_dependencies AS server
COPY autogpt_platform/backend /app/autogpt_platform/backend
RUN poetry install --no-ansi --only-root
ENV DATABASE_URL=""
ENV PORT=8000

View File

@@ -76,11 +76,7 @@ class AgentExecutorBlock(Block):
)
if not event.node_id:
if event.status in [
ExecutionStatus.COMPLETED,
ExecutionStatus.TERMINATED,
ExecutionStatus.FAILED,
]:
if event.status in [ExecutionStatus.COMPLETED, ExecutionStatus.FAILED]:
logger.info(f"Execution {log_id} ended with status {event.status}")
break
else:

View File

@@ -241,7 +241,7 @@ class AgentOutputBlock(Block):
advanced=True,
)
format: str = SchemaField(
description="The format string to be used to format the recorded_value. Use Jinja2 syntax.",
description="The format string to be used to format the recorded_value.",
default="",
advanced=True,
)

View File

@@ -699,420 +699,3 @@ class GithubDeleteBranchBlock(Block):
input_data.branch,
)
yield "status", status
class GithubCreateFileBlock(Block):
class Input(BlockSchema):
credentials: GithubCredentialsInput = GithubCredentialsField("repo")
repo_url: str = SchemaField(
description="URL of the GitHub repository",
placeholder="https://github.com/owner/repo",
)
file_path: str = SchemaField(
description="Path where the file should be created",
placeholder="path/to/file.txt",
)
content: str = SchemaField(
description="Content to write to the file",
placeholder="File content here",
)
branch: str = SchemaField(
description="Branch where the file should be created",
default="main",
)
commit_message: str = SchemaField(
description="Message for the commit",
default="Create new file",
)
class Output(BlockSchema):
url: str = SchemaField(description="URL of the created file")
sha: str = SchemaField(description="SHA of the commit")
error: str = SchemaField(
description="Error message if the file creation failed"
)
def __init__(self):
super().__init__(
id="8fd132ac-b917-428a-8159-d62893e8a3fe",
description="This block creates a new file in a GitHub repository.",
categories={BlockCategory.DEVELOPER_TOOLS},
input_schema=GithubCreateFileBlock.Input,
output_schema=GithubCreateFileBlock.Output,
test_input={
"repo_url": "https://github.com/owner/repo",
"file_path": "test/file.txt",
"content": "Test content",
"branch": "main",
"commit_message": "Create test file",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("url", "https://github.com/owner/repo/blob/main/test/file.txt"),
("sha", "abc123"),
],
test_mock={
"create_file": lambda *args, **kwargs: (
"https://github.com/owner/repo/blob/main/test/file.txt",
"abc123",
)
},
)
@staticmethod
def create_file(
credentials: GithubCredentials,
repo_url: str,
file_path: str,
content: str,
branch: str,
commit_message: str,
) -> tuple[str, str]:
api = get_api(credentials)
# Convert content to base64
content_bytes = content.encode("utf-8")
content_base64 = base64.b64encode(content_bytes).decode("utf-8")
# Create the file using the GitHub API
contents_url = f"{repo_url}/contents/{file_path}"
data = {
"message": commit_message,
"content": content_base64,
"branch": branch,
}
response = api.put(contents_url, json=data)
result = response.json()
return result["content"]["html_url"], result["commit"]["sha"]
def run(
self,
input_data: Input,
*,
credentials: GithubCredentials,
**kwargs,
) -> BlockOutput:
try:
url, sha = self.create_file(
credentials,
input_data.repo_url,
input_data.file_path,
input_data.content,
input_data.branch,
input_data.commit_message,
)
yield "url", url
yield "sha", sha
except Exception as e:
yield "error", str(e)
class GithubUpdateFileBlock(Block):
class Input(BlockSchema):
credentials: GithubCredentialsInput = GithubCredentialsField("repo")
repo_url: str = SchemaField(
description="URL of the GitHub repository",
placeholder="https://github.com/owner/repo",
)
file_path: str = SchemaField(
description="Path to the file to update",
placeholder="path/to/file.txt",
)
content: str = SchemaField(
description="New content for the file",
placeholder="Updated content here",
)
branch: str = SchemaField(
description="Branch containing the file",
default="main",
)
commit_message: str = SchemaField(
description="Message for the commit",
default="Update file",
)
class Output(BlockSchema):
url: str = SchemaField(description="URL of the updated file")
sha: str = SchemaField(description="SHA of the commit")
error: str = SchemaField(description="Error message if the file update failed")
def __init__(self):
super().__init__(
id="30be12a4-57cb-4aa4-baf5-fcc68d136076",
description="This block updates an existing file in a GitHub repository.",
categories={BlockCategory.DEVELOPER_TOOLS},
input_schema=GithubUpdateFileBlock.Input,
output_schema=GithubUpdateFileBlock.Output,
test_input={
"repo_url": "https://github.com/owner/repo",
"file_path": "test/file.txt",
"content": "Updated content",
"branch": "main",
"commit_message": "Update test file",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("url", "https://github.com/owner/repo/blob/main/test/file.txt"),
("sha", "def456"),
],
test_mock={
"update_file": lambda *args, **kwargs: (
"https://github.com/owner/repo/blob/main/test/file.txt",
"def456",
)
},
)
@staticmethod
def update_file(
credentials: GithubCredentials,
repo_url: str,
file_path: str,
content: str,
branch: str,
commit_message: str,
) -> tuple[str, str]:
api = get_api(credentials)
# First get the current file to get its SHA
contents_url = f"{repo_url}/contents/{file_path}"
params = {"ref": branch}
response = api.get(contents_url, params=params)
current_file = response.json()
# Convert new content to base64
content_bytes = content.encode("utf-8")
content_base64 = base64.b64encode(content_bytes).decode("utf-8")
# Update the file
data = {
"message": commit_message,
"content": content_base64,
"sha": current_file["sha"],
"branch": branch,
}
response = api.put(contents_url, json=data)
result = response.json()
return result["content"]["html_url"], result["commit"]["sha"]
def run(
self,
input_data: Input,
*,
credentials: GithubCredentials,
**kwargs,
) -> BlockOutput:
try:
url, sha = self.update_file(
credentials,
input_data.repo_url,
input_data.file_path,
input_data.content,
input_data.branch,
input_data.commit_message,
)
yield "url", url
yield "sha", sha
except Exception as e:
yield "error", str(e)
class GithubCreateRepositoryBlock(Block):
class Input(BlockSchema):
credentials: GithubCredentialsInput = GithubCredentialsField("repo")
name: str = SchemaField(
description="Name of the repository to create",
placeholder="my-new-repo",
)
description: str = SchemaField(
description="Description of the repository",
placeholder="A description of the repository",
default="",
)
private: bool = SchemaField(
description="Whether the repository should be private",
default=False,
)
auto_init: bool = SchemaField(
description="Whether to initialize the repository with a README",
default=True,
)
gitignore_template: str = SchemaField(
description="Git ignore template to use (e.g., Python, Node, Java)",
default="",
)
class Output(BlockSchema):
url: str = SchemaField(description="URL of the created repository")
clone_url: str = SchemaField(description="Git clone URL of the repository")
error: str = SchemaField(
description="Error message if the repository creation failed"
)
def __init__(self):
super().__init__(
id="029ec3b8-1cfd-46d3-b6aa-28e4a706efd1",
description="This block creates a new GitHub repository.",
categories={BlockCategory.DEVELOPER_TOOLS},
input_schema=GithubCreateRepositoryBlock.Input,
output_schema=GithubCreateRepositoryBlock.Output,
test_input={
"name": "test-repo",
"description": "A test repository",
"private": False,
"auto_init": True,
"gitignore_template": "Python",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("url", "https://github.com/owner/test-repo"),
("clone_url", "https://github.com/owner/test-repo.git"),
],
test_mock={
"create_repository": lambda *args, **kwargs: (
"https://github.com/owner/test-repo",
"https://github.com/owner/test-repo.git",
)
},
)
@staticmethod
def create_repository(
credentials: GithubCredentials,
name: str,
description: str,
private: bool,
auto_init: bool,
gitignore_template: str,
) -> tuple[str, str]:
api = get_api(credentials, convert_urls=False) # Disable URL conversion
data = {
"name": name,
"description": description,
"private": private,
"auto_init": auto_init,
}
if gitignore_template:
data["gitignore_template"] = gitignore_template
# Create repository using the user endpoint
response = api.post("https://api.github.com/user/repos", json=data)
result = response.json()
return result["html_url"], result["clone_url"]
def run(
self,
input_data: Input,
*,
credentials: GithubCredentials,
**kwargs,
) -> BlockOutput:
try:
url, clone_url = self.create_repository(
credentials,
input_data.name,
input_data.description,
input_data.private,
input_data.auto_init,
input_data.gitignore_template,
)
yield "url", url
yield "clone_url", clone_url
except Exception as e:
yield "error", str(e)
class GithubListStargazersBlock(Block):
    """Lists every user who has starred a specified GitHub repository."""

    class Input(BlockSchema):
        # OAuth/API-key credentials with "repo" scope, required to call the GitHub API.
        credentials: GithubCredentialsInput = GithubCredentialsField("repo")
        repo_url: str = SchemaField(
            description="URL of the GitHub repository",
            placeholder="https://github.com/owner/repo",
        )

    class Output(BlockSchema):
        class StargazerItem(TypedDict):
            # GitHub login name of the stargazer.
            username: str
            # Profile URL (html_url) of the stargazer.
            url: str

        stargazer: StargazerItem = SchemaField(
            title="Stargazer",
            description="Stargazers with their username and profile URL",
        )
        error: str = SchemaField(
            description="Error message if listing stargazers failed"
        )

    def __init__(self):
        super().__init__(
            # FIX: the previous id ("a4b9c2d1-e5f6-4g7h-8i9j-0k1l2m3n4o5p") was not a
            # valid UUID — it contained non-hexadecimal characters ('g', 'h', 'j', …).
            # Block ids must be well-formed RFC 4122 UUIDs.
            id="a4b9c2d1-e5f6-4a7b-8c9d-0e1f2a3b4c5d",
            description="This block lists all users who have starred a specified GitHub repository.",
            categories={BlockCategory.DEVELOPER_TOOLS},
            input_schema=GithubListStargazersBlock.Input,
            output_schema=GithubListStargazersBlock.Output,
            test_input={
                "repo_url": "https://github.com/owner/repo",
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                (
                    "stargazer",
                    {
                        "username": "octocat",
                        "url": "https://github.com/octocat",
                    },
                )
            ],
            test_mock={
                "list_stargazers": lambda *args, **kwargs: [
                    {
                        "username": "octocat",
                        "url": "https://github.com/octocat",
                    }
                ]
            },
        )

    @staticmethod
    def list_stargazers(
        credentials: GithubCredentials, repo_url: str
    ) -> list[Output.StargazerItem]:
        """Fetch the stargazers of `repo_url` via the GitHub API.

        Returns a list of StargazerItem dicts with `username` and `url` keys.
        Raises whatever the underlying HTTP client raises on request failure.
        """
        api = get_api(credentials)
        # Append /stargazers to the repo URL to hit the stargazers endpoint.
        stargazers_url = f"{repo_url}/stargazers"
        # This Accept header asks GitHub to include the starred_at timestamp
        # in each entry (application/vnd.github.star+json media type).
        headers = {"Accept": "application/vnd.github.star+json"}
        response = api.get(stargazers_url, headers=headers)
        data = response.json()
        # NOTE(review): with the star+json media type GitHub nests the user under
        # a "user" key; this code reads "login"/"html_url" at the top level —
        # confirm the shape returned by `get_api` against the GitHub API docs.
        stargazers: list[GithubListStargazersBlock.Output.StargazerItem] = [
            {
                "username": stargazer["login"],
                "url": stargazer["html_url"],
            }
            for stargazer in data
        ]
        return stargazers

    def run(
        self,
        input_data: Input,
        *,
        credentials: GithubCredentials,
        **kwargs,
    ) -> BlockOutput:
        """Yield one ("stargazer", item) pair per stargazer, or ("error", msg) on failure."""
        try:
            stargazers = self.list_stargazers(
                credentials,
                input_data.repo_url,
            )
            yield from (("stargazer", stargazer) for stargazer in stargazers)
        except Exception as e:
            # Surface the failure on the error output instead of raising,
            # matching the error-handling convention of sibling blocks.
            yield "error", str(e)

View File

@@ -56,24 +56,15 @@ class SendWebRequestBlock(Block):
)
def run(self, input_data: Input, **kwargs) -> BlockOutput:
body = input_data.body
if input_data.json_format:
if isinstance(body, str):
try:
# Try to parse as JSON first
body = json.loads(body)
except json.JSONDecodeError:
# If it's not valid JSON and just plain text,
# we should send it as plain text instead
input_data.json_format = False
if isinstance(input_data.body, str):
input_data.body = json.loads(input_data.body)
response = requests.request(
input_data.method.value,
input_data.url,
headers=input_data.headers,
json=body if input_data.json_format else None,
data=body if not input_data.json_format else None,
json=input_data.body if input_data.json_format else None,
data=input_data.body if not input_data.json_format else None,
)
result = response.json() if input_data.json_format else response.text

View File

@@ -26,10 +26,8 @@ from backend.data.model import (
)
from backend.util import json
from backend.util.settings import BehaveAs, Settings
from backend.util.text import TextFormatter
logger = logging.getLogger(__name__)
fmt = TextFormatter()
LLMProviderName = Literal[
ProviderName.ANTHROPIC,
@@ -111,7 +109,6 @@ class LlmModel(str, Enum, metaclass=LlmModelMeta):
LLAMA3_1_70B = "llama-3.1-70b-versatile"
LLAMA3_1_8B = "llama-3.1-8b-instant"
# Ollama models
OLLAMA_LLAMA3_2 = "llama3.2"
OLLAMA_LLAMA3_8B = "llama3"
OLLAMA_LLAMA3_405B = "llama3.1:405b"
OLLAMA_DOLPHIN = "dolphin-mistral:latest"
@@ -166,7 +163,6 @@ MODEL_METADATA = {
# Limited to 16k during preview
LlmModel.LLAMA3_1_70B: ModelMetadata("groq", 131072),
LlmModel.LLAMA3_1_8B: ModelMetadata("groq", 131072),
LlmModel.OLLAMA_LLAMA3_2: ModelMetadata("ollama", 8192),
LlmModel.OLLAMA_LLAMA3_8B: ModelMetadata("ollama", 8192),
LlmModel.OLLAMA_LLAMA3_405B: ModelMetadata("ollama", 8192),
LlmModel.OLLAMA_DOLPHIN: ModelMetadata("ollama", 32768),
@@ -238,9 +234,7 @@ class AIStructuredResponseGeneratorBlock(Block):
description="Number of times to retry the LLM call if the response does not match the expected format.",
)
prompt_values: dict[str, str] = SchemaField(
advanced=False,
default={},
description="Values used to fill in the prompt. The values can be used in the prompt by putting them in a double curly braces, e.g. {{variable_name}}.",
advanced=False, default={}, description="Values used to fill in the prompt."
)
max_tokens: int | None = SchemaField(
advanced=True,
@@ -454,8 +448,8 @@ class AIStructuredResponseGeneratorBlock(Block):
values = input_data.prompt_values
if values:
input_data.prompt = fmt.format_string(input_data.prompt, values)
input_data.sys_prompt = fmt.format_string(input_data.sys_prompt, values)
input_data.prompt = input_data.prompt.format(**values)
input_data.sys_prompt = input_data.sys_prompt.format(**values)
if input_data.sys_prompt:
prompt.append({"role": "system", "content": input_data.sys_prompt})
@@ -582,9 +576,7 @@ class AITextGeneratorBlock(Block):
description="Number of times to retry the LLM call if the response does not match the expected format.",
)
prompt_values: dict[str, str] = SchemaField(
advanced=False,
default={},
description="Values used to fill in the prompt. The values can be used in the prompt by putting them in a double curly braces, e.g. {{variable_name}}.",
advanced=False, default={}, description="Values used to fill in the prompt."
)
ollama_host: str = SchemaField(
advanced=True,

View File

@@ -1,32 +0,0 @@
from typing import Literal
from pydantic import SecretStr
from backend.data.model import APIKeyCredentials, CredentialsField, CredentialsMetaInput
from backend.integrations.providers import ProviderName
NvidiaCredentials = APIKeyCredentials
NvidiaCredentialsInput = CredentialsMetaInput[
Literal[ProviderName.NVIDIA],
Literal["api_key"],
]
TEST_CREDENTIALS = APIKeyCredentials(
id="01234567-89ab-cdef-0123-456789abcdef",
provider="nvidia",
api_key=SecretStr("mock-nvidia-api-key"),
title="Mock Nvidia API key",
expires_at=None,
)
TEST_CREDENTIALS_INPUT = {
"provider": TEST_CREDENTIALS.provider,
"id": TEST_CREDENTIALS.id,
"type": TEST_CREDENTIALS.type,
"title": TEST_CREDENTIALS.title,
}
def NvidiaCredentialsField() -> NvidiaCredentialsInput:
"""Creates an Nvidia credentials input on a block."""
return CredentialsField(description="The Nvidia integration requires an API Key.")

View File

@@ -1,90 +0,0 @@
from backend.blocks.nvidia._auth import (
NvidiaCredentials,
NvidiaCredentialsField,
NvidiaCredentialsInput,
)
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
from backend.util.request import requests
class NvidiaDeepfakeDetectBlock(Block):
class Input(BlockSchema):
credentials: NvidiaCredentialsInput = NvidiaCredentialsField()
image_base64: str = SchemaField(
description="Image to analyze for deepfakes", image_upload=True
)
return_image: bool = SchemaField(
description="Whether to return the processed image with markings",
default=False,
)
class Output(BlockSchema):
status: str = SchemaField(
description="Detection status (SUCCESS, ERROR, CONTENT_FILTERED)",
default="",
)
image: str = SchemaField(
description="Processed image with detection markings (if return_image=True)",
default="",
image_output=True,
)
is_deepfake: float = SchemaField(
description="Probability that the image is a deepfake (0-1)",
default=0.0,
)
def __init__(self):
super().__init__(
id="8c7d0d67-e79c-44f6-92a1-c2600c8aac7f",
description="Detects potential deepfakes in images using Nvidia's AI API",
categories={BlockCategory.SAFETY},
input_schema=NvidiaDeepfakeDetectBlock.Input,
output_schema=NvidiaDeepfakeDetectBlock.Output,
)
def run(
self, input_data: Input, *, credentials: NvidiaCredentials, **kwargs
) -> BlockOutput:
url = "https://ai.api.nvidia.com/v1/cv/hive/deepfake-image-detection"
headers = {
"accept": "application/json",
"content-type": "application/json",
"Authorization": f"Bearer {credentials.api_key.get_secret_value()}",
}
image_data = f"data:image/jpeg;base64,{input_data.image_base64}"
payload = {
"input": [image_data],
"return_image": input_data.return_image,
}
try:
response = requests.post(url, headers=headers, json=payload)
response.raise_for_status()
data = response.json()
result = data.get("data", [{}])[0]
# Get deepfake probability from first bounding box if any
deepfake_prob = 0.0
if result.get("bounding_boxes"):
deepfake_prob = result["bounding_boxes"][0].get("is_deepfake", 0.0)
yield "status", result.get("status", "ERROR")
yield "is_deepfake", deepfake_prob
if input_data.return_image:
image_data = result.get("image", "")
output_data = f"data:image/jpeg;base64,{image_data}"
yield "image", output_data
else:
yield "image", ""
except Exception as e:
yield "error", str(e)
yield "status", "ERROR"
yield "is_deepfake", 0.0
yield "image", ""

View File

@@ -141,10 +141,10 @@ class ExtractTextInformationBlock(Block):
class FillTextTemplateBlock(Block):
class Input(BlockSchema):
values: dict[str, Any] = SchemaField(
description="Values (dict) to be used in format. These values can be used by putting them in double curly braces in the format template. e.g. {{value_name}}.",
description="Values (dict) to be used in format"
)
format: str = SchemaField(
description="Template to format the text using `values`. Use Jinja2 syntax."
description="Template to format the text using `values`"
)
class Output(BlockSchema):
@@ -160,7 +160,7 @@ class FillTextTemplateBlock(Block):
test_input=[
{
"values": {"name": "Alice", "hello": "Hello", "world": "World!"},
"format": "{{hello}}, {{ world }} {{name}}",
"format": "{hello}, {world} {{name}}",
},
{
"values": {"list": ["Hello", " World!"]},

View File

@@ -342,7 +342,7 @@ class TweetPostBuilder:
def __init__(self):
self.params: Dict[str, Any] = {"user_auth": False}
def add_text(self, text: str | None):
def add_text(self, text: str):
if text:
self.params["text"] = text
return self

View File

@@ -118,10 +118,9 @@ class TwitterGetListBlock(Block):
meta = {}
owner_id = ""
owner_username = ""
included = {}
if response.includes:
included = IncludesSerializer.serialize(response.includes)
included = IncludesSerializer.serialize(response.includes)
data_dict = ResponseDataSerializer.serialize_dict(response.data)
if "users" in included:
owner_id = str(included["users"][0]["id"])
@@ -131,7 +130,6 @@ class TwitterGetListBlock(Block):
meta = response.meta
if response.data:
data_dict = ResponseDataSerializer.serialize_dict(response.data)
return data_dict, included, meta, owner_id, owner_username
raise Exception("List not found")
@@ -187,7 +185,7 @@ class TwitterGetOwnedListsBlock(Block):
required=True,
)
max_results: int | None = SchemaField(
max_results: int = SchemaField(
description="Maximum number of results per page (1-100)",
placeholder="Enter max results (default 100)",
advanced=True,
@@ -252,7 +250,7 @@ class TwitterGetOwnedListsBlock(Block):
def get_owned_lists(
credentials: TwitterCredentials,
user_id: str,
max_results: int | None,
max_results: int,
pagination_token: str | None,
expansions: ListExpansionsFilter | None,
user_fields: TweetUserFieldsFilter | None,
@@ -283,7 +281,6 @@ class TwitterGetOwnedListsBlock(Block):
response = cast(Response, client.get_owned_lists(**params))
meta = {}
included = {}
list_ids = []
list_names = []
next_token = None
@@ -292,21 +289,16 @@ class TwitterGetOwnedListsBlock(Block):
meta = response.meta
next_token = meta.get("next_token")
if response.includes:
included = IncludesSerializer.serialize(response.includes)
included = IncludesSerializer.serialize(response.includes)
data = ResponseDataSerializer.serialize_list(response.data)
if response.data:
data = ResponseDataSerializer.serialize_list(response.data)
list_ids = [
str(item.id) for item in response.data if hasattr(item, "id")
]
list_names = [
item.name for item in response.data if hasattr(item, "name")
]
list_ids = [str(item.id) for item in response.data]
list_names = [item.name for item in response.data]
return data, included, meta, list_ids, list_names, next_token
raise Exception("User have no owned list")
raise Exception("Lists not found")
except tweepy.TweepyException:
raise

View File

@@ -198,7 +198,7 @@ class TwitterGetListMembersBlock(Block):
required=True,
)
max_results: int | None = SchemaField(
max_results: int = SchemaField(
description="Maximum number of results per page (1-100)",
placeholder="Enter max results",
default=10,
@@ -274,7 +274,7 @@ class TwitterGetListMembersBlock(Block):
def get_list_members(
credentials: TwitterCredentials,
list_id: str,
max_results: int | None,
max_results: int,
pagination_token: str | None,
expansions: UserExpansionsFilter | None,
tweet_fields: TweetFieldsFilter | None,
@@ -305,7 +305,6 @@ class TwitterGetListMembersBlock(Block):
response = cast(Response, client.get_list_members(**params))
meta = {}
included = {}
next_token = None
user_ids = []
usernames = []
@@ -314,11 +313,10 @@ class TwitterGetListMembersBlock(Block):
meta = response.meta
next_token = meta.get("next_token")
if response.includes:
included = IncludesSerializer.serialize(response.includes)
included = IncludesSerializer.serialize(response.includes)
data = ResponseDataSerializer.serialize_list(response.data)
if response.data:
data = ResponseDataSerializer.serialize_list(response.data)
user_ids = [str(user.id) for user in response.data]
usernames = [user.username for user in response.data]
return user_ids, usernames, data, included, meta, next_token
@@ -379,7 +377,7 @@ class TwitterGetListMembershipsBlock(Block):
required=True,
)
max_results: int | None = SchemaField(
max_results: int = SchemaField(
description="Maximum number of results per page (1-100)",
placeholder="Enter max results",
advanced=True,
@@ -440,7 +438,7 @@ class TwitterGetListMembershipsBlock(Block):
def get_list_memberships(
credentials: TwitterCredentials,
user_id: str,
max_results: int | None,
max_results: int,
pagination_token: str | None,
expansions: ListExpansionsFilter | None,
user_fields: TweetUserFieldsFilter | None,
@@ -471,7 +469,6 @@ class TwitterGetListMembershipsBlock(Block):
response = cast(Response, client.get_list_memberships(**params))
meta = {}
included = {}
next_token = None
list_ids = []
@@ -479,11 +476,10 @@ class TwitterGetListMembershipsBlock(Block):
meta = response.meta
next_token = meta.get("next_token")
if response.includes:
included = IncludesSerializer.serialize(response.includes)
included = IncludesSerializer.serialize(response.includes)
data = ResponseDataSerializer.serialize_list(response.data)
if response.data:
data = ResponseDataSerializer.serialize_list(response.data)
list_ids = [str(lst.id) for lst in response.data]
return data, included, meta, list_ids, next_token

View File

@@ -45,7 +45,7 @@ class TwitterGetListTweetsBlock(Block):
required=True,
)
max_results: int | None = SchemaField(
max_results: int = SchemaField(
description="Maximum number of results per page (1-100)",
placeholder="Enter max results",
default=10,
@@ -116,7 +116,7 @@ class TwitterGetListTweetsBlock(Block):
def get_list_tweets(
credentials: TwitterCredentials,
list_id: str,
max_results: int | None,
max_results: int,
pagination_token: str | None,
expansions: ExpansionFilter | None,
media_fields: TweetMediaFieldsFilter | None,
@@ -153,7 +153,6 @@ class TwitterGetListTweetsBlock(Block):
response = cast(Response, client.get_list_tweets(**params))
meta = {}
included = {}
tweet_ids = []
texts = []
next_token = None
@@ -162,11 +161,10 @@ class TwitterGetListTweetsBlock(Block):
meta = response.meta
next_token = meta.get("next_token")
if response.includes:
included = IncludesSerializer.serialize(response.includes)
included = IncludesSerializer.serialize(response.includes)
data = ResponseDataSerializer.serialize_list(response.data)
if response.data:
data = ResponseDataSerializer.serialize_list(response.data)
tweet_ids = [str(item.id) for item in response.data]
texts = [item.text for item in response.data]

View File

@@ -94,14 +94,14 @@ class TwitterUpdateListBlock(Block):
advanced=False,
)
name: str | None = SchemaField(
name: str = SchemaField(
description="New name for the List",
placeholder="Enter list name",
default="",
advanced=False,
)
description: str | None = SchemaField(
description: str = SchemaField(
description="New description for the List",
placeholder="Enter list description",
default="",
@@ -133,10 +133,7 @@ class TwitterUpdateListBlock(Block):
@staticmethod
def update_list(
credentials: TwitterCredentials,
list_id: str,
name: str | None,
description: str | None,
credentials: TwitterCredentials, list_id: str, name: str, description: str
):
try:
client = tweepy.Client(
@@ -165,7 +162,10 @@ class TwitterUpdateListBlock(Block):
) -> BlockOutput:
try:
success = self.update_list(
credentials, input_data.list_id, input_data.name, input_data.description
credentials,
input_data.list_id,
input_data.name,
input_data.description,
)
yield "success", success
@@ -190,7 +190,7 @@ class TwitterCreateListBlock(Block):
default="",
)
description: str | None = SchemaField(
description: str = SchemaField(
description="Description of the List",
placeholder="Enter list description",
advanced=False,
@@ -231,10 +231,7 @@ class TwitterCreateListBlock(Block):
@staticmethod
def create_list(
credentials: TwitterCredentials,
name: str,
description: str | None,
private: bool,
credentials: TwitterCredentials, name: str, description: str, private: bool
):
try:
client = tweepy.Client(

View File

@@ -234,18 +234,16 @@ class TwitterGetPinnedListsBlock(Block):
response = cast(Response, client.get_pinned_lists(**params))
meta = {}
included = {}
list_ids = []
list_names = []
if response.meta:
meta = response.meta
if response.includes:
included = IncludesSerializer.serialize(response.includes)
included = IncludesSerializer.serialize(response.includes)
data = ResponseDataSerializer.serialize_list(response.data)
if response.data:
data = ResponseDataSerializer.serialize_list(response.data)
list_ids = [str(item.id) for item in response.data]
list_names = [item.name for item in response.data]
return data, included, meta, list_ids, list_names

View File

@@ -42,7 +42,7 @@ class TwitterSearchSpacesBlock(Block):
placeholder="Enter search query",
)
max_results: int | None = SchemaField(
max_results: int = SchemaField(
description="Maximum number of results to return (1-100)",
placeholder="Enter max results",
default=10,
@@ -111,7 +111,7 @@ class TwitterSearchSpacesBlock(Block):
def search_spaces(
credentials: TwitterCredentials,
query: str,
max_results: int | None,
max_results: int,
state: SpaceStatesFilter,
expansions: SpaceExpansionsFilter | None,
space_fields: SpaceFieldsFilter | None,
@@ -145,9 +145,9 @@ class TwitterSearchSpacesBlock(Block):
data = ResponseDataSerializer.serialize_list(response.data)
if response.data:
ids = [str(space["id"]) for space in response.data if "id" in space]
titles = [space["title"] for space in data if "title" in space]
host_ids = [space["host_ids"] for space in data if "host_ids" in space]
ids = [str(space["id"]) for space in response.data]
titles = [space["title"] for space in data]
host_ids = [space["host_ids"] for space in data]
return data, included, meta, ids, titles, host_ids, next_token

View File

@@ -1,7 +1,6 @@
from typing import Literal, Union, cast
from typing import cast
import tweepy
from pydantic import BaseModel
from tweepy.client import Response
from backend.blocks.twitter._auth import (
@@ -39,26 +38,6 @@ from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class SpaceList(BaseModel):
discriminator: Literal["space_list"]
space_ids: list[str] = SchemaField(
description="List of Space IDs to lookup (up to 100)",
placeholder="Enter Space IDs",
default=[],
advanced=False,
)
class UserList(BaseModel):
discriminator: Literal["user_list"]
user_ids: list[str] = SchemaField(
description="List of user IDs to lookup their Spaces (up to 100)",
placeholder="Enter user IDs",
default=[],
advanced=False,
)
class TwitterGetSpacesBlock(Block):
"""
Gets information about multiple Twitter Spaces specified by Space IDs or creator user IDs
@@ -69,9 +48,17 @@ class TwitterGetSpacesBlock(Block):
["spaces.read", "users.read", "offline.access"]
)
identifier: Union[SpaceList, UserList] = SchemaField(
discriminator="discriminator",
description="Choose whether to lookup spaces by their IDs or by creator user IDs",
space_ids: list[str] = SchemaField(
description="List of Space IDs to lookup (up to 100)",
placeholder="Enter Space IDs",
default=[],
advanced=False,
)
user_ids: list[str] = SchemaField(
description="List of user IDs to lookup their Spaces (up to 100)",
placeholder="Enter user IDs",
default=[],
advanced=False,
)
@@ -95,10 +82,8 @@ class TwitterGetSpacesBlock(Block):
input_schema=TwitterGetSpacesBlock.Input,
output_schema=TwitterGetSpacesBlock.Output,
test_input={
"identifier": {
"discriminator": "space_list",
"space_ids": ["1DXxyRYNejbKM"],
},
"space_ids": ["1DXxyRYNejbKM"],
"user_ids": [],
"credentials": TEST_CREDENTIALS_INPUT,
"expansions": None,
"space_fields": None,
@@ -138,7 +123,8 @@ class TwitterGetSpacesBlock(Block):
@staticmethod
def get_spaces(
credentials: TwitterCredentials,
identifier: Union[SpaceList, UserList],
space_ids: list[str],
user_ids: list[str],
expansions: SpaceExpansionsFilter | None,
space_fields: SpaceFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
@@ -149,12 +135,8 @@ class TwitterGetSpacesBlock(Block):
)
params = {
"ids": (
identifier.space_ids if isinstance(identifier, SpaceList) else None
),
"user_ids": (
identifier.user_ids if isinstance(identifier, UserList) else None
),
"ids": None if space_ids == [] else space_ids,
"user_ids": None if user_ids == [] else user_ids,
}
params = (
@@ -174,8 +156,8 @@ class TwitterGetSpacesBlock(Block):
if response.data:
data = ResponseDataSerializer.serialize_list(response.data)
ids = [space["id"] for space in data if "id" in space]
titles = [space["title"] for space in data if "title" in space]
ids = [space["id"] for space in data]
titles = [space["title"] for space in data]
return data, included, ids, titles
@@ -194,7 +176,8 @@ class TwitterGetSpacesBlock(Block):
try:
data, included, ids, titles = self.get_spaces(
credentials,
input_data.identifier,
input_data.space_ids,
input_data.user_ids,
input_data.expansions,
input_data.space_fields,
input_data.user_fields,
@@ -357,14 +340,9 @@ class TwitterGetSpaceByIdBlock(Block):
# Common outputs
if space_data:
if "id" in space_data:
yield "id", space_data.get("id")
if "title" in space_data:
yield "title", space_data.get("title")
if "host_ids" in space_data:
yield "host_ids", space_data.get("host_ids")
yield "id", space_data.get("id")
yield "title", space_data.get("title")
yield "host_ids", space_data.get("host_ids")
if space_data:
yield "data", space_data

View File

@@ -107,7 +107,7 @@ class TwitterGetBookmarkedTweetsBlock(Block):
["tweet.read", "bookmark.read", "users.read", "offline.access"]
)
max_results: int | None = SchemaField(
max_results: int = SchemaField(
description="Maximum number of results to return (1-100)",
placeholder="Enter max results",
default=10,
@@ -183,7 +183,7 @@ class TwitterGetBookmarkedTweetsBlock(Block):
@staticmethod
def get_bookmarked_tweets(
credentials: TwitterCredentials,
max_results: int | None,
max_results: int,
pagination_token: str | None,
expansions: ExpansionFilter | None,
media_fields: TweetMediaFieldsFilter | None,

View File

@@ -119,7 +119,7 @@ class TwitterGetLikingUsersBlock(Block):
description="ID of the tweet to get liking users for",
placeholder="Enter tweet ID",
)
max_results: int | None = SchemaField(
max_results: int = SchemaField(
description="Maximum number of results to return (1-100)",
placeholder="Enter max results",
default=10,
@@ -190,7 +190,7 @@ class TwitterGetLikingUsersBlock(Block):
def get_liking_users(
credentials: TwitterCredentials,
tweet_id: str,
max_results: int | None,
max_results: int,
pagination_token: str | None,
expansions: UserExpansionsFilter | None,
tweet_fields: TweetFieldsFilter | None,
@@ -293,7 +293,7 @@ class TwitterGetLikedTweetsBlock(Block):
description="ID of the user to get liked tweets for",
placeholder="Enter user ID",
)
max_results: int | None = SchemaField(
max_results: int = SchemaField(
description="Maximum number of results to return (5-100)",
placeholder="100",
default=10,
@@ -384,7 +384,7 @@ class TwitterGetLikedTweetsBlock(Block):
def get_liked_tweets(
credentials: TwitterCredentials,
user_id: str,
max_results: int | None,
max_results: int,
pagination_token: str | None,
expansions: ExpansionFilter | None,
media_fields: TweetMediaFieldsFilter | None,

View File

@@ -1,8 +1,7 @@
from datetime import datetime
from typing import List, Literal, Optional, Union, cast
from typing import cast
import tweepy
from pydantic import BaseModel
from tweepy.client import Response
from backend.blocks.twitter._auth import (
@@ -38,33 +37,6 @@ from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class Media(BaseModel):
discriminator: Literal["media"]
media_ids: Optional[List[str]] = None
media_tagged_user_ids: Optional[List[str]] = None
class DeepLink(BaseModel):
discriminator: Literal["deep_link"]
direct_message_deep_link: Optional[str] = None
class Poll(BaseModel):
discriminator: Literal["poll"]
poll_options: Optional[List[str]] = None
poll_duration_minutes: Optional[int] = None
class Place(BaseModel):
discriminator: Literal["place"]
place_id: Optional[str] = None
class Quote(BaseModel):
discriminator: Literal["quote"]
quote_tweet_id: Optional[str] = None
class TwitterPostTweetBlock(Block):
"""
Create a tweet on Twitter with the option to include one additional element such as a media, quote, or deep link.
@@ -75,36 +47,72 @@ class TwitterPostTweetBlock(Block):
["tweet.read", "tweet.write", "users.read", "offline.access"]
)
tweet_text: str | None = SchemaField(
description="Text of the tweet to post",
tweet_text: str = SchemaField(
description="Text of the tweet to post [It's Optional if you want to add media, quote, or deep link]",
placeholder="Enter your tweet",
default=None,
advanced=False,
default="",
)
media_ids: list = SchemaField(
description="List of media IDs to attach to the tweet, [ex - 1455952740635586573]",
placeholder="Enter media IDs",
default=[],
)
media_tagged_user_ids: list = SchemaField(
description="List of user IDs to tag in media, [ex - 1455952740635586573]",
placeholder="Enter media tagged user IDs",
default=[],
)
direct_message_deep_link: str = SchemaField(
description="Link directly to a Direct Message conversation with an account [ex - https://twitter.com/messages/compose?recipient_id={your_id}]",
placeholder="Enter direct message deep link",
default="",
)
poll_options: list = SchemaField(
description="List of poll options",
placeholder="Enter poll options",
default=[],
)
poll_duration_minutes: int = SchemaField(
description="Duration of the poll in minutes",
placeholder="Enter poll duration in minutes",
default=0,
)
for_super_followers_only: bool = SchemaField(
description="Tweet exclusively for Super Followers",
placeholder="Enter for super followers only",
advanced=True,
default=False,
)
attachment: Union[Media, DeepLink, Poll, Place, Quote] | None = SchemaField(
discriminator="discriminator",
description="Additional tweet data (media, deep link, poll, place or quote)",
advanced=True,
place_id: str = SchemaField(
description="Adds optional location information to a tweet if geo settings are enabled in your profile.",
placeholder="Enter place ID",
default="",
)
exclude_reply_user_ids: Optional[List[str]] = SchemaField(
description="User IDs to exclude from reply Tweet thread. [ex - 6253282]",
quote_tweet_id: str = SchemaField(
description="Link to the Tweet being quoted, [ex- 1455953449422516226]",
advanced=True,
placeholder="Enter quote tweet ID",
default="",
)
exclude_reply_user_ids: list = SchemaField(
description="User IDs to exclude from reply Tweet thread. [ex - 6253282] ",
placeholder="Enter user IDs to exclude",
advanced=True,
default=None,
default=[],
)
in_reply_to_tweet_id: Optional[str] = SchemaField(
in_reply_to_tweet_id: str = SchemaField(
description="Tweet ID being replied to. Please note that in_reply_to_tweet_id needs to be in the request if exclude_reply_user_ids is present",
default=None,
default="",
placeholder="Enter in reply to tweet ID",
advanced=True,
)
@@ -133,11 +141,12 @@ class TwitterPostTweetBlock(Block):
test_input={
"tweet_text": "This is a test tweet.",
"credentials": TEST_CREDENTIALS_INPUT,
"attachment": {
"discriminator": "deep_link",
"direct_message_deep_link": "https://twitter.com/messages/compose",
},
"direct_message_deep_link": "",
"for_super_followers_only": False,
"place_id": "",
"media_ids": [],
"media_tagged_user_ids": [],
"quote_tweet_id": "",
"exclude_reply_user_ids": [],
"in_reply_to_tweet_id": "",
},
@@ -154,14 +163,20 @@ class TwitterPostTweetBlock(Block):
},
)
@staticmethod
def post_tweet(
self,
credentials: TwitterCredentials,
input_txt: str | None,
attachment: Union[Media, DeepLink, Poll, Place, Quote] | None,
input_txt: str,
media_ids: list,
media_tagged_user_ids: list,
direct_message_deep_link: str,
for_super_followers_only: bool,
exclude_reply_user_ids: Optional[List[str]],
in_reply_to_tweet_id: Optional[str],
place_id: str,
poll_options: list,
poll_duration_minutes: int,
quote_tweet_id: str,
exclude_reply_user_ids: list,
in_reply_to_tweet_id: str,
reply_settings: TweetReplySettingsFilter,
):
try:
@@ -172,29 +187,20 @@ class TwitterPostTweetBlock(Block):
params = (
TweetPostBuilder()
.add_text(input_txt)
.add_media(media_ids, media_tagged_user_ids)
.add_deep_link(direct_message_deep_link)
.add_super_followers(for_super_followers_only)
.add_poll_options(poll_options)
.add_poll_duration(poll_duration_minutes)
.add_place(place_id)
.add_quote(quote_tweet_id)
.add_reply_settings(
exclude_reply_user_ids or [],
in_reply_to_tweet_id or "",
reply_settings,
exclude_reply_user_ids, in_reply_to_tweet_id, reply_settings
)
.build()
)
if isinstance(attachment, Media):
params.add_media(
attachment.media_ids or [], attachment.media_tagged_user_ids or []
)
elif isinstance(attachment, DeepLink):
params.add_deep_link(attachment.direct_message_deep_link or "")
elif isinstance(attachment, Poll):
params.add_poll_options(attachment.poll_options or [])
params.add_poll_duration(attachment.poll_duration_minutes or 0)
elif isinstance(attachment, Place):
params.add_place(attachment.place_id or "")
elif isinstance(attachment, Quote):
params.add_quote(attachment.quote_tweet_id or "")
tweet = cast(Response, client.create_tweet(**params.build()))
tweet = cast(Response, client.create_tweet(**params))
if not tweet.data:
raise Exception("Failed to create tweet")
@@ -219,8 +225,14 @@ class TwitterPostTweetBlock(Block):
tweet_id, tweet_url = self.post_tweet(
credentials,
input_data.tweet_text,
input_data.attachment,
input_data.media_ids,
input_data.media_tagged_user_ids,
input_data.direct_message_deep_link,
input_data.for_super_followers_only,
input_data.place_id,
input_data.poll_options,
input_data.poll_duration_minutes,
input_data.quote_tweet_id,
input_data.exclude_reply_user_ids,
input_data.in_reply_to_tweet_id,
input_data.reply_settings,

View File

@@ -45,14 +45,16 @@ class TwitterGetQuoteTweetsBlock(Block):
placeholder="Enter tweet ID",
)
max_results: int | None = SchemaField(
max_results: int = SchemaField(
description="Number of results to return (max 100)",
default=10,
advanced=True,
)
exclude: TweetExcludesFilter | None = SchemaField(
description="Types of tweets to exclude", advanced=True, default=None
description="Types of tweets to exclude",
advanced=True,
default=None
)
pagination_token: str | None = SchemaField(
@@ -123,7 +125,7 @@ class TwitterGetQuoteTweetsBlock(Block):
def get_quote_tweets(
credentials: TwitterCredentials,
tweet_id: str,
max_results: int | None,
max_results: int,
exclude: TweetExcludesFilter | None,
pagination_token: str | None,
expansions: ExpansionFilter | None,

View File

@@ -191,7 +191,7 @@ class TwitterGetRetweetersBlock(Block):
placeholder="Enter tweet ID",
)
max_results: int | None = SchemaField(
max_results: int = SchemaField(
description="Maximum number of results per page (1-100)",
default=10,
placeholder="Enter max results",
@@ -270,7 +270,7 @@ class TwitterGetRetweetersBlock(Block):
def get_retweeters(
credentials: TwitterCredentials,
tweet_id: str,
max_results: int | None,
max_results: int,
pagination_token: str | None,
expansions: UserExpansionsFilter | None,
tweet_fields: TweetFieldsFilter | None,

View File

@@ -49,7 +49,7 @@ class TwitterGetUserMentionsBlock(Block):
placeholder="Enter user ID",
)
max_results: int | None = SchemaField(
max_results: int = SchemaField(
description="Number of tweets to retrieve (5-100)",
default=10,
advanced=True,
@@ -143,7 +143,7 @@ class TwitterGetUserMentionsBlock(Block):
def get_mentions(
credentials: TwitterCredentials,
user_id: str,
max_results: int | None,
max_results: int,
start_time: datetime | None,
end_time: datetime | None,
since_id: str | None,
@@ -290,7 +290,7 @@ class TwitterGetHomeTimelineBlock(Block):
["tweet.read", "users.read", "offline.access"]
)
max_results: int | None = SchemaField(
max_results: int = SchemaField(
description="Number of tweets to retrieve (5-100)",
default=10,
advanced=True,
@@ -376,7 +376,7 @@ class TwitterGetHomeTimelineBlock(Block):
@staticmethod
def get_timeline(
credentials: TwitterCredentials,
max_results: int | None,
max_results: int,
start_time: datetime | None,
end_time: datetime | None,
since_id: str | None,
@@ -526,7 +526,7 @@ class TwitterGetUserTweetsBlock(Block):
placeholder="Enter user ID",
)
max_results: int | None = SchemaField(
max_results: int = SchemaField(
description="Number of tweets to retrieve (5-100)",
default=10,
advanced=True,
@@ -620,7 +620,7 @@ class TwitterGetUserTweetsBlock(Block):
def get_user_tweets(
credentials: TwitterCredentials,
user_id: str,
max_results: int | None,
max_results: int,
start_time: datetime | None,
end_time: datetime | None,
since_id: str | None,

View File

@@ -25,7 +25,7 @@ from backend.data.model import SchemaField
class TwitterUnblockUserBlock(Block):
"""
Unblock a specific user on Twitter. The request succeeds with no action when the user sends a request to a user they're not blocking or have already unblocked.
Unblock a specific user on Twitter
"""
class Input(BlockSchema):
@@ -98,7 +98,7 @@ class TwitterGetBlockedUsersBlock(Block):
["users.read", "offline.access", "block.read"]
)
max_results: int | None = SchemaField(
max_results: int = SchemaField(
description="Maximum number of results to return (1-1000, default 100)",
placeholder="Enter max results",
default=10,
@@ -156,7 +156,7 @@ class TwitterGetBlockedUsersBlock(Block):
@staticmethod
def get_blocked_users(
credentials: TwitterCredentials,
max_results: int | None,
max_results: int,
pagination_token: str | None,
expansions: UserExpansionsFilter | None,
tweet_fields: TweetFieldsFilter | None,

View File

@@ -28,8 +28,7 @@ from backend.data.model import SchemaField
class TwitterUnfollowUserBlock(Block):
"""
Allows a user to unfollow another user specified by target user ID.
The request succeeds with no action when the authenticated user sends a request to a user they're not following or have already unfollowed.
Allows a user to unfollow another user specified by target user ID
"""
class Input(BlockSchema):
@@ -97,10 +96,7 @@ class TwitterUnfollowUserBlock(Block):
class TwitterFollowUserBlock(Block):
"""
Allows a user to follow another user specified by target user ID. If the target user does not have public Tweets,
this endpoint will send a follow request. The request succeeds with no action when the authenticated user sends a
request to a user they're already following, or if they're sending a follower request to a user that does not have
public Tweets.
Allows a user to follow another user specified by target user ID
"""
class Input(BlockSchema):
@@ -179,7 +175,7 @@ class TwitterGetFollowersBlock(Block):
placeholder="Enter target user ID",
)
max_results: int | None = SchemaField(
max_results: int = SchemaField(
description="Maximum number of results to return (1-1000, default 100)",
placeholder="Enter max results",
default=10,
@@ -244,7 +240,7 @@ class TwitterGetFollowersBlock(Block):
def get_followers(
credentials: TwitterCredentials,
target_user_id: str,
max_results: int | None,
max_results: int,
pagination_token: str | None,
expansions: UserExpansionsFilter | None,
tweet_fields: TweetFieldsFilter | None,
@@ -352,7 +348,7 @@ class TwitterGetFollowingBlock(Block):
placeholder="Enter target user ID",
)
max_results: int | None = SchemaField(
max_results: int = SchemaField(
description="Maximum number of results to return (1-1000, default 100)",
placeholder="Enter max results",
default=10,
@@ -417,7 +413,7 @@ class TwitterGetFollowingBlock(Block):
def get_following(
credentials: TwitterCredentials,
target_user_id: str,
max_results: int | None,
max_results: int,
pagination_token: str | None,
expansions: UserExpansionsFilter | None,
tweet_fields: TweetFieldsFilter | None,

View File

@@ -28,8 +28,7 @@ from backend.data.model import SchemaField
class TwitterUnmuteUserBlock(Block):
"""
Allows a user to unmute another user specified by target user ID.
The request succeeds with no action when the user sends a request to a user they're not muting or have already unmuted.
Allows a user to unmute another user specified by target user ID
"""
class Input(BlockSchema):
@@ -105,7 +104,7 @@ class TwitterGetMutedUsersBlock(Block):
["users.read", "offline.access"]
)
max_results: int | None = SchemaField(
max_results: int = SchemaField(
description="The maximum number of results to be returned per page (1-1000). Default is 100.",
placeholder="Enter max results",
default=10,
@@ -177,7 +176,7 @@ class TwitterGetMutedUsersBlock(Block):
@staticmethod
def get_muted_users(
credentials: TwitterCredentials,
max_results: int | None,
max_results: int,
pagination_token: str | None,
expansions: UserExpansionsFilter | None,
tweet_fields: TweetFieldsFilter | None,

View File

@@ -1,7 +1,6 @@
from typing import Literal, Union, cast
from typing import cast
import tweepy
from pydantic import BaseModel
from tweepy.client import Response
from backend.blocks.twitter._auth import (
@@ -27,18 +26,6 @@ from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
class UserId(BaseModel):
discriminator: Literal["user_id"]
user_id: str = SchemaField(description="The ID of the user to lookup", default="")
class Username(BaseModel):
discriminator: Literal["username"]
username: str = SchemaField(
description="The Twitter username (handle) of the user", default=""
)
class TwitterGetUserBlock(Block):
"""
Gets information about a single Twitter user specified by ID or username
@@ -49,9 +36,17 @@ class TwitterGetUserBlock(Block):
["users.read", "offline.access"]
)
identifier: Union[UserId, Username] = SchemaField(
discriminator="discriminator",
description="Choose whether to identify the user by their unique Twitter ID or by their username",
user_id: str = SchemaField(
description="The ID of the user to lookup",
placeholder="Enter user ID",
default="",
advanced=False,
)
username: str = SchemaField(
description="The Twitter username (handle) of the user",
placeholder="Enter username",
default="",
advanced=False,
)
@@ -76,7 +71,8 @@ class TwitterGetUserBlock(Block):
input_schema=TwitterGetUserBlock.Input,
output_schema=TwitterGetUserBlock.Output,
test_input={
"identifier": {"discriminator": "username", "username": "twitter"},
"user_id": "",
"username": "twitter",
"credentials": TEST_CREDENTIALS_INPUT,
"expansions": None,
"tweet_fields": None,
@@ -118,7 +114,8 @@ class TwitterGetUserBlock(Block):
@staticmethod
def get_user(
credentials: TwitterCredentials,
identifier: Union[UserId, Username],
user_id: str,
username: str,
expansions: UserExpansionsFilter | None,
tweet_fields: TweetFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
@@ -129,10 +126,8 @@ class TwitterGetUserBlock(Block):
)
params = {
"id": identifier.user_id if isinstance(identifier, UserId) else None,
"username": (
identifier.username if isinstance(identifier, Username) else None
),
"id": None if not user_id else user_id,
"username": None if not username else username,
"user_auth": False,
}
@@ -176,7 +171,8 @@ class TwitterGetUserBlock(Block):
try:
data, included, username, id, name = self.get_user(
credentials,
input_data.identifier,
input_data.user_id,
input_data.username,
input_data.expansions,
input_data.tweet_fields,
input_data.user_fields,
@@ -195,26 +191,6 @@ class TwitterGetUserBlock(Block):
yield "error", handle_tweepy_exception(e)
class UserIdList(BaseModel):
discriminator: Literal["user_id_list"]
user_ids: list[str] = SchemaField(
description="List of user IDs to lookup (max 100)",
placeholder="Enter user IDs",
default=[],
advanced=False,
)
class UsernameList(BaseModel):
discriminator: Literal["username_list"]
usernames: list[str] = SchemaField(
description="List of Twitter usernames/handles to lookup (max 100)",
placeholder="Enter usernames",
default=[],
advanced=False,
)
class TwitterGetUsersBlock(Block):
"""
Gets information about multiple Twitter users specified by IDs or usernames
@@ -225,9 +201,17 @@ class TwitterGetUsersBlock(Block):
["users.read", "offline.access"]
)
identifier: Union[UserIdList, UsernameList] = SchemaField(
discriminator="discriminator",
description="Choose whether to identify users by their unique Twitter IDs or by their usernames",
user_ids: list[str] = SchemaField(
description="List of user IDs to lookup (max 100)",
placeholder="Enter user IDs",
default=[],
advanced=False,
)
usernames: list[str] = SchemaField(
description="List of Twitter usernames/handles to lookup (max 100)",
placeholder="Enter usernames",
default=[],
advanced=False,
)
@@ -252,10 +236,8 @@ class TwitterGetUsersBlock(Block):
input_schema=TwitterGetUsersBlock.Input,
output_schema=TwitterGetUsersBlock.Output,
test_input={
"identifier": {
"discriminator": "username_list",
"usernames": ["twitter", "twitterdev"],
},
"user_ids": [],
"usernames": ["twitter", "twitterdev"],
"credentials": TEST_CREDENTIALS_INPUT,
"expansions": None,
"tweet_fields": None,
@@ -299,7 +281,8 @@ class TwitterGetUsersBlock(Block):
@staticmethod
def get_users(
credentials: TwitterCredentials,
identifier: Union[UserIdList, UsernameList],
user_ids: list[str],
usernames: list[str],
expansions: UserExpansionsFilter | None,
tweet_fields: TweetFieldsFilter | None,
user_fields: TweetUserFieldsFilter | None,
@@ -310,16 +293,8 @@ class TwitterGetUsersBlock(Block):
)
params = {
"ids": (
",".join(identifier.user_ids)
if isinstance(identifier, UserIdList)
else None
),
"usernames": (
",".join(identifier.usernames)
if isinstance(identifier, UsernameList)
else None
),
"ids": None if not user_ids else user_ids,
"usernames": None if not usernames else usernames,
"user_auth": False,
}
@@ -364,7 +339,8 @@ class TwitterGetUsersBlock(Block):
try:
data, included, usernames, ids, names = self.get_users(
credentials,
input_data.identifier,
input_data.user_ids,
input_data.usernames,
input_data.expansions,
input_data.tweet_fields,
input_data.user_fields,

View File

@@ -22,10 +22,10 @@ from backend.util import json
from backend.util.settings import Config
from .model import (
CREDENTIALS_FIELD_NAME,
ContributorDetails,
Credentials,
CredentialsMetaInput,
is_credentials_field_name,
)
app_config = Config()
@@ -61,9 +61,6 @@ class BlockCategory(Enum):
HARDWARE = "Block that interacts with hardware."
AGENT = "Block that interacts with other agents."
CRM = "Block that interacts with CRM services."
SAFETY = (
"Block that provides AI safety mechanisms such as detecting harmful content"
)
def dict(self) -> dict[str, str]:
return {"category": self.name, "description": self.value}
@@ -100,6 +97,11 @@ class BlockSchema(BaseModel):
cls.cached_jsonschema = cast(dict[str, Any], ref_to_dict(model))
# Set default properties values
for field in cls.cached_jsonschema.get("properties", {}).values():
if isinstance(field, dict) and "advanced" not in field:
field["advanced"] = True
return cls.cached_jsonschema
@classmethod
@@ -141,38 +143,17 @@ class BlockSchema(BaseModel):
@classmethod
def __pydantic_init_subclass__(cls, **kwargs):
"""Validates the schema definition. Rules:
- Fields with annotation `CredentialsMetaInput` MUST be
named `credentials` or `*_credentials`
- Fields named `credentials` or `*_credentials` MUST be
of type `CredentialsMetaInput`
- Only one `CredentialsMetaInput` field may be present.
- This field MUST be called `credentials`.
- A field that is called `credentials` MUST be a `CredentialsMetaInput`.
"""
super().__pydantic_init_subclass__(**kwargs)
# Reset cached JSON schema to prevent inheriting it from parent class
cls.cached_jsonschema = {}
credentials_fields = cls.get_credentials_fields()
for field_name in cls.get_fields():
if is_credentials_field_name(field_name):
if field_name not in credentials_fields:
raise TypeError(
f"Credentials field '{field_name}' on {cls.__qualname__} "
f"is not of type {CredentialsMetaInput.__name__}"
)
credentials_fields[field_name].validate_credentials_field_schema(cls)
elif field_name in credentials_fields:
raise KeyError(
f"Credentials field '{field_name}' on {cls.__qualname__} "
"has invalid name: must be 'credentials' or *_credentials"
)
@classmethod
def get_credentials_fields(cls) -> dict[str, type[CredentialsMetaInput]]:
return {
field_name: info.annotation
credentials_fields = [
field_name
for field_name, info in cls.model_fields.items()
if (
inspect.isclass(info.annotation)
@@ -181,7 +162,32 @@ class BlockSchema(BaseModel):
CredentialsMetaInput,
)
)
}
]
if len(credentials_fields) > 1:
raise ValueError(
f"{cls.__qualname__} can only have one CredentialsMetaInput field"
)
elif (
len(credentials_fields) == 1
and credentials_fields[0] != CREDENTIALS_FIELD_NAME
):
raise ValueError(
f"CredentialsMetaInput field on {cls.__qualname__} "
"must be named 'credentials'"
)
elif (
len(credentials_fields) == 0
and CREDENTIALS_FIELD_NAME in cls.model_fields.keys()
):
raise TypeError(
f"Field 'credentials' on {cls.__qualname__} "
f"must be of type {CredentialsMetaInput.__name__}"
)
if credentials_field := cls.model_fields.get(CREDENTIALS_FIELD_NAME):
credentials_input_type = cast(
CredentialsMetaInput, credentials_field.annotation
)
credentials_input_type.validate_credentials_field_schema(cls)
BlockSchemaInputType = TypeVar("BlockSchemaInputType", bound=BlockSchema)
@@ -254,7 +260,7 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
test_input: BlockInput | list[BlockInput] | None = None,
test_output: BlockData | list[BlockData] | None = None,
test_mock: dict[str, Any] | None = None,
test_credentials: Optional[Credentials | dict[str, Credentials]] = None,
test_credentials: Optional[Credentials] = None,
disabled: bool = False,
static_output: bool = False,
block_type: BlockType = BlockType.STANDARD,
@@ -296,16 +302,10 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
if self.webhook_config:
if isinstance(self.webhook_config, BlockWebhookConfig):
# Enforce presence of credentials field on auto-setup webhook blocks
if not (cred_fields := self.input_schema.get_credentials_fields()):
if CREDENTIALS_FIELD_NAME not in self.input_schema.model_fields:
raise TypeError(
"credentials field is required on auto-setup webhook blocks"
)
# Disallow multiple credentials inputs on webhook blocks
elif len(cred_fields) > 1:
raise ValueError(
"Multiple credentials inputs not supported on webhook blocks"
)
self.block_type = BlockType.WEBHOOK
else:
self.block_type = BlockType.WEBHOOK_MANUAL

View File

@@ -51,7 +51,6 @@ MODEL_COST: dict[LlmModel, int] = {
LlmModel.LLAMA3_1_405B: 1,
LlmModel.LLAMA3_1_70B: 1,
LlmModel.LLAMA3_1_8B: 1,
LlmModel.OLLAMA_LLAMA3_2: 1,
LlmModel.OLLAMA_LLAMA3_8B: 1,
LlmModel.OLLAMA_LLAMA3_405B: 1,
LlmModel.OLLAMA_DOLPHIN: 1,

View File

@@ -270,9 +270,9 @@ async def update_graph_execution_start_time(graph_exec_id: str):
async def update_graph_execution_stats(
graph_exec_id: str,
status: ExecutionStatus,
stats: dict[str, Any],
) -> ExecutionResult:
status = ExecutionStatus.FAILED if stats.get("error") else ExecutionStatus.COMPLETED
res = await AgentGraphExecution.prisma().update(
where={"id": graph_exec_id},
data={

View File

@@ -193,8 +193,7 @@ class Graph(BaseDbModel):
"properties": {
p.name: {
"secret": p.secret,
# Default value has to be set for advanced fields.
"advanced": p.advanced and p.value is not None,
"advanced": p.advanced,
"title": p.title or p.name,
**({"description": p.description} if p.description else {}),
**({"default": p.value} if p.value is not None else {}),
@@ -424,26 +423,6 @@ class GraphModel(Graph):
result[key] = value
return result
def clean_graph(self):
blocks = [block() for block in get_blocks().values()]
input_blocks = [
node
for node in self.nodes
if next(
(
b
for b in blocks
if b.id == node.block_id and b.block_type == BlockType.INPUT
),
None,
)
]
for node in self.nodes:
if any(input_block.id == node.id for input_block in input_blocks):
node.input_default["value"] = ""
# --------------------- CRUD functions --------------------- #
@@ -629,20 +608,25 @@ async def __create_graph(tx, graph: Graph, user_id: str):
"isTemplate": graph.is_template,
"isActive": graph.is_active,
"userId": user_id,
"AgentNodes": {
"create": [
{
"id": node.id,
"agentBlockId": node.block_id,
"constantInput": json.dumps(node.input_default),
"metadata": json.dumps(node.metadata),
}
for node in graph.nodes
]
},
}
)
await asyncio.gather(
*[
AgentNode.prisma(tx).create(
{
"id": node.id,
"agentBlockId": node.block_id,
"agentGraphId": graph.id,
"agentGraphVersion": graph.version,
"constantInput": json.dumps(node.input_default),
"metadata": json.dumps(node.metadata),
}
)
for node in graph.nodes
]
)
await asyncio.gather(
*[
AgentNodeLink.prisma(tx).create(

View File

@@ -35,7 +35,7 @@ class Webhook(BaseDbModel):
@computed_field
@property
def url(self) -> str:
return webhook_ingress_url(self.provider, self.id)
return webhook_ingress_url(self.provider.value, self.id)
@staticmethod
def from_db(webhook: IntegrationWebhook):

View File

@@ -134,20 +134,13 @@ def SchemaField(
title: Optional[str] = None,
description: Optional[str] = None,
placeholder: Optional[str] = None,
advanced: Optional[bool] = None,
advanced: Optional[bool] = False,
secret: bool = False,
exclude: bool = False,
hidden: Optional[bool] = None,
depends_on: list[str] | None = None,
image_upload: Optional[bool] = None,
image_output: Optional[bool] = None,
**kwargs,
) -> T:
if default is PydanticUndefined and default_factory is None:
advanced = False
elif advanced is None:
advanced = True
json_extra = {
k: v
for k, v in {
@@ -156,8 +149,6 @@ def SchemaField(
"advanced": advanced,
"hidden": hidden,
"depends_on": depends_on,
"image_upload": image_upload,
"image_output": image_output,
}.items()
if v is not None
}
@@ -250,8 +241,7 @@ CP = TypeVar("CP", bound=ProviderName)
CT = TypeVar("CT", bound=CredentialsType)
def is_credentials_field_name(field_name: str) -> bool:
return field_name == "credentials" or field_name.endswith("_credentials")
CREDENTIALS_FIELD_NAME = "credentials"
class CredentialsMetaInput(BaseModel, Generic[CP, CT]):
@@ -260,21 +250,21 @@ class CredentialsMetaInput(BaseModel, Generic[CP, CT]):
provider: CP
type: CT
@classmethod
def allowed_providers(cls) -> tuple[ProviderName, ...]:
return get_args(cls.model_fields["provider"].annotation)
@staticmethod
def _add_json_schema_extra(schema, cls: CredentialsMetaInput):
schema["credentials_provider"] = get_args(
cls.model_fields["provider"].annotation
)
schema["credentials_types"] = get_args(cls.model_fields["type"].annotation)
@classmethod
def allowed_cred_types(cls) -> tuple[CredentialsType, ...]:
return get_args(cls.model_fields["type"].annotation)
model_config = ConfigDict(
json_schema_extra=_add_json_schema_extra, # type: ignore
)
@classmethod
def validate_credentials_field_schema(cls, model: type["BlockSchema"]):
"""Validates the schema of a credentials input field"""
field_name = next(
name for name, type in model.get_credentials_fields().items() if type is cls
)
field_schema = model.jsonschema()["properties"][field_name]
"""Validates the schema of a `credentials` field"""
field_schema = model.jsonschema()["properties"][CREDENTIALS_FIELD_NAME]
try:
schema_extra = _CredentialsFieldSchemaExtra[CP, CT].model_validate(
field_schema
@@ -288,20 +278,11 @@ class CredentialsMetaInput(BaseModel, Generic[CP, CT]):
f"{field_schema}"
) from e
if len(cls.allowed_providers()) > 1 and not schema_extra.discriminator:
raise TypeError(
f"Multi-provider CredentialsField '{field_name}' "
"requires discriminator!"
)
@staticmethod
def _add_json_schema_extra(schema, cls: CredentialsMetaInput):
schema["credentials_provider"] = cls.allowed_providers()
schema["credentials_types"] = cls.allowed_cred_types()
model_config = ConfigDict(
json_schema_extra=_add_json_schema_extra, # type: ignore
)
if (
len(schema_extra.credentials_provider) > 1
and not schema_extra.discriminator
):
raise TypeError("Multi-provider CredentialsField requires discriminator!")
class _CredentialsFieldSchemaExtra(BaseModel, Generic[CP, CT]):

View File

@@ -10,6 +10,7 @@ from contextlib import contextmanager
from multiprocessing.pool import AsyncResult, Pool
from typing import TYPE_CHECKING, Any, Generator, TypeVar, cast
from pydantic import BaseModel
from redis.lock import Lock as RedisLock
if TYPE_CHECKING:
@@ -19,14 +20,7 @@ from autogpt_libs.utils.cache import thread_cached
from backend.blocks.agent import AgentExecutorBlock
from backend.data import redis
from backend.data.block import (
Block,
BlockData,
BlockInput,
BlockSchema,
BlockType,
get_block,
)
from backend.data.block import Block, BlockData, BlockInput, BlockType, get_block
from backend.data.execution import (
ExecutionQueue,
ExecutionResult,
@@ -37,6 +31,7 @@ from backend.data.execution import (
parse_execution_output,
)
from backend.data.graph import GraphModel, Link, Node
from backend.data.model import CREDENTIALS_FIELD_NAME, CredentialsMetaInput
from backend.integrations.creds_manager import IntegrationCredentialsManager
from backend.util import json
from backend.util.decorator import error_logged, time_measured
@@ -175,11 +170,10 @@ def execute_node(
# one (running) block at a time; simultaneous execution of blocks using same
# credentials is not supported.
creds_lock = None
input_model = cast(type[BlockSchema], node_block.input_schema)
for field_name, input_type in input_model.get_credentials_fields().items():
credentials_meta = input_type(**input_data[field_name])
if CREDENTIALS_FIELD_NAME in input_data:
credentials_meta = CredentialsMetaInput(**input_data[CREDENTIALS_FIELD_NAME])
credentials, creds_lock = creds_manager.acquire(user_id, credentials_meta.id)
extra_exec_kwargs[field_name] = credentials
extra_exec_kwargs["credentials"] = credentials
output_size = 0
end_status = ExecutionStatus.COMPLETED
@@ -597,7 +591,7 @@ class Executor:
node_eid="*",
block_name="-",
)
timing_info, (exec_stats, status, error) = cls._on_graph_execution(
timing_info, (exec_stats, error) = cls._on_graph_execution(
graph_exec, cancel, log_metadata
)
exec_stats["walltime"] = timing_info.wall_time
@@ -605,7 +599,6 @@ class Executor:
exec_stats["error"] = str(error) if error else None
result = cls.db_client.update_graph_execution_stats(
graph_exec_id=graph_exec.graph_exec_id,
status=status,
stats=exec_stats,
)
cls.db_client.send_execution_update(result)
@@ -617,12 +610,11 @@ class Executor:
graph_exec: GraphExecutionEntry,
cancel: threading.Event,
log_metadata: LogMetadata,
) -> tuple[dict[str, Any], ExecutionStatus, Exception | None]:
) -> tuple[dict[str, Any], Exception | None]:
"""
Returns:
dict: The execution statistics of the graph execution.
ExecutionStatus: The final status of the graph execution.
Exception | None: The error that occurred during the execution, if any.
The execution statistics of the graph execution.
The error that occurred during the execution.
"""
log_metadata.info(f"Start graph execution {graph_exec.graph_exec_id}")
exec_stats = {
@@ -667,7 +659,8 @@ class Executor:
while not queue.empty():
if cancel.is_set():
return exec_stats, ExecutionStatus.TERMINATED, error
error = RuntimeError("Execution is cancelled")
return exec_stats, error
exec_data = queue.get()
@@ -697,7 +690,8 @@ class Executor:
)
for node_id, execution in list(running_executions.items()):
if cancel.is_set():
return exec_stats, ExecutionStatus.TERMINATED, error
error = RuntimeError("Execution is cancelled")
return exec_stats, error
if not queue.empty():
break # yield to parent loop to execute new queue items
@@ -716,12 +710,7 @@ class Executor:
finished = True
cancel.set()
cancel_thread.join()
return (
exec_stats,
ExecutionStatus.FAILED if error else ExecutionStatus.COMPLETED,
error,
)
return exec_stats, error
class ExecutionManager(AppService):
@@ -887,8 +876,11 @@ class ExecutionManager(AppService):
ExecutionStatus.COMPLETED,
ExecutionStatus.FAILED,
):
self.db_client.upsert_execution_output(
node_exec.node_exec_id, "error", "TERMINATED"
)
exec_update = self.db_client.update_execution_status(
node_exec.node_exec_id, ExecutionStatus.TERMINATED
node_exec.node_exec_id, ExecutionStatus.FAILED
)
self.db_client.send_execution_update(exec_update)
@@ -901,39 +893,41 @@ class ExecutionManager(AppService):
raise ValueError(f"Unknown block {node.block_id} for node #{node.id}")
# Find any fields of type CredentialsMetaInput
credentials_fields = cast(
type[BlockSchema], block.input_schema
).get_credentials_fields()
if not credentials_fields:
model_fields = cast(type[BaseModel], block.input_schema).model_fields
if CREDENTIALS_FIELD_NAME not in model_fields:
continue
for field_name, credentials_meta_type in credentials_fields.items():
credentials_meta = credentials_meta_type.model_validate(
node.input_default[field_name]
field = model_fields[CREDENTIALS_FIELD_NAME]
# The BlockSchema class enforces that a `credentials` field is always a
# `CredentialsMetaInput`, so we can safely assume this here.
credentials_meta_type = cast(CredentialsMetaInput, field.annotation)
credentials_meta = credentials_meta_type.model_validate(
node.input_default[CREDENTIALS_FIELD_NAME]
)
# Fetch the corresponding Credentials and perform sanity checks
credentials = self.credentials_store.get_creds_by_id(
user_id, credentials_meta.id
)
if not credentials:
raise ValueError(
f"Unknown credentials #{credentials_meta.id} "
f"for node #{node.id}"
)
# Fetch the corresponding Credentials and perform sanity checks
credentials = self.credentials_store.get_creds_by_id(
user_id, credentials_meta.id
if (
credentials.provider != credentials_meta.provider
or credentials.type != credentials_meta.type
):
logger.warning(
f"Invalid credentials #{credentials.id} for node #{node.id}: "
"type/provider mismatch: "
f"{credentials_meta.type}<>{credentials.type};"
f"{credentials_meta.provider}<>{credentials.provider}"
)
raise ValueError(
f"Invalid credentials #{credentials.id} for node #{node.id}: "
"type/provider mismatch"
)
if not credentials:
raise ValueError(
f"Unknown credentials #{credentials_meta.id} "
f"for node #{node.id} input '{field_name}'"
)
if (
credentials.provider != credentials_meta.provider
or credentials.type != credentials_meta.type
):
logger.warning(
f"Invalid credentials #{credentials.id} for node #{node.id}: "
"type/provider mismatch: "
f"{credentials_meta.type}<>{credentials.type};"
f"{credentials_meta.provider}<>{credentials.provider}"
)
raise ValueError(
f"Invalid credentials #{credentials.id} for node #{node.id}: "
"type/provider mismatch"
)
# ------- UTILITIES ------- #
@@ -953,8 +947,7 @@ def synchronized(key: str, timeout: int = 60):
lock.acquire()
yield
finally:
if lock.locked():
lock.release()
lock.release()
def llprint(message: str):

View File

@@ -99,10 +99,6 @@ class ExecutionScheduler(AppService):
def get_port(cls) -> int:
return config.execution_scheduler_port
@classmethod
def db_pool_size(cls) -> int:
return config.scheduler_db_pool_size
@property
@thread_cached
def execution_client(self) -> ExecutionManager:
@@ -114,11 +110,7 @@ class ExecutionScheduler(AppService):
self.scheduler = BlockingScheduler(
jobstores={
"default": SQLAlchemyJobStore(
engine=create_engine(
url=db_url,
pool_size=self.db_pool_size(),
max_overflow=0,
),
engine=create_engine(db_url),
metadata=MetaData(schema=db_schema),
)
}

View File

@@ -93,34 +93,6 @@ open_router_credentials = APIKeyCredentials(
title="Use Credits for Open Router",
expires_at=None,
)
fal_credentials = APIKeyCredentials(
id="6c0f5bd0-9008-4638-9d79-4b40b631803e",
provider="fal",
api_key=SecretStr(settings.secrets.fal_api_key),
title="Use Credits for FAL",
expires_at=None,
)
exa_credentials = APIKeyCredentials(
id="96153e04-9c6c-4486-895f-5bb683b1ecec",
provider="exa",
api_key=SecretStr(settings.secrets.exa_api_key),
title="Use Credits for Exa search",
expires_at=None,
)
e2b_credentials = APIKeyCredentials(
id="78d19fd7-4d59-4a16-8277-3ce310acf2b7",
provider="e2b",
api_key=SecretStr(settings.secrets.e2b_api_key),
title="Use Credits for E2B",
expires_at=None,
)
nvidia_credentials = APIKeyCredentials(
id="96b83908-2789-4dec-9968-18f0ece4ceb3",
provider="nvidia",
api_key=SecretStr(settings.secrets.nvidia_api_key),
title="Use Credits for Nvidia",
expires_at=None,
)
DEFAULT_CREDENTIALS = [
@@ -134,10 +106,6 @@ DEFAULT_CREDENTIALS = [
jina_credentials,
unreal_credentials,
open_router_credentials,
fal_credentials,
exa_credentials,
e2b_credentials,
nvidia_credentials,
]
@@ -189,14 +157,6 @@ class IntegrationCredentialsStore:
all_credentials.append(unreal_credentials)
if settings.secrets.open_router_api_key:
all_credentials.append(open_router_credentials)
if settings.secrets.fal_api_key:
all_credentials.append(fal_credentials)
if settings.secrets.exa_api_key:
all_credentials.append(exa_credentials)
if settings.secrets.e2b_api_key:
all_credentials.append(e2b_credentials)
if settings.secrets.nvidia_api_key:
all_credentials.append(nvidia_credentials)
return all_credentials
def get_creds_by_id(self, user_id: str, credentials_id: str) -> Credentials | None:

View File

@@ -92,7 +92,7 @@ class IntegrationCredentialsManager:
fresh_credentials = oauth_handler.refresh_tokens(credentials)
self.store.update_creds(user_id, fresh_credentials)
if _lock and _lock.locked():
if _lock:
_lock.release()
credentials = fresh_credentials
@@ -144,8 +144,7 @@ class IntegrationCredentialsManager:
try:
yield
finally:
if lock.locked():
lock.release()
lock.release()
def release_all_locks(self):
"""Call this on process termination to ensure all locks are released"""

View File

@@ -19,7 +19,6 @@ class ProviderName(str, Enum):
JINA = "jina"
MEDIUM = "medium"
NOTION = "notion"
NVIDIA = "nvidia"
OLLAMA = "ollama"
OPENAI = "openai"
OPENWEATHERMAP = "openweathermap"

View File

@@ -1,8 +1,9 @@
import logging
from typing import TYPE_CHECKING, Callable, Optional, cast
from backend.data.block import BlockSchema, BlockWebhookConfig, get_block
from backend.data.block import BlockWebhookConfig, get_block
from backend.data.graph import set_node_webhook
from backend.data.model import CREDENTIALS_FIELD_NAME
from backend.integrations.webhooks import WEBHOOK_MANAGERS_BY_NAME
if TYPE_CHECKING:
@@ -29,28 +30,14 @@ async def on_graph_activate(
# Compare nodes in new_graph_version with previous_graph_version
updated_nodes = []
for new_node in graph.nodes:
block = get_block(new_node.block_id)
if not block:
raise ValueError(
f"Node #{new_node.id} is instance of unknown block #{new_node.block_id}"
)
block_input_schema = cast(BlockSchema, block.input_schema)
node_credentials = None
if (
# Webhook-triggered blocks are only allowed to have 1 credentials input
(
creds_field_name := next(
iter(block_input_schema.get_credentials_fields()), None
if creds_meta := new_node.input_default.get(CREDENTIALS_FIELD_NAME):
node_credentials = get_credentials(creds_meta["id"])
if not node_credentials:
raise ValueError(
f"Node #{new_node.id} updated with non-existent "
f"credentials #{node_credentials}"
)
)
and (creds_meta := new_node.input_default.get(creds_field_name))
and not (node_credentials := get_credentials(creds_meta["id"]))
):
raise ValueError(
f"Node #{new_node.id} input '{creds_field_name}' updated with "
f"non-existent credentials #{creds_meta['id']}"
)
updated_node = await on_node_activate(
graph.user_id, new_node, credentials=node_credentials
@@ -75,28 +62,14 @@ async def on_graph_deactivate(
"""
updated_nodes = []
for node in graph.nodes:
block = get_block(node.block_id)
if not block:
raise ValueError(
f"Node #{node.id} is instance of unknown block #{node.block_id}"
)
block_input_schema = cast(BlockSchema, block.input_schema)
node_credentials = None
if (
# Webhook-triggered blocks are only allowed to have 1 credentials input
(
creds_field_name := next(
iter(block_input_schema.get_credentials_fields()), None
if creds_meta := node.input_default.get(CREDENTIALS_FIELD_NAME):
node_credentials = get_credentials(creds_meta["id"])
if not node_credentials:
logger.error(
f"Node #{node.id} referenced non-existent "
f"credentials #{creds_meta['id']}"
)
)
and (creds_meta := node.input_default.get(creds_field_name))
and not (node_credentials := get_credentials(creds_meta["id"]))
):
logger.error(
f"Node #{node.id} input '{creds_field_name}' referenced non-existent "
f"credentials #{creds_meta['id']}"
)
updated_node = await on_node_deactivate(node, credentials=node_credentials)
updated_nodes.append(updated_node)
@@ -146,17 +119,14 @@ async def on_node_activate(
else:
resource = "" # not relevant for manual webhooks
block_input_schema = cast(BlockSchema, block.input_schema)
credentials_field_name = next(iter(block_input_schema.get_credentials_fields()), "")
needs_credentials = CREDENTIALS_FIELD_NAME in block.input_schema.model_fields
credentials_meta = (
node.input_default.get(credentials_field_name)
if credentials_field_name
else None
node.input_default.get(CREDENTIALS_FIELD_NAME) if needs_credentials else None
)
event_filter_input_name = block.webhook_config.event_filter_input
has_everything_for_webhook = (
resource is not None
and (credentials_meta or not credentials_field_name)
and (credentials_meta or not needs_credentials)
and (
not event_filter_input_name
or (
@@ -260,7 +230,7 @@ async def on_node_deactivate(
)
await webhooks_manager.prune_webhook_if_dangling(webhook.id, credentials)
if (
cast(BlockSchema, block.input_schema).get_credentials_fields()
CREDENTIALS_FIELD_NAME in block.input_schema.model_fields
and not credentials
):
logger.warning(

View File

@@ -1,12 +1,11 @@
from backend.integrations.providers import ProviderName
from backend.util.settings import Config
app_config = Config()
# TODO: add test to assert this matches the actual API route
def webhook_ingress_url(provider_name: ProviderName, webhook_id: str) -> str:
def webhook_ingress_url(provider_name: str, webhook_id: str) -> str:
return (
f"{app_config.platform_base_url}/api/integrations/{provider_name.value}"
f"{app_config.platform_base_url}/api/integrations/{provider_name}"
f"/webhooks/{webhook_id}/ingress"
)

View File

@@ -541,7 +541,7 @@ def get_execution_schedules(
@v1_router.post(
"/api-keys",
response_model=CreateAPIKeyResponse,
response_model=list[CreateAPIKeyResponse] | dict[str, str],
tags=["api-keys"],
dependencies=[Depends(auth_middleware)],
)
@@ -583,7 +583,7 @@ async def get_api_keys(
@v1_router.get(
"/api-keys/{key_id}",
response_model=APIKeyWithoutHash,
response_model=list[APIKeyWithoutHash] | dict[str, str],
tags=["api-keys"],
dependencies=[Depends(auth_middleware)],
)
@@ -604,7 +604,7 @@ async def get_api_key(
@v1_router.delete(
"/api-keys/{key_id}",
response_model=APIKeyWithoutHash,
response_model=list[APIKeyWithoutHash] | dict[str, str],
tags=["api-keys"],
dependencies=[Depends(auth_middleware)],
)
@@ -626,7 +626,7 @@ async def delete_api_key(
@v1_router.post(
"/api-keys/{key_id}/suspend",
response_model=APIKeyWithoutHash,
response_model=list[APIKeyWithoutHash] | dict[str, str],
tags=["api-keys"],
dependencies=[Depends(auth_middleware)],
)
@@ -648,7 +648,7 @@ async def suspend_key(
@v1_router.put(
"/api-keys/{key_id}/permissions",
response_model=APIKeyWithoutHash,
response_model=list[APIKeyWithoutHash] | dict[str, str],
tags=["api-keys"],
dependencies=[Depends(auth_middleware)],
)

View File

@@ -2,7 +2,6 @@ import autogpt_libs.auth.depends
import autogpt_libs.auth.middleware
import fastapi
import fastapi.testclient
import pytest
import pytest_mock
import backend.server.v2.library.db
@@ -81,7 +80,6 @@ def test_get_library_agents_error(mocker: pytest_mock.MockFixture):
mock_db_call.assert_called_once_with("test-user-id")
@pytest.mark.skip(reason="Mocker Not implemented")
def test_add_agent_to_library_success(mocker: pytest_mock.MockFixture):
mock_db_call = mocker.patch("backend.server.v2.library.db.add_agent_to_library")
mock_db_call.return_value = None
@@ -93,7 +91,6 @@ def test_add_agent_to_library_success(mocker: pytest_mock.MockFixture):
)
@pytest.mark.skip(reason="Mocker Not implemented")
def test_add_agent_to_library_error(mocker: pytest_mock.MockFixture):
mock_db_call = mocker.patch("backend.server.v2.library.db.add_agent_to_library")
mock_db_call.side_effect = Exception("Test error")

View File

@@ -1,18 +1,14 @@
import logging
import random
from datetime import datetime
from typing import Optional
import fastapi
import prisma.enums
import prisma.errors
import prisma.models
import prisma.types
import backend.data.graph
import backend.server.v2.store.exceptions
import backend.server.v2.store.model
from backend.data.graph import GraphModel
logger = logging.getLogger(__name__)
@@ -35,7 +31,7 @@ async def get_store_agents(
sanitized_query = search_query.strip()
if not sanitized_query or len(sanitized_query) > 100: # Reasonable length limit
raise backend.server.v2.store.exceptions.DatabaseError(
f"Invalid search query: len({len(sanitized_query)}) query: {search_query}"
"Invalid search query"
)
# Escape special SQL characters
@@ -453,11 +449,6 @@ async def create_store_submission(
)
try:
# Sanitize slug to only allow letters and hyphens
slug = "".join(
c if c.isalpha() or c == "-" or c.isnumeric() else "" for c in slug
).lower()
# First verify the agent belongs to this user
agent = await prisma.models.AgentGraph.prisma().find_first(
where=prisma.types.AgentGraphWhereInput(
@@ -645,12 +636,7 @@ async def update_or_create_profile(
logger.info(f"Updating profile for user {user_id} data: {profile}")
try:
# Sanitize username to only allow letters and hyphens
username = "".join(
c if c.isalpha() or c == "-" or c.isnumeric() else ""
for c in profile.username
).lower()
# Check if profile exists for user
existing_profile = await prisma.models.Profile.prisma().find_first(
where={"userId": user_id}
)
@@ -665,7 +651,7 @@ async def update_or_create_profile(
data={
"userId": user_id,
"name": profile.name,
"username": username,
"username": profile.username.lower(),
"description": profile.description,
"links": profile.links or [],
"avatarUrl": profile.avatar_url,
@@ -690,7 +676,7 @@ async def update_or_create_profile(
if profile.name is not None:
update_data["name"] = profile.name
if profile.username is not None:
update_data["username"] = username
update_data["username"] = profile.username.lower()
if profile.description is not None:
update_data["description"] = profile.description
if profile.links is not None:
@@ -790,45 +776,3 @@ async def get_my_agents(
raise backend.server.v2.store.exceptions.DatabaseError(
"Failed to fetch my agents"
) from e
async def get_agent(
store_listing_version_id: str, version_id: Optional[int]
) -> GraphModel:
"""Get agent using the version ID and store listing version ID."""
try:
store_listing_version = (
await prisma.models.StoreListingVersion.prisma().find_unique(
where={"id": store_listing_version_id}, include={"Agent": True}
)
)
if not store_listing_version or not store_listing_version.Agent:
raise fastapi.HTTPException(
status_code=404,
detail=f"Store listing version {store_listing_version_id} not found",
)
agent = store_listing_version.Agent
graph = await backend.data.graph.get_graph(
agent.id, agent.version, template=True
)
if not graph:
raise fastapi.HTTPException(
status_code=404, detail=f"Agent {agent.id} not found"
)
graph.version = 1
graph.is_template = False
graph.is_active = True
delattr(graph, "user_id")
return graph
except Exception as e:
logger.error(f"Error getting agent: {str(e)}")
raise backend.server.v2.store.exceptions.DatabaseError(
"Failed to fetch agent"
) from e

View File

@@ -1,6 +1,4 @@
import json
import logging
import tempfile
import typing
import urllib.parse
@@ -8,9 +6,7 @@ import autogpt_libs.auth.depends
import autogpt_libs.auth.middleware
import fastapi
import fastapi.responses
from fastapi.encoders import jsonable_encoder
import backend.data.block
import backend.data.graph
import backend.server.v2.store.db
import backend.server.v2.store.image_gen
@@ -27,16 +23,12 @@ router = fastapi.APIRouter()
##############################################
@router.get(
"/profile",
tags=["store", "private"],
response_model=backend.server.v2.store.model.ProfileDetails,
)
@router.get("/profile", tags=["store", "private"])
async def get_profile(
user_id: typing.Annotated[
str, fastapi.Depends(autogpt_libs.auth.depends.get_user_id)
]
):
) -> backend.server.v2.store.model.ProfileDetails:
"""
Get the profile details for the authenticated user.
"""
@@ -45,24 +37,20 @@ async def get_profile(
return profile
except Exception:
logger.exception("Exception occurred whilst getting user profile")
return fastapi.responses.JSONResponse(
status_code=500,
content={"detail": "An error occurred while retrieving the user profile"},
)
raise
@router.post(
"/profile",
tags=["store", "private"],
dependencies=[fastapi.Depends(autogpt_libs.auth.middleware.auth_middleware)],
response_model=backend.server.v2.store.model.CreatorDetails,
)
async def update_or_create_profile(
profile: backend.server.v2.store.model.Profile,
user_id: typing.Annotated[
str, fastapi.Depends(autogpt_libs.auth.depends.get_user_id)
],
):
) -> backend.server.v2.store.model.CreatorDetails:
"""
Update the store profile for the authenticated user.
@@ -83,10 +71,7 @@ async def update_or_create_profile(
return updated_profile
except Exception:
logger.exception("Exception occurred whilst updating profile")
return fastapi.responses.JSONResponse(
status_code=500,
content={"detail": "An error occurred while updating the user profile"},
)
raise
##############################################
@@ -94,11 +79,7 @@ async def update_or_create_profile(
##############################################
@router.get(
"/agents",
tags=["store", "public"],
response_model=backend.server.v2.store.model.StoreAgentsResponse,
)
@router.get("/agents", tags=["store", "public"])
async def get_agents(
featured: bool = False,
creator: str | None = None,
@@ -107,7 +88,7 @@ async def get_agents(
category: str | None = None,
page: int = 1,
page_size: int = 20,
):
) -> backend.server.v2.store.model.StoreAgentsResponse:
"""
Get a paginated list of agents from the store with optional filtering and sorting.
@@ -157,18 +138,13 @@ async def get_agents(
return agents
except Exception:
logger.exception("Exception occured whilst getting store agents")
return fastapi.responses.JSONResponse(
status_code=500,
content={"detail": "An error occurred while retrieving the store agents"},
)
raise
@router.get(
"/agents/{username}/{agent_name}",
tags=["store", "public"],
response_model=backend.server.v2.store.model.StoreAgentDetails,
)
async def get_agent(username: str, agent_name: str):
@router.get("/agents/{username}/{agent_name}", tags=["store", "public"])
async def get_agent(
username: str, agent_name: str
) -> backend.server.v2.store.model.StoreAgentDetails:
"""
This is only used on the AgentDetails Page
@@ -177,26 +153,20 @@ async def get_agent(username: str, agent_name: str):
try:
username = urllib.parse.unquote(username).lower()
# URL decode the agent name since it comes from the URL path
agent_name = urllib.parse.unquote(agent_name).lower()
agent_name = urllib.parse.unquote(agent_name)
agent = await backend.server.v2.store.db.get_store_agent_details(
username=username, agent_name=agent_name
)
return agent
except Exception:
logger.exception("Exception occurred whilst getting store agent details")
return fastapi.responses.JSONResponse(
status_code=500,
content={
"detail": "An error occurred while retrieving the store agent details"
},
)
raise
@router.post(
"/agents/{username}/{agent_name}/review",
tags=["store"],
dependencies=[fastapi.Depends(autogpt_libs.auth.middleware.auth_middleware)],
response_model=backend.server.v2.store.model.StoreReview,
)
async def create_review(
username: str,
@@ -205,7 +175,7 @@ async def create_review(
user_id: typing.Annotated[
str, fastapi.Depends(autogpt_libs.auth.depends.get_user_id)
],
):
) -> backend.server.v2.store.model.StoreReview:
"""
Create a review for a store agent.
@@ -232,10 +202,7 @@ async def create_review(
return created_review
except Exception:
logger.exception("Exception occurred whilst creating store review")
return fastapi.responses.JSONResponse(
status_code=500,
content={"detail": "An error occurred while creating the store review"},
)
raise
##############################################
@@ -243,18 +210,14 @@ async def create_review(
##############################################
@router.get(
"/creators",
tags=["store", "public"],
response_model=backend.server.v2.store.model.CreatorsResponse,
)
@router.get("/creators", tags=["store", "public"])
async def get_creators(
featured: bool = False,
search_query: str | None = None,
sorted_by: str | None = None,
page: int = 1,
page_size: int = 20,
):
) -> backend.server.v2.store.model.CreatorsResponse:
"""
This is needed for:
- Home Page Featured Creators
@@ -288,20 +251,11 @@ async def get_creators(
return creators
except Exception:
logger.exception("Exception occurred whilst getting store creators")
return fastapi.responses.JSONResponse(
status_code=500,
content={"detail": "An error occurred while retrieving the store creators"},
)
raise
@router.get(
"/creator/{username}",
tags=["store", "public"],
response_model=backend.server.v2.store.model.CreatorDetails,
)
async def get_creator(
username: str,
):
@router.get("/creator/{username}", tags=["store", "public"])
async def get_creator(username: str) -> backend.server.v2.store.model.CreatorDetails:
"""
Get the details of a creator
- Creator Details Page
@@ -314,12 +268,7 @@ async def get_creator(
return creator
except Exception:
logger.exception("Exception occurred whilst getting creator details")
return fastapi.responses.JSONResponse(
status_code=500,
content={
"detail": "An error occurred while retrieving the creator details"
},
)
raise
############################################
@@ -329,36 +278,31 @@ async def get_creator(
"/myagents",
tags=["store", "private"],
dependencies=[fastapi.Depends(autogpt_libs.auth.middleware.auth_middleware)],
response_model=backend.server.v2.store.model.MyAgentsResponse,
)
async def get_my_agents(
user_id: typing.Annotated[
str, fastapi.Depends(autogpt_libs.auth.depends.get_user_id)
]
):
) -> backend.server.v2.store.model.MyAgentsResponse:
try:
agents = await backend.server.v2.store.db.get_my_agents(user_id)
return agents
except Exception:
logger.exception("Exception occurred whilst getting my agents")
return fastapi.responses.JSONResponse(
status_code=500,
content={"detail": "An error occurred while retrieving the my agents"},
)
raise
@router.delete(
"/submissions/{submission_id}",
tags=["store", "private"],
dependencies=[fastapi.Depends(autogpt_libs.auth.middleware.auth_middleware)],
response_model=bool,
)
async def delete_submission(
user_id: typing.Annotated[
str, fastapi.Depends(autogpt_libs.auth.depends.get_user_id)
],
submission_id: str,
):
) -> bool:
"""
Delete a store listing submission.
@@ -377,17 +321,13 @@ async def delete_submission(
return result
except Exception:
logger.exception("Exception occurred whilst deleting store submission")
return fastapi.responses.JSONResponse(
status_code=500,
content={"detail": "An error occurred while deleting the store submission"},
)
raise
@router.get(
"/submissions",
tags=["store", "private"],
dependencies=[fastapi.Depends(autogpt_libs.auth.middleware.auth_middleware)],
response_model=backend.server.v2.store.model.StoreSubmissionsResponse,
)
async def get_submissions(
user_id: typing.Annotated[
@@ -395,7 +335,7 @@ async def get_submissions(
],
page: int = 1,
page_size: int = 20,
):
) -> backend.server.v2.store.model.StoreSubmissionsResponse:
"""
Get a paginated list of store submissions for the authenticated user.
@@ -428,26 +368,20 @@ async def get_submissions(
return listings
except Exception:
logger.exception("Exception occurred whilst getting store submissions")
return fastapi.responses.JSONResponse(
status_code=500,
content={
"detail": "An error occurred while retrieving the store submissions"
},
)
raise
@router.post(
"/submissions",
tags=["store", "private"],
dependencies=[fastapi.Depends(autogpt_libs.auth.middleware.auth_middleware)],
response_model=backend.server.v2.store.model.StoreSubmission,
)
async def create_submission(
submission_request: backend.server.v2.store.model.StoreSubmissionRequest,
user_id: typing.Annotated[
str, fastapi.Depends(autogpt_libs.auth.depends.get_user_id)
],
):
) -> backend.server.v2.store.model.StoreSubmission:
"""
Create a new store listing submission.
@@ -477,10 +411,7 @@ async def create_submission(
return submission
except Exception:
logger.exception("Exception occurred whilst creating store submission")
return fastapi.responses.JSONResponse(
status_code=500,
content={"detail": "An error occurred while creating the store submission"},
)
raise
@router.post(
@@ -493,7 +424,7 @@ async def upload_submission_media(
user_id: typing.Annotated[
str, fastapi.Depends(autogpt_libs.auth.depends.get_user_id)
],
):
) -> str:
"""
Upload media (images/videos) for a store listing submission.
@@ -512,11 +443,10 @@ async def upload_submission_media(
user_id=user_id, file=file
)
return media_url
except Exception:
except Exception as e:
logger.exception("Exception occurred whilst uploading submission media")
return fastapi.responses.JSONResponse(
status_code=500,
content={"detail": "An error occurred while uploading the media file"},
raise fastapi.HTTPException(
status_code=500, detail=f"Failed to upload media file: {str(e)}"
)
@@ -573,72 +503,8 @@ async def generate_image(
)
return fastapi.responses.JSONResponse(content={"image_url": image_url})
except Exception:
except Exception as e:
logger.exception("Exception occurred whilst generating submission image")
return fastapi.responses.JSONResponse(
status_code=500,
content={"detail": "An error occurred while generating the image"},
)
@router.get(
"/download/agents/{store_listing_version_id}",
tags=["store", "public"],
)
async def download_agent_file(
store_listing_version_id: str = fastapi.Path(
..., description="The ID of the agent to download"
),
version: typing.Optional[int] = fastapi.Query(
None, description="Specific version of the agent"
),
) -> fastapi.responses.FileResponse:
"""
Download the agent file by streaming its content.
Args:
agent_id (str): The ID of the agent to download.
version (Optional[int]): Specific version of the agent to download.
Returns:
StreamingResponse: A streaming response containing the agent's graph data.
Raises:
HTTPException: If the agent is not found or an unexpected error occurs.
"""
graph_data = await backend.server.v2.store.db.get_agent(
store_listing_version_id=store_listing_version_id, version_id=version
)
graph_data.clean_graph()
graph_date_dict = jsonable_encoder(graph_data)
def remove_credentials(obj):
if obj and isinstance(obj, dict):
if "credentials" in obj:
del obj["credentials"]
if "creds" in obj:
del obj["creds"]
for value in obj.values():
remove_credentials(value)
elif isinstance(obj, list):
for item in obj:
remove_credentials(item)
return obj
graph_date_dict = remove_credentials(graph_date_dict)
file_name = f"agent_{store_listing_version_id}_v{version or 'latest'}.json"
# Sending graph as a stream (similar to marketplace v1)
with tempfile.NamedTemporaryFile(
mode="w", suffix=".json", delete=False
) as tmp_file:
tmp_file.write(json.dumps(graph_date_dict))
tmp_file.flush()
return fastapi.responses.FileResponse(
tmp_file.name, filename=file_name, media_type="application/json"
raise fastapi.HTTPException(
status_code=500, detail=f"Failed to generate image: {str(e)}"
)

View File

@@ -38,7 +38,7 @@ def create_test_graph() -> graph.Graph:
graph.Node(
block_id=FillTextTemplateBlock().id,
input_default={
"format": "{{a}}, {{b}}{{c}}",
"format": "{a}, {b}{c}",
"values_#_c": "!!!",
},
),

View File

@@ -12,7 +12,6 @@ from backend.util.settings import Config
# List of IP networks to block
BLOCKED_IP_NETWORKS = [
# --8<-- [start:BLOCKED_IP_NETWORKS]
# IPv4 Ranges
ipaddress.ip_network("0.0.0.0/8"), # "This" Network
ipaddress.ip_network("10.0.0.0/8"), # Private-Use
ipaddress.ip_network("127.0.0.0/8"), # Loopback
@@ -21,11 +20,6 @@ BLOCKED_IP_NETWORKS = [
ipaddress.ip_network("192.168.0.0/16"), # Private-Use
ipaddress.ip_network("224.0.0.0/4"), # Multicast
ipaddress.ip_network("240.0.0.0/4"), # Reserved for Future Use
# IPv6 Ranges
ipaddress.ip_network("::1/128"), # Loopback
ipaddress.ip_network("fc00::/7"), # Unique local addresses (ULA)
ipaddress.ip_network("fe80::/10"), # Link-local
ipaddress.ip_network("ff00::/8"), # Multicast
# --8<-- [end:BLOCKED_IP_NETWORKS]
]
@@ -33,6 +27,18 @@ ALLOWED_SCHEMES = ["http", "https"]
HOSTNAME_REGEX = re.compile(r"^[A-Za-z0-9.-]+$") # Basic DNS-safe hostname pattern
def _canonicalize_url(url: str) -> str:
# Strip spaces and trailing slashes
url = url.strip().strip("/")
# Ensure the URL starts with http:// or https://
if not url.startswith(("http://", "https://")):
url = "http://" + url
# Replace backslashes with forward slashes to avoid parsing ambiguities
url = url.replace("\\", "/")
return url
def _is_ip_blocked(ip: str) -> bool:
"""
Checks if the IP address is in a blocked network.
@@ -43,16 +49,11 @@ def _is_ip_blocked(ip: str) -> bool:
def validate_url(url: str, trusted_origins: list[str]) -> str:
"""
Validates the URL to prevent SSRF attacks by ensuring it does not point
to a private, link-local, or otherwise blocked IP address unless
the hostname is explicitly trusted.
Validates the URL to prevent SSRF attacks by ensuring it does not point to a private
or untrusted IP address, unless whitelisted.
"""
# Canonicalize URL
url = url.strip("/ ").replace("\\", "/")
url = _canonicalize_url(url)
parsed = urlparse(url)
if not parsed.scheme:
url = f"http://{url}"
parsed = urlparse(url)
# Check scheme
if parsed.scheme not in ALLOWED_SCHEMES:
@@ -60,7 +61,7 @@ def validate_url(url: str, trusted_origins: list[str]) -> str:
f"Scheme '{parsed.scheme}' is not allowed. Only HTTP/HTTPS are supported."
)
# Validate and IDNA encode hostname
# Validate and IDNA encode the hostname
if not parsed.hostname:
raise ValueError("Invalid URL: No hostname found.")
@@ -74,11 +75,11 @@ def validate_url(url: str, trusted_origins: list[str]) -> str:
if not HOSTNAME_REGEX.match(ascii_hostname):
raise ValueError("Hostname contains invalid characters.")
# Rebuild URL with IDNA-encoded hostname
# Rebuild the URL with the normalized, IDNA-encoded hostname
parsed = parsed._replace(netloc=ascii_hostname)
url = str(urlunparse(parsed))
# If hostname is trusted, skip IP-based checks
# Check if hostname is a trusted origin (exact match)
if ascii_hostname in trusted_origins:
return url
@@ -91,12 +92,11 @@ def validate_url(url: str, trusted_origins: list[str]) -> str:
if not ip_addresses:
raise ValueError(f"No IP addresses found for {ascii_hostname}")
# Block any IP address that belongs to a blocked range
for ip_str in ip_addresses:
if _is_ip_blocked(ip_str):
# Check if any resolved IP address falls into blocked ranges
for ip in ip_addresses:
if _is_ip_blocked(ip):
raise ValueError(
f"Access to blocked or private IP address {ip_str} "
f"for hostname {ascii_hostname} is not allowed."
f"Access to private IP address {ip} for hostname {ascii_hostname} is not allowed."
)
return url
@@ -104,9 +104,7 @@ def validate_url(url: str, trusted_origins: list[str]) -> str:
class Requests:
"""
A wrapper around the requests library that validates URLs before
making requests, preventing SSRF by blocking private networks and
other disallowed address spaces.
A wrapper around the requests library that validates URLs before making requests.
"""
def __init__(
@@ -130,16 +128,13 @@ class Requests:
def request(
self, method, url, headers=None, allow_redirects=False, *args, **kwargs
) -> req.Response:
# Merge any extra headers
if self.extra_headers is not None:
headers = {**(headers or {}), **self.extra_headers}
# Validate the URL (with optional extra validator)
url = validate_url(url, self.trusted_origins)
if self.extra_url_validator is not None:
url = self.extra_url_validator(url)
# Perform the request
response = req.request(
method,
url,

View File

@@ -153,11 +153,6 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
description="The name of the Google Cloud Storage bucket for media files",
)
scheduler_db_pool_size: int = Field(
default=3,
description="The pool size for the scheduler database connection pool",
)
@field_validator("platform_base_url", "frontend_base_url")
@classmethod
def validate_platform_base_url(cls, v: str, info: ValidationInfo) -> str:
@@ -304,10 +299,7 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
jina_api_key: str = Field(default="", description="Jina API Key")
unreal_speech_api_key: str = Field(default="", description="Unreal Speech API Key")
fal_api_key: str = Field(default="", description="FAL API key")
exa_api_key: str = Field(default="", description="Exa API key")
e2b_api_key: str = Field(default="", description="E2B API key")
nvidia_api_key: str = Field(default="", description="Nvidia API key")
fal_key: str = Field(default="", description="FAL API key")
# Add more secret fields as needed

View File

@@ -1,11 +1,11 @@
import logging
import time
from typing import Sequence, cast
from typing import Sequence
from backend.data import db
from backend.data.block import Block, BlockSchema, initialize_blocks
from backend.data.block import Block, initialize_blocks
from backend.data.execution import ExecutionResult, ExecutionStatus
from backend.data.model import _BaseCredentials
from backend.data.model import CREDENTIALS_FIELD_NAME
from backend.data.user import create_default_user
from backend.executor import DatabaseManager, ExecutionManager, ExecutionScheduler
from backend.server.rest_api import AgentServer
@@ -65,9 +65,6 @@ async def wait_execution(
if status == ExecutionStatus.FAILED:
log.info("Execution failed")
raise Exception("Execution failed")
if status == ExecutionStatus.TERMINATED:
log.info("Execution terminated")
raise Exception("Execution terminated")
return status == ExecutionStatus.COMPLETED
# Wait for the executions to complete
@@ -103,22 +100,14 @@ def execute_block_test(block: Block):
else:
log.info(f"{prefix} mock {mock_name} not found in block")
# Populate credentials argument(s)
extra_exec_kwargs = {}
input_model = cast(type[BlockSchema], block.input_schema)
credentials_input_fields = input_model.get_credentials_fields()
if len(credentials_input_fields) == 1 and isinstance(
block.test_credentials, _BaseCredentials
):
field_name = next(iter(credentials_input_fields))
extra_exec_kwargs[field_name] = block.test_credentials
elif credentials_input_fields and block.test_credentials:
if not isinstance(block.test_credentials, dict):
raise TypeError(f"Block {block.name} has no usable test credentials")
else:
for field_name in credentials_input_fields:
if field_name in block.test_credentials:
extra_exec_kwargs[field_name] = block.test_credentials[field_name]
if CREDENTIALS_FIELD_NAME in block.input_schema.model_fields:
if not block.test_credentials:
raise ValueError(
f"{prefix} requires credentials but has no test_credentials"
)
extra_exec_kwargs[CREDENTIALS_FIELD_NAME] = block.test_credentials
for input_data in block.test_input:
log.info(f"{prefix} in: {input_data}")

View File

@@ -1,3 +1,5 @@
import re
from jinja2 import BaseLoader
from jinja2.sandbox import SandboxedEnvironment
@@ -13,5 +15,8 @@ class TextFormatter:
self.env.globals.clear()
def format_string(self, template_str: str, values=None, **kwargs) -> str:
# For python.format compatibility: replace all {...} with {{..}}.
# But avoid replacing {{...}} to {{{...}}}.
template_str = re.sub(r"(?<!{){[ a-zA-Z0-9_]+}", r"{\g<0>}", template_str)
template = self.env.from_string(template_str)
return template.render(values or {}, **kwargs)

View File

@@ -1,50 +0,0 @@
BEGIN;
DROP VIEW IF EXISTS "StoreAgent";
CREATE VIEW "StoreAgent" AS
WITH ReviewStats AS (
SELECT sl."id" AS "storeListingId",
COUNT(sr.id) AS review_count,
AVG(CAST(sr.score AS DECIMAL)) AS avg_rating
FROM "StoreListing" sl
JOIN "StoreListingVersion" slv ON slv."storeListingId" = sl."id"
JOIN "StoreListingReview" sr ON sr."storeListingVersionId" = slv.id
WHERE sl."isDeleted" = FALSE
GROUP BY sl."id"
),
AgentRuns AS (
SELECT "agentGraphId", COUNT(*) AS run_count
FROM "AgentGraphExecution"
GROUP BY "agentGraphId"
)
SELECT
sl.id AS listing_id,
slv.id AS "storeListingVersionId",
slv."createdAt" AS updated_at,
slv.slug,
slv.name AS agent_name,
slv."videoUrl" AS agent_video,
COALESCE(slv."imageUrls", ARRAY[]::TEXT[]) AS agent_image,
slv."isFeatured" AS featured,
p.username AS creator_username,
p."avatarUrl" AS creator_avatar,
slv."subHeading" AS sub_heading,
slv.description,
slv.categories,
COALESCE(ar.run_count, 0) AS runs,
CAST(COALESCE(rs.avg_rating, 0.0) AS DOUBLE PRECISION) AS rating,
ARRAY_AGG(DISTINCT CAST(slv.version AS TEXT)) AS versions
FROM "StoreListing" sl
JOIN "AgentGraph" a ON sl."agentId" = a.id AND sl."agentVersion" = a."version"
LEFT JOIN "Profile" p ON sl."owningUserId" = p."userId"
LEFT JOIN "StoreListingVersion" slv ON slv."storeListingId" = sl.id
LEFT JOIN ReviewStats rs ON sl.id = rs."storeListingId"
LEFT JOIN AgentRuns ar ON a.id = ar."agentGraphId"
WHERE sl."isDeleted" = FALSE
AND sl."isApproved" = TRUE
GROUP BY sl.id, slv.id, slv.slug, slv."createdAt", slv.name, slv."videoUrl", slv."imageUrls", slv."isFeatured",
p.username, p."avatarUrl", slv."subHeading", slv.description, slv.categories,
ar.run_count, rs.avg_rating;
COMMIT;

View File

@@ -1,2 +0,0 @@
-- Add "TERMINATED" to execution status enum type
ALTER TYPE "AgentExecutionStatus" ADD VALUE 'TERMINATED';

View File

@@ -1,86 +0,0 @@
/*
Warnings:
- You are about replace a single brace string input format for the following blocks:
- AgentOutputBlock
- FillTextTemplateBlock
- AITextGeneratorBlock
- AIStructuredResponseGeneratorBlock
with a double brace format.
- This migration can be slow for a large updated AgentNode tables.
*/
BEGIN;
SET LOCAL statement_timeout = '10min';
WITH to_update AS (
SELECT
"id",
"agentBlockId",
"constantInput"::jsonb AS j
FROM "AgentNode"
WHERE
"agentBlockId" IN (
'363ae599-353e-4804-937e-b2ee3cef3da4', -- AgentOutputBlock
'db7d8f02-2f44-4c55-ab7a-eae0941f0c30', -- FillTextTemplateBlock
'1f292d4a-41a4-4977-9684-7c8d560b9f91', -- AITextGeneratorBlock
'ed55ac19-356e-4243-a6cb-bc599e9b716f' -- AIStructuredResponseGeneratorBlock
)
AND (
"constantInput"::jsonb->>'format' ~ '(?<!\{)\{\s*([a-zA-Z_][a-zA-Z0-9_]*)\s*\}(?!\})'
OR "constantInput"::jsonb->>'prompt' ~ '(?<!\{)\{\s*([a-zA-Z_][a-zA-Z0-9_]*)\s*\}(?!\})'
OR "constantInput"::jsonb->>'sys_prompt' ~ '(?<!\{)\{\s*([a-zA-Z_][a-zA-Z0-9_]*)\s*\}(?!\})'
)
),
updated_rows AS (
SELECT
"id",
"agentBlockId",
(
j
-- Update "format" if it has a single-brace placeholder
|| CASE WHEN j->>'format' ~ '(?<!\{)\{\s*([a-zA-Z_][a-zA-Z0-9_]*)\s*\}(?!\})'
THEN jsonb_build_object(
'format',
regexp_replace(
j->>'format',
'(?<!\{)\{\s*([a-zA-Z_][a-zA-Z0-9_]*)\s*\}(?!\})',
'{{\1}}',
'g'
)
)
ELSE '{}'::jsonb
END
-- Update "prompt" if it has a single-brace placeholder
|| CASE WHEN j->>'prompt' ~ '(?<!\{)\{\s*([a-zA-Z_][a-zA-Z0-9_]*)\s*\}(?!\})'
THEN jsonb_build_object(
'prompt',
regexp_replace(
j->>'prompt',
'(?<!\{)\{\s*([a-zA-Z_][a-zA-Z0-9_]*)\s*\}(?!\})',
'{{\1}}',
'g'
)
)
ELSE '{}'::jsonb
END
-- Update "sys_prompt" if it has a single-brace placeholder
|| CASE WHEN j->>'sys_prompt' ~ '(?<!\{)\{\s*([a-zA-Z_][a-zA-Z0-9_]*)\s*\}(?!\})'
THEN jsonb_build_object(
'sys_prompt',
regexp_replace(
j->>'sys_prompt',
'(?<!\{)\{\s*([a-zA-Z_][a-zA-Z0-9_]*)\s*\}(?!\})',
'{{\1}}',
'g'
)
)
ELSE '{}'::jsonb
END
)::text AS "newConstantInput"
FROM to_update
)
UPDATE "AgentNode" AS an
SET "constantInput" = ur."newConstantInput"
FROM updated_rows ur
WHERE an."id" = ur."id";
COMMIT;

File diff suppressed because it is too large Load Diff

View File

@@ -4,7 +4,7 @@ version = "0.3.4"
description = "A platform for building AI-powered agentic workflows"
authors = ["AutoGPT <info@agpt.co>"]
readme = "README.md"
packages = [{ include = "backend", format = "sdist" }]
packages = [{ include = "backend" }]
[tool.poetry.dependencies]
@@ -14,6 +14,7 @@ anthropic = "^0.40.0"
apscheduler = "^3.11.0"
autogpt-libs = { path = "../autogpt_libs", develop = true }
click = "^8.1.7"
croniter = "^5.0.1"
discord-py = "^2.4.0"
e2b-code-interpreter = "^1.0.1"
fastapi = "^0.115.5"
@@ -39,7 +40,7 @@ python-dotenv = "^1.0.1"
redis = "^5.2.0"
sentry-sdk = "2.19.2"
strenum = "^0.4.9"
supabase = "2.11.0"
supabase = "^2.10.0"
tenacity = "^9.0.0"
tweepy = "^4.14.0"
uvicorn = { extras = ["standard"], version = "^0.34.0" }

View File

@@ -216,7 +216,6 @@ enum AgentExecutionStatus {
QUEUED
RUNNING
COMPLETED
TERMINATED
FAILED
}
@@ -639,4 +638,4 @@ enum APIKeyStatus {
ACTIVE
REVOKED
SUSPENDED
}
}

View File

@@ -90,12 +90,7 @@ async def test_get_input_schema(server: SpinTestServer):
Node(
id="node_0_a",
block_id=input_block,
input_default={
"name": "in_key_a",
"title": "Key A",
"value": "A",
"advanced": True,
},
input_default={"name": "in_key_a", "title": "Key A", "value": "A"},
metadata={"id": "node_0_a"},
),
Node(
@@ -143,8 +138,8 @@ async def test_get_input_schema(server: SpinTestServer):
)
class ExpectedInputSchema(BlockSchema):
in_key_a: Any = SchemaField(title="Key A", default="A", advanced=True)
in_key_b: Any = SchemaField(title="in_key_b", advanced=False)
in_key_a: Any = SchemaField(title="Key A", default="A", advanced=False)
in_key_b: Any = SchemaField(title="in_key_b", advanced=True)
class ExpectedOutputSchema(BlockSchema):
out_key: Any = SchemaField(
@@ -160,45 +155,3 @@ async def test_get_input_schema(server: SpinTestServer):
output_schema = created_graph.output_schema
output_schema["title"] = "ExpectedOutputSchema"
assert output_schema == ExpectedOutputSchema.jsonschema()
@pytest.mark.asyncio(scope="session")
async def test_clean_graph(server: SpinTestServer):
"""
Test the clean_graph function that:
1. Clears input block values
2. Removes credentials from nodes
"""
# Create a graph with input blocks and credentials
graph = Graph(
id="test_clean_graph",
name="Test Clean Graph",
description="Test graph cleaning",
nodes=[
Node(
id="input_node",
block_id=AgentInputBlock().id,
input_default={
"name": "test_input",
"value": "test value",
"description": "Test input description",
},
),
],
links=[],
)
# Create graph and get model
create_graph = CreateGraph(graph=graph)
created_graph = await server.agent_server.test_create_graph(
create_graph, DEFAULT_USER_ID
)
# Clean the graph
created_graph.clean_graph()
# # Verify input block value is cleared
input_node = next(
n for n in created_graph.nodes if n.block_id == AgentInputBlock().id
)
assert input_node.input_default["value"] == ""

View File

@@ -102,7 +102,7 @@ async def assert_sample_graph_executions(
assert exec.graph_exec_id == graph_exec_id
assert exec.output_data == {"output": ["Hello, World!!!"]}
assert exec.input_data == {
"format": "{{a}}, {{b}}{{c}}",
"format": "{a}, {b}{c}",
"values": {"a": "Hello", "b": "World", "c": "!!!"},
"values_#_a": "Hello",
"values_#_b": "World",

View File

@@ -144,6 +144,51 @@ services:
networks:
- app-network
market:
build:
context: ../
dockerfile: autogpt_platform/market/Dockerfile
develop:
watch:
- path: ./
target: autogpt_platform/market/
action: rebuild
depends_on:
db:
condition: service_healthy
market-migrations:
condition: service_completed_successfully
environment:
- SUPABASE_URL=http://kong:8000
- SUPABASE_JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
- SUPABASE_ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
- DATABASE_URL=postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=market
- BACKEND_CORS_ALLOW_ORIGINS="http://localhost:3000,http://127.0.0.1:3000"
ports:
- "8015:8015"
networks:
- app-network
market-migrations:
build:
context: ../
dockerfile: autogpt_platform/market/Dockerfile
command: ["sh", "-c", "poetry run prisma migrate deploy"]
develop:
watch:
- path: ./
target: autogpt_platform/market/
action: rebuild
depends_on:
db:
condition: service_healthy
environment:
- SUPABASE_URL=http://kong:8000
- SUPABASE_JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
- SUPABASE_ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
- DATABASE_URL=postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=market
networks:
- app-network
# frontend:
# build:
# context: ../

View File

@@ -51,6 +51,18 @@ services:
file: ./docker-compose.platform.yml
service: websocket_server
market:
<<: *agpt-services
extends:
file: ./docker-compose.platform.yml
service: market
market-migrations:
<<: *agpt-services
extends:
file: ./docker-compose.platform.yml
service: market-migrations
# frontend:
# <<: *agpt-services
# extends:

View File

@@ -24,8 +24,8 @@
],
"dependencies": {
"@faker-js/faker": "^9.3.0",
"@hookform/resolvers": "^3.10.0",
"@next/third-parties": "^15.1.3",
"@hookform/resolvers": "^3.9.1",
"@next/third-parties": "^15.0.4",
"@radix-ui/react-alert-dialog": "^1.1.4",
"@radix-ui/react-avatar": "^1.1.1",
"@radix-ui/react-checkbox": "^1.1.2",
@@ -59,29 +59,29 @@
"dotenv": "^16.4.7",
"elliptic": "6.6.1",
"embla-carousel-react": "^8.3.0",
"framer-motion": "^11.16.0",
"framer-motion": "^11.15.0",
"geist": "^1.3.1",
"launchdarkly-react-client-sdk": "^3.6.0",
"lucide-react": "^0.469.0",
"lucide-react": "^0.468.0",
"moment": "^2.30.1",
"next": "^14.2.13",
"next-themes": "^0.4.4",
"react": "^18",
"react-day-picker": "^9.5.0",
"react-day-picker": "^9.4.4",
"react-dom": "^18",
"react-hook-form": "^7.54.2",
"react-hook-form": "^7.54.0",
"react-icons": "^5.4.0",
"react-markdown": "^9.0.3",
"react-modal": "^3.16.3",
"react-markdown": "^9.0.1",
"react-modal": "^3.16.1",
"react-shepherd": "^6.1.6",
"recharts": "^2.14.1",
"tailwind-merge": "^2.6.0",
"tailwind-merge": "^2.5.5",
"tailwindcss-animate": "^1.0.7",
"uuid": "^11.0.4",
"uuid": "^11.0.3",
"zod": "^3.23.8"
},
"devDependencies": {
"@chromatic-com/storybook": "^3.2.3",
"@chromatic-com/storybook": "^3.2.2",
"@playwright/test": "^1.48.2",
"@storybook/addon-a11y": "^8.3.5",
"@storybook/addon-essentials": "^8.4.2",
@@ -92,25 +92,25 @@
"@storybook/nextjs": "^8.4.2",
"@storybook/react": "^8.3.5",
"@storybook/test": "^8.3.5",
"@storybook/test-runner": "^0.21.0",
"@storybook/test-runner": "^0.20.1",
"@types/negotiator": "^0.6.3",
"@types/node": "^22.10.5",
"@types/node": "^22.9.0",
"@types/react": "^18",
"@types/react-dom": "^18",
"@types/react-modal": "^3.16.3",
"axe-playwright": "^2.0.3",
"chromatic": "^11.22.0",
"concurrently": "^9.1.2",
"chromatic": "^11.12.5",
"concurrently": "^9.0.1",
"eslint": "^8",
"eslint-config-next": "15.1.3",
"eslint-plugin-storybook": "^0.11.2",
"eslint-config-next": "15.1.0",
"eslint-plugin-storybook": "^0.11.0",
"msw": "^2.7.0",
"msw-storybook-addon": "^2.0.3",
"postcss": "^8",
"prettier": "^3.3.3",
"prettier-plugin-tailwindcss": "^0.6.9",
"storybook": "^8.4.5",
"tailwindcss": "^3.4.17",
"tailwindcss": "^3.4.15",
"typescript": "^5"
},
"packageManager": "yarn@1.22.22+sha512.a6b2f7906b721bba3d67d4aff083df04dad64c399707841b7acf00f6b133b7ac24255f2652fa22ae3534329dc6180534e98d17432037ff6fd140556e2bb3137e",

View File

@@ -1,6 +1,6 @@
import React from "react";
import type { Metadata } from "next";
import { Inter, Poppins } from "next/font/google";
import { Inter } from "next/font/google";
import { Providers } from "@/app/providers";
import { cn } from "@/lib/utils";
import { Navbar } from "@/components/agptui/Navbar";
@@ -10,16 +10,8 @@ import TallyPopupSimple from "@/components/TallyPopup";
import { GoogleAnalytics } from "@next/third-parties/google";
import { Toaster } from "@/components/ui/toaster";
import { IconType } from "@/components/ui/icons";
import { GeistSans } from "geist/font/sans";
import { GeistMono } from "geist/font/mono";
// Fonts
const inter = Inter({ subsets: ["latin"], variable: "--font-inter" });
const poppins = Poppins({
subsets: ["latin"],
weight: ["400", "500", "600", "700"],
variable: "--font-poppins",
});
const inter = Inter({ subsets: ["latin"] });
export const metadata: Metadata = {
title: "NextGen AutoGPT",
@@ -32,10 +24,7 @@ export default async function RootLayout({
children: React.ReactNode;
}>) {
return (
<html
lang="en"
className={`${GeistSans.variable} ${GeistMono.variable} ${poppins.variable} ${inter.variable}`}
>
<html lang="en">
<body className={cn("antialiased transition-colors", inter.className)}>
<Providers
attribute="class"

View File

@@ -5,7 +5,11 @@ import { z } from "zod";
import * as Sentry from "@sentry/nextjs";
import getServerSupabase from "@/lib/supabase/getServerSupabase";
import BackendAPI from "@/lib/autogpt-server-api";
import { loginFormSchema, LoginProvider } from "@/types/auth";
const loginFormSchema = z.object({
email: z.string().email().min(2).max(64),
password: z.string().min(6).max(64),
});
export async function logout() {
return await Sentry.withServerActionInstrumentation(
@@ -21,7 +25,7 @@ export async function logout() {
const { error } = await supabase.auth.signOut();
if (error) {
console.error("Error logging out", error);
console.log("Error logging out", error);
return error.message;
}
@@ -43,13 +47,18 @@ export async function login(values: z.infer<typeof loginFormSchema>) {
// We are sure that the values are of the correct type because zod validates the form
const { data, error } = await supabase.auth.signInWithPassword(values);
await api.createUser();
if (error) {
console.error("Error logging in", error);
console.log("Error logging in", error);
if (error.status == 400) {
// Hence User is not present
redirect("/login");
}
return error.message;
}
await api.createUser();
if (data.session) {
await supabase.auth.setSession(data.session);
}
@@ -59,34 +68,38 @@ export async function login(values: z.infer<typeof loginFormSchema>) {
});
}
export async function providerLogin(provider: LoginProvider) {
export async function signup(values: z.infer<typeof loginFormSchema>) {
"use server";
return await Sentry.withServerActionInstrumentation(
"providerLogin",
"signup",
{},
async () => {
const supabase = getServerSupabase();
const api = new BackendAPI();
if (!supabase) {
redirect("/error");
}
const { error } = await supabase!.auth.signInWithOAuth({
provider: provider,
options: {
redirectTo:
process.env.AUTH_CALLBACK_URL ??
`http://localhost:3000/auth/callback`,
},
});
// We are sure that the values are of the correct type because zod validates the form
const { data, error } = await supabase.auth.signUp(values);
if (error) {
console.error("Error logging in", error);
console.log("Error signing up", error);
if (error.message.includes("P0001")) {
return "Please join our waitlist for your turn: https://agpt.co/waitlist";
}
if (error.code?.includes("user_already_exists")) {
redirect("/login");
}
return error.message;
}
await api.createUser();
console.log("Logged in");
if (data.session) {
await supabase.auth.setSession(data.session);
}
console.log("Signed up");
revalidatePath("/", "layout");
redirect("/store/profile");
},
);
}

View File

@@ -1,8 +1,10 @@
"use client";
import { login, providerLogin } from "./actions";
import { login, signup } from "./actions";
import { Button } from "@/components/ui/button";
import {
Form,
FormControl,
FormDescription,
FormField,
FormItem,
FormLabel,
@@ -12,69 +14,40 @@ import { useForm } from "react-hook-form";
import { Input } from "@/components/ui/input";
import { z } from "zod";
import { zodResolver } from "@hookform/resolvers/zod";
import { useCallback, useState } from "react";
import { PasswordInput } from "@/components/PasswordInput";
import { FaGoogle, FaGithub, FaDiscord, FaSpinner } from "react-icons/fa";
import { useState } from "react";
import { useRouter } from "next/navigation";
import Link from "next/link";
import { Checkbox } from "@/components/ui/checkbox";
import useSupabase from "@/hooks/useSupabase";
import Spinner from "@/components/Spinner";
import {
AuthCard,
AuthHeader,
AuthButton,
AuthFeedback,
AuthBottomText,
PasswordInput,
} from "@/components/auth";
import { loginFormSchema } from "@/types/auth";
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
const loginFormSchema = z.object({
email: z.string().email().min(2).max(64),
password: z.string().min(6).max(64),
agreeToTerms: z.boolean().refine((value) => value === true, {
message: "You must agree to the Terms of Use and Privacy Policy",
}),
});
export default function LoginPage() {
const { supabase, user, isUserLoading } = useSupabase();
const [feedback, setFeedback] = useState<string | null>(null);
const router = useRouter();
const [isLoading, setIsLoading] = useState(false);
const api = useBackendAPI();
const form = useForm<z.infer<typeof loginFormSchema>>({
resolver: zodResolver(loginFormSchema),
defaultValues: {
email: "",
password: "",
agreeToTerms: false,
},
});
// TODO: uncomment when we enable social login
// const onProviderLogin = useCallback(async (
// provider: LoginProvider,
// ) => {
// setIsLoading(true);
// const error = await providerLogin(provider);
// setIsLoading(false);
// if (error) {
// setFeedback(error);
// return;
// }
// setFeedback(null);
// }, [supabase]);
const onLogin = useCallback(
async (data: z.infer<typeof loginFormSchema>) => {
setIsLoading(true);
if (!(await form.trigger())) {
setIsLoading(false);
return;
}
const error = await login(data);
setIsLoading(false);
if (error) {
setFeedback(error);
return;
}
setFeedback(null);
},
[form],
);
if (user) {
console.debug("User exists, redirecting to /");
router.push("/");
@@ -92,60 +65,179 @@ export default function LoginPage() {
);
}
async function handleSignInWithProvider(
provider: "google" | "github" | "discord",
) {
const { data, error } = await supabase!.auth.signInWithOAuth({
provider: provider,
options: {
redirectTo:
process.env.AUTH_CALLBACK_URL ??
`http://localhost:3000/auth/callback`,
},
});
await api.createUser();
if (!error) {
setFeedback(null);
return;
}
setFeedback(error.message);
}
const onLogin = async (data: z.infer<typeof loginFormSchema>) => {
setIsLoading(true);
const error = await login(data);
setIsLoading(false);
if (error) {
setFeedback(error);
return;
}
setFeedback(null);
};
return (
<AuthCard>
<AuthHeader>Login to your account</AuthHeader>
<Form {...form}>
<form onSubmit={form.handleSubmit(onLogin)}>
<FormField
control={form.control}
name="email"
render={({ field }) => (
<FormItem className="mb-6">
<FormLabel>Email</FormLabel>
<FormControl>
<Input placeholder="m@example.com" {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="password"
render={({ field }) => (
<FormItem className="mb-6">
<FormLabel className="flex w-full items-center justify-between">
<span>Password</span>
<Link
href="/reset_password"
className="text-sm font-normal leading-normal text-black underline"
>
Forgot your password?
</Link>
</FormLabel>
<FormControl>
<PasswordInput {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<AuthButton
onClick={() => onLogin(form.getValues())}
isLoading={isLoading}
type="submit"
<div className="flex h-[80vh] items-center justify-center">
<div className="w-full max-w-md space-y-6 rounded-lg p-8 shadow-md">
<h1 className="text-lg font-medium">Log in to your Account </h1>
{/* <div className="mb-6 space-y-2">
<Button
className="w-full"
onClick={() => handleSignInWithProvider("google")}
variant="outline"
type="button"
disabled={isLoading}
>
Login
</AuthButton>
</form>
<AuthFeedback message={feedback} isError={true} />
</Form>
<AuthBottomText
text="Don't have an account?"
linkText="Sign up"
href="/signup"
/>
</AuthCard>
<FaGoogle className="mr-2 h-4 w-4" />
Sign in with Google
</Button>
<Button
className="w-full"
onClick={() => handleSignInWithProvider("github")}
variant="outline"
type="button"
disabled={isLoading}
>
<FaGithub className="mr-2 h-4 w-4" />
Sign in with GitHub
</Button>
<Button
className="w-full"
onClick={() => handleSignInWithProvider("discord")}
variant="outline"
type="button"
disabled={isLoading}
>
<FaDiscord className="mr-2 h-4 w-4" />
Sign in with Discord
</Button>
</div> */}
<Form {...form}>
<form onSubmit={form.handleSubmit(onLogin)}>
<FormField
control={form.control}
name="email"
render={({ field }) => (
<FormItem className="mb-4">
<FormLabel>Email</FormLabel>
<FormControl>
<Input placeholder="user@email.com" {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="password"
render={({ field }) => (
<FormItem>
<FormLabel>Password</FormLabel>
<FormControl>
<PasswordInput placeholder="password" {...field} />
</FormControl>
<FormDescription>
Password needs to be at least 6 characters long
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="agreeToTerms"
render={({ field }) => (
<FormItem className="mt-4 flex flex-row items-start space-x-3 space-y-0">
<FormControl>
<Checkbox
checked={field.value}
onCheckedChange={field.onChange}
/>
</FormControl>
<div className="space-y-1 leading-none">
<FormLabel>
I agree to the{" "}
<Link
href="https://auto-gpt.notion.site/Terms-of-Use-11400ef5bece80d0b087d7831c5fd6bf"
className="underline"
>
Terms of Use
</Link>{" "}
and{" "}
<Link
href="https://www.notion.so/auto-gpt/Privacy-Policy-ab11c9c20dbd4de1a15dcffe84d77984"
className="underline"
>
Privacy Policy
</Link>
</FormLabel>
<FormMessage />
</div>
</FormItem>
)}
/>
<div className="mb-6 mt-8 flex w-full space-x-4">
<Button
className="flex w-full justify-center"
type="submit"
disabled={isLoading}
onClick={async () => {
setIsLoading(true);
const values = form.getValues();
const result = await login(values);
if (result) {
setFeedback(result);
}
setIsLoading(false);
}}
>
{isLoading ? <FaSpinner className="animate-spin" /> : "Log in"}
</Button>
<Button
className="flex w-full justify-center"
type="button"
disabled={isLoading}
onClick={async () => {
setIsLoading(true);
const values = form.getValues();
const result = await signup(values);
if (result) {
setFeedback(result);
}
setIsLoading(false);
}}
>
{isLoading ? <FaSpinner className="animate-spin" /> : "Sign up"}
</Button>
</div>
</form>
<p className="text-sm text-red-500">{feedback}</p>
</Form>
<Link href="/reset_password" className="text-sm">
Forgot your password?
</Link>
</div>
</div>
);
}

View File

@@ -1,7 +0,0 @@
"use client";
import { redirect } from "next/navigation";
export default function Page() {
redirect("/store");
}

View File

@@ -1,60 +0,0 @@
"use server";
import getServerSupabase from "@/lib/supabase/getServerSupabase";
import { redirect } from "next/navigation";
import * as Sentry from "@sentry/nextjs";
import { headers } from "next/headers";
export async function sendResetEmail(email: string) {
return await Sentry.withServerActionInstrumentation(
"sendResetEmail",
{},
async () => {
const supabase = getServerSupabase();
const headersList = headers();
const host = headersList.get("host");
const protocol =
process.env.NODE_ENV === "development" ? "http" : "https";
const origin = `${protocol}://${host}`;
if (!supabase) {
redirect("/error");
}
const { error } = await supabase.auth.resetPasswordForEmail(email, {
redirectTo: `${origin}/reset_password`,
});
if (error) {
console.error("Error sending reset email", error);
return error.message;
}
console.log("Reset email sent");
redirect("/reset_password");
},
);
}
export async function changePassword(password: string) {
return await Sentry.withServerActionInstrumentation(
"changePassword",
{},
async () => {
const supabase = getServerSupabase();
if (!supabase) {
redirect("/error");
}
const { error } = await supabase.auth.updateUser({ password });
if (error) {
console.error("Error changing password", error);
return error.message;
}
await supabase.auth.signOut();
redirect("/login");
},
);
}

View File

@@ -1,15 +1,8 @@
"use client";
import {
AuthCard,
AuthHeader,
AuthButton,
AuthFeedback,
PasswordInput,
} from "@/components/auth";
import { Button } from "@/components/ui/button";
import {
Form,
FormControl,
FormDescription,
FormField,
FormItem,
FormLabel,
@@ -17,87 +10,54 @@ import {
} from "@/components/ui/form";
import { Input } from "@/components/ui/input";
import useSupabase from "@/hooks/useSupabase";
import { sendEmailFormSchema, changePasswordFormSchema } from "@/types/auth";
import { zodResolver } from "@hookform/resolvers/zod";
import { useCallback, useState } from "react";
import { useRouter } from "next/navigation";
import { useState } from "react";
import { useForm } from "react-hook-form";
import { FaSpinner } from "react-icons/fa";
import { z } from "zod";
import { changePassword, sendResetEmail } from "./actions";
import Spinner from "@/components/Spinner";
const emailFormSchema = z.object({
email: z.string().email().min(2).max(64),
});
const resetPasswordFormSchema = z
.object({
password: z.string().min(6).max(64),
confirmPassword: z.string().min(6).max(64),
})
.refine((data) => data.password === data.confirmPassword, {
message: "Passwords don't match",
path: ["confirmPassword"],
});
export default function ResetPasswordPage() {
const { supabase, user, isUserLoading } = useSupabase();
const router = useRouter();
const [isLoading, setIsLoading] = useState(false);
const [feedback, setFeedback] = useState<string | null>(null);
const [isError, setIsError] = useState(false);
const [disabled, setDisabled] = useState(false);
const sendEmailForm = useForm<z.infer<typeof sendEmailFormSchema>>({
resolver: zodResolver(sendEmailFormSchema),
const emailForm = useForm<z.infer<typeof emailFormSchema>>({
resolver: zodResolver(emailFormSchema),
defaultValues: {
email: "",
},
});
const changePasswordForm = useForm<z.infer<typeof changePasswordFormSchema>>({
resolver: zodResolver(changePasswordFormSchema),
const resetPasswordForm = useForm<z.infer<typeof resetPasswordFormSchema>>({
resolver: zodResolver(resetPasswordFormSchema),
defaultValues: {
password: "",
confirmPassword: "",
},
});
const onSendEmail = useCallback(
async (data: z.infer<typeof sendEmailFormSchema>) => {
setIsLoading(true);
setFeedback(null);
if (!(await sendEmailForm.trigger())) {
setIsLoading(false);
return;
}
const error = await sendResetEmail(data.email);
setIsLoading(false);
if (error) {
setFeedback(error);
setIsError(true);
return;
}
setDisabled(true);
setFeedback(
"Password reset email sent if user exists. Please check your email.",
);
setIsError(false);
},
[sendEmailForm],
);
const onChangePassword = useCallback(
async (data: z.infer<typeof changePasswordFormSchema>) => {
setIsLoading(true);
setFeedback(null);
if (!(await changePasswordForm.trigger())) {
setIsLoading(false);
return;
}
const error = await changePassword(data.password);
setIsLoading(false);
if (error) {
setFeedback(error);
setIsError(true);
return;
}
setFeedback("Password changed successfully. Redirecting to login.");
setIsError(false);
},
[changePasswordForm],
);
if (isUserLoading) {
return <Spinner />;
return (
<div className="flex h-[80vh] items-center justify-center">
<FaSpinner className="mr-2 h-16 w-16 animate-spin" />
</div>
);
}
if (!supabase) {
@@ -108,79 +68,147 @@ export default function ResetPasswordPage() {
);
}
async function onSendEmail(d: z.infer<typeof emailFormSchema>) {
setIsLoading(true);
setFeedback(null);
if (!(await emailForm.trigger())) {
setIsLoading(false);
return;
}
const { data, error } = await supabase!.auth.resetPasswordForEmail(
d.email,
{
redirectTo: `${window.location.origin}/reset_password`,
},
);
if (error) {
setFeedback(error.message);
setIsLoading(false);
return;
}
setFeedback("Password reset email sent. Please check your email.");
setIsLoading(false);
}
async function onResetPassword(d: z.infer<typeof resetPasswordFormSchema>) {
setIsLoading(true);
setFeedback(null);
if (!(await resetPasswordForm.trigger())) {
setIsLoading(false);
return;
}
const { data, error } = await supabase!.auth.updateUser({
password: d.password,
});
if (error) {
setFeedback(error.message);
setIsLoading(false);
return;
}
await supabase!.auth.signOut();
router.push("/login");
}
return (
<AuthCard>
<AuthHeader>Reset Password</AuthHeader>
{user ? (
<form onSubmit={changePasswordForm.handleSubmit(onChangePassword)}>
<Form {...changePasswordForm}>
<FormField
control={changePasswordForm.control}
name="password"
render={({ field }) => (
<FormItem className="mb-6">
<FormLabel>Password</FormLabel>
<FormControl>
<PasswordInput {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={changePasswordForm.control}
name="confirmPassword"
render={({ field }) => (
<FormItem className="mb-6">
<FormLabel>Confirm Password</FormLabel>
<FormControl>
<PasswordInput {...field} />
</FormControl>
<FormDescription className="text-sm font-normal leading-tight text-slate-500">
Password needs to be at least 6 characters long
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
<AuthButton
onClick={() => onChangePassword(changePasswordForm.getValues())}
isLoading={isLoading}
type="submit"
>
Update password
</AuthButton>
<AuthFeedback message={feedback} isError={isError} />
</Form>
</form>
) : (
<form onSubmit={sendEmailForm.handleSubmit(onSendEmail)}>
<Form {...sendEmailForm}>
<FormField
control={sendEmailForm.control}
name="email"
render={({ field }) => (
<FormItem className="mb-6">
<FormLabel>Email</FormLabel>
<FormControl>
<Input placeholder="m@example.com" {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<AuthButton
onClick={() => onSendEmail(sendEmailForm.getValues())}
isLoading={isLoading}
disabled={disabled}
type="submit"
>
Send reset email
</AuthButton>
<AuthFeedback message={feedback} isError={isError} />
</Form>
</form>
)}
</AuthCard>
<div className="flex h-full flex-col items-center justify-center">
<div className="w-full max-w-md">
<h1 className="text-center text-3xl font-bold">Reset Password</h1>
{user ? (
<form
onSubmit={resetPasswordForm.handleSubmit(onResetPassword)}
className="mt-6 space-y-6"
>
<Form {...resetPasswordForm}>
<FormField
control={resetPasswordForm.control}
name="password"
render={({ field }) => (
<FormItem className="mb-4">
<FormLabel>Password</FormLabel>
<FormControl>
<Input
type="password"
placeholder="password"
{...field}
/>
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={resetPasswordForm.control}
name="confirmPassword"
render={({ field }) => (
<FormItem className="mb">
<FormLabel>Confirm Password</FormLabel>
<FormControl>
<Input
type="password"
placeholder="password"
{...field}
/>
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<Button
type="submit"
className="w-full"
disabled={isLoading}
onClick={() => onResetPassword(resetPasswordForm.getValues())}
>
{isLoading ? <FaSpinner className="mr-2 animate-spin" /> : null}
Reset Password
</Button>
</Form>
</form>
) : (
<form
onSubmit={emailForm.handleSubmit(onSendEmail)}
className="mt-6 space-y-6"
>
<Form {...emailForm}>
<FormField
control={emailForm.control}
name="email"
render={({ field }) => (
<FormItem className="mb-4">
<FormLabel>Email</FormLabel>
<FormControl>
<Input placeholder="user@email.com" {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<Button
type="submit"
className="w-full"
disabled={isLoading}
onClick={() => onSendEmail(emailForm.getValues())}
>
{isLoading ? <FaSpinner className="mr-2 animate-spin" /> : null}
Send Reset Email
</Button>
{feedback ? (
<div className="text-center text-sm text-red-500">
{feedback}
</div>
) : null}
</Form>
</form>
)}
</div>
</div>
);
}

View File

@@ -1,44 +0,0 @@
"use server";
import { revalidatePath } from "next/cache";
import { redirect } from "next/navigation";
import { z } from "zod";
import * as Sentry from "@sentry/nextjs";
import getServerSupabase from "@/lib/supabase/getServerSupabase";
import { signupFormSchema } from "@/types/auth";
export async function signup(values: z.infer<typeof signupFormSchema>) {
"use server";
return await Sentry.withServerActionInstrumentation(
"signup",
{},
async () => {
const supabase = getServerSupabase();
if (!supabase) {
redirect("/error");
}
// We are sure that the values are of the correct type because zod validates the form
const { data, error } = await supabase.auth.signUp(values);
if (error) {
console.error("Error signing up", error);
// FIXME: supabase doesn't return the correct error message for this case
if (error.message.includes("P0001")) {
return "Please join our waitlist for your turn: https://agpt.co/waitlist";
}
if (error.code?.includes("user_already_exists")) {
redirect("/login");
}
return error.message;
}
if (data.session) {
await supabase.auth.setSession(data.session);
}
console.log("Signed up");
revalidatePath("/", "layout");
redirect("/store/profile");
},
);
}

View File

@@ -1,219 +0,0 @@
"use client";
import { signup } from "./actions";
import {
Form,
FormControl,
FormDescription,
FormField,
FormItem,
FormLabel,
FormMessage,
} from "@/components/ui/form";
import { useForm } from "react-hook-form";
import { Input } from "@/components/ui/input";
import { z } from "zod";
import { zodResolver } from "@hookform/resolvers/zod";
import { useCallback, useState } from "react";
import { useRouter } from "next/navigation";
import Link from "next/link";
import { Checkbox } from "@/components/ui/checkbox";
import useSupabase from "@/hooks/useSupabase";
import Spinner from "@/components/Spinner";
import {
AuthCard,
AuthHeader,
AuthButton,
AuthFeedback,
AuthBottomText,
PasswordInput,
} from "@/components/auth";
import { signupFormSchema } from "@/types/auth";
export default function SignupPage() {
const { supabase, user, isUserLoading } = useSupabase();
const [feedback, setFeedback] = useState<string | null>(null);
const router = useRouter();
const [isLoading, setIsLoading] = useState(false);
const [showWaitlistPrompt, setShowWaitlistPrompt] = useState(false);
const form = useForm<z.infer<typeof signupFormSchema>>({
resolver: zodResolver(signupFormSchema),
defaultValues: {
email: "",
password: "",
confirmPassword: "",
agreeToTerms: false,
},
});
const onSignup = useCallback(
async (data: z.infer<typeof signupFormSchema>) => {
setIsLoading(true);
if (!(await form.trigger())) {
setIsLoading(false);
return;
}
const error = await signup(data);
setIsLoading(false);
if (error) {
setShowWaitlistPrompt(true);
return;
}
setFeedback(null);
},
[form],
);
if (user) {
console.debug("User exists, redirecting to /");
router.push("/");
}
if (isUserLoading || user) {
return <Spinner />;
}
if (!supabase) {
return (
<div>
User accounts are disabled because Supabase client is unavailable
</div>
);
}
return (
<AuthCard>
<AuthHeader>Create a new account</AuthHeader>
<Form {...form}>
<form onSubmit={form.handleSubmit(onSignup)}>
<FormField
control={form.control}
name="email"
render={({ field }) => (
<FormItem className="mb-6">
<FormLabel>Email</FormLabel>
<FormControl>
<Input placeholder="m@example.com" {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="password"
render={({ field }) => (
<FormItem className="mb-6">
<FormLabel>Password</FormLabel>
<FormControl>
<PasswordInput {...field} />
</FormControl>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="confirmPassword"
render={({ field }) => (
<FormItem className="mb-4">
<FormLabel>Confirm Password</FormLabel>
<FormControl>
<PasswordInput {...field} />
</FormControl>
<FormDescription className="text-sm font-normal leading-tight text-slate-500">
Password needs to be at least 6 characters long
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
<AuthButton
onClick={() => onSignup(form.getValues())}
isLoading={isLoading}
type="submit"
>
Sign up
</AuthButton>
<FormField
control={form.control}
name="agreeToTerms"
render={({ field }) => (
<FormItem className="mt-6 flex flex-row items-start -space-y-1 space-x-2">
<FormControl>
<Checkbox
checked={field.value}
onCheckedChange={field.onChange}
/>
</FormControl>
<div className="">
<FormLabel>
<span className="mr-1 text-sm font-normal leading-normal text-slate-950">
I agree to the
</span>
<Link
href="https://auto-gpt.notion.site/Terms-of-Use-11400ef5bece80d0b087d7831c5fd6bf"
className="text-sm font-normal leading-normal text-slate-950 underline"
>
Terms of Use
</Link>
<span className="mx-1 text-sm font-normal leading-normal text-slate-950">
and
</span>
<Link
href="https://www.notion.so/auto-gpt/Privacy-Policy-ab11c9c20dbd4de1a15dcffe84d77984"
className="text-sm font-normal leading-normal text-slate-950 underline"
>
Privacy Policy
</Link>
</FormLabel>
<FormMessage />
</div>
</FormItem>
)}
/>
</form>
<AuthFeedback message={feedback} isError={true} />
</Form>
{showWaitlistPrompt && (
<div>
<span className="mr-1 text-sm font-normal leading-normal text-red-500">
The provided email may not be allowed to sign up.
</span>
<br />
<span className="mx-1 text-sm font-normal leading-normal text-slate-950">
- AutoGPT Platform is currently in closed beta. You can join
</span>
<Link
href="https://agpt.co/waitlist"
className="text-sm font-normal leading-normal text-slate-950 underline"
>
the waitlist here.
</Link>
<br />
<span className="mx-1 text-sm font-normal leading-normal text-slate-950">
- Make sure you use the same email address you used to sign up for
the waitlist.
</span>
<br />
<span className="mx-1 text-sm font-normal leading-normal text-slate-950">
- You can self host the platform, visit our
</span>
<Link
href="https://agpt.co/waitlist"
className="text-sm font-normal leading-normal text-slate-950 underline"
>
GitHub repository.
</Link>
</div>
)}
<AuthBottomText
text="Already a member?"
linkText="Log in"
href="/login"
/>
</AuthCard>
);
}

View File

@@ -1,11 +0,0 @@
import { APIKeysSection } from "@/components/agptui/composite/APIKeySection";
// Store settings page: hosts the AutoGPT Platform API key manager.
function ApiKeysPage() {
  return (
    <div className="w-full pr-4 pt-24 md:pt-0">
      <APIKeysSection />
    </div>
  );
}

export default ApiKeysPage;

View File

@@ -8,7 +8,6 @@ export default function Layout({ children }: { children: React.ReactNode }) {
{ text: "Creator Dashboard", href: "/store/dashboard" },
{ text: "Agent dashboard", href: "/store/agent-dashboard" },
{ text: "Integrations", href: "/store/integrations" },
{ text: "API Keys", href: "/store/api_keys" },
{ text: "Profile", href: "/store/profile" },
{ text: "Settings", href: "/store/settings" },
],
@@ -18,7 +17,7 @@ export default function Layout({ children }: { children: React.ReactNode }) {
return (
<div className="flex min-h-screen w-screen max-w-[1360px] flex-col lg:flex-row">
<Sidebar linkGroups={sidebarLinkGroups} />
<div className="flex-1 pl-4">{children}</div>
<div className="pl-4">{children}</div>
</div>
);
}

View File

@@ -40,8 +40,7 @@ export default async function Page({
const agent = await api.getStoreAgent(creator_lower, params.slug);
const otherAgents = await api.getStoreAgents({ creator: creator_lower });
const similarAgents = await api.getStoreAgents({
// We use the slug as we know it has been sanitized and is not null
search_query: agent.slug.replace(/-/g, " "),
search_query: agent.categories[0],
});
const breadcrumbs = [

View File

@@ -245,7 +245,6 @@ export function CustomNode({
].includes(nodeType) &&
// No input connection handles for credentials
propKey !== "credentials" &&
!propKey.endsWith("_credentials") &&
// For OUTPUT blocks, only show the 'value' (hides 'name') input connection handle
!(nodeType == BlockUIType.OUTPUT && propKey == "name");
const isConnected = isInputHandleConnected(propKey);
@@ -253,13 +252,7 @@ export function CustomNode({
!isHidden &&
(isRequired || isAdvancedOpen || isConnected || !isAdvanced) && (
<div key={propKey} data-id={`input-handle-${propKey}`}>
{isConnectable &&
!(
"oneOf" in propSchema &&
propSchema.oneOf &&
"discriminator" in propSchema &&
propSchema.discriminator
) ? (
{isConnectable ? (
<NodeHandle
keyName={propKey}
isConnected={isConnected}
@@ -268,8 +261,7 @@ export function CustomNode({
side="left"
/>
) : (
propKey !== "credentials" &&
!propKey.endsWith("_credentials") && (
propKey != "credentials" && (
<div className="flex gap-1">
<span className="text-m green mb-0 text-gray-900 dark:text-gray-100">
{propSchema.title || beautifyString(propKey)}
@@ -734,10 +726,13 @@ export function CustomNode({
</div>
{/* Body */}
<div className="mx-5 my-6 rounded-b-xl">
<div className="ml-5 mt-6 rounded-b-xl">
{/* Input Handles */}
{data.uiType !== BlockUIType.NOTE ? (
<div data-id="input-handles">
<div
className="flex w-fit items-start justify-between"
data-id="input-handles"
>
<div>
{data.uiType === BlockUIType.WEBHOOK_MANUAL &&
(data.webhook ? (
@@ -786,6 +781,7 @@ export function CustomNode({
<Switch
onCheckedChange={toggleAdvancedSettings}
checked={isAdvancedOpen}
className="mr-5"
/>
</div>
</>
@@ -794,7 +790,7 @@ export function CustomNode({
{data.uiType !== BlockUIType.NOTE && (
<>
<LineSeparator />
<div className="flex items-start justify-end rounded-b-xl pt-6">
<div className="flex items-start justify-end rounded-b-xl pb-2 pr-2 pt-6">
<div className="flex-none">
{data.outputSchema &&
generateOutputHandles(data.outputSchema, data.uiType)}
@@ -854,10 +850,8 @@ export function CustomNode({
data.status === "COMPLETED",
"border-yellow-600 bg-yellow-600 text-white":
data.status === "RUNNING",
"border-red-600 bg-red-600 text-white": [
"FAILED",
"TERMINATED",
].includes(data.status || ""),
"border-red-600 bg-red-600 text-white":
data.status === "FAILED",
"border-blue-600 bg-blue-600 text-white":
data.status === "QUEUED",
"border-gray-600 bg-gray-600 font-black":

View File

@@ -82,7 +82,7 @@ const NodeHandle: FC<HandleProps> = ({
data-testid={`output-handle-${keyName}`}
position={Position.Right}
id={keyName}
className="group -mr-[38px]"
className="group -mr-[26px]"
>
<div className="pointer-events-none flex items-center">
{label}

View File

@@ -16,7 +16,6 @@ const PasswordInput = forwardRef<HTMLInputElement, InputProps>(
type={showPassword ? "text" : "password"}
className={cn("hide-password-toggle pr-10", className)}
ref={ref}
title="password"
{...props}
/>
<Button
@@ -24,11 +23,8 @@ const PasswordInput = forwardRef<HTMLInputElement, InputProps>(
variant="ghost"
size="sm"
className="absolute right-0 top-0 h-full px-3 py-2 hover:bg-transparent"
onMouseDown={() => setShowPassword(true)}
onMouseUp={() => setShowPassword(false)}
onMouseLeave={() => setShowPassword(false)}
onClick={() => setShowPassword((prev) => !prev)}
disabled={disabled}
tabIndex={-1}
>
{showPassword && !disabled ? (
<EyeIcon className="h-4 w-4" aria-hidden="true" />

View File

@@ -61,7 +61,7 @@ const TallyPopupSimple = () => {
<Button
variant="default"
onClick={resetTutorial}
className="mb-0 h-14 w-28 rounded-2xl bg-[rgba(65,65,64,1)] text-left font-inter text-lg font-medium leading-6"
className="font-inter mb-0 h-14 w-28 rounded-2xl bg-[rgba(65,65,64,1)] text-left text-lg font-medium leading-6"
>
Tutorial
</Button>

View File

@@ -6,10 +6,6 @@ import { Separator } from "@/components/ui/separator";
import BackendAPI from "@/lib/autogpt-server-api";
import { useRouter } from "next/navigation";
import Link from "next/link";
import { useToast } from "@/components/ui/use-toast";
import useSupabase from "@/hooks/useSupabase";
import { DownloadIcon, LoaderIcon } from "lucide-react";
interface AgentInfoProps {
name: string;
creator: string;
@@ -36,11 +32,8 @@ export const AgentInfo: React.FC<AgentInfoProps> = ({
storeListingVersionId,
}) => {
const router = useRouter();
const api = React.useMemo(() => new BackendAPI(), []);
const { user } = useSupabase();
const { toast } = useToast();
const [downloading, setDownloading] = React.useState(false);
const api = React.useMemo(() => new BackendAPI(), []);
const handleAddToLibrary = async () => {
try {
@@ -52,46 +45,6 @@ export const AgentInfo: React.FC<AgentInfoProps> = ({
}
};
const handleDownloadToLibrary = async () => {
const downloadAgent = async (): Promise<void> => {
setDownloading(true);
try {
const file = await api.downloadStoreAgent(storeListingVersionId);
// Similar to Marketplace v1
const jsonData = JSON.stringify(file, null, 2);
// Create a Blob from the file content
const blob = new Blob([jsonData], { type: "application/json" });
// Create a temporary URL for the Blob
const url = window.URL.createObjectURL(blob);
// Create a temporary anchor element
const a = document.createElement("a");
a.href = url;
a.download = `agent_${storeListingVersionId}.json`; // Set the filename
// Append the anchor to the body, click it, and remove it
document.body.appendChild(a);
a.click();
document.body.removeChild(a);
// Revoke the temporary URL
window.URL.revokeObjectURL(url);
toast({
title: "Download Complete",
description: "Your agent has been successfully downloaded.",
});
} catch (error) {
console.error(`Error downloading agent:`, error);
throw error;
}
};
await downloadAgent();
setDownloading(false);
};
return (
<div className="w-full max-w-[396px] px-4 sm:px-6 lg:w-[396px] lg:px-0">
{/* Title */}
@@ -119,36 +72,15 @@ export const AgentInfo: React.FC<AgentInfoProps> = ({
{/* Run Agent Button */}
<div className="mb-4 w-full lg:mb-[60px]">
{user ? (
<button
onClick={handleAddToLibrary}
className="inline-flex w-full items-center justify-center gap-2 rounded-[38px] bg-violet-600 px-4 py-3 transition-colors hover:bg-violet-700 sm:w-auto sm:gap-2.5 sm:px-5 sm:py-3.5 lg:px-6 lg:py-4"
>
<IconPlay className="h-5 w-5 text-white sm:h-5 sm:w-5 lg:h-6 lg:w-6" />
<span className="font-poppins text-base font-medium text-neutral-50 sm:text-lg">
Add To Library
</span>
</button>
) : (
<button
onClick={handleDownloadToLibrary}
className={`inline-flex w-full items-center justify-center gap-2 rounded-[38px] px-4 py-3 transition-colors sm:w-auto sm:gap-2.5 sm:px-5 sm:py-3.5 lg:px-6 lg:py-4 ${
downloading
? "bg-neutral-400"
: "bg-violet-600 hover:bg-violet-700"
}`}
disabled={downloading}
>
{downloading ? (
<LoaderIcon className="h-5 w-5 animate-spin text-white sm:h-5 sm:w-5 lg:h-6 lg:w-6" />
) : (
<DownloadIcon className="h-5 w-5 text-white sm:h-5 sm:w-5 lg:h-6 lg:w-6" />
)}
<span className="font-poppins text-base font-medium text-neutral-50 sm:text-lg">
{downloading ? "Downloading..." : "Download Agent as File"}
</span>
</button>
)}
<button
onClick={handleAddToLibrary}
className="inline-flex w-full items-center justify-center gap-2 rounded-[38px] bg-violet-600 px-4 py-3 transition-colors hover:bg-violet-700 sm:w-auto sm:gap-2.5 sm:px-5 sm:py-3.5 lg:px-6 lg:py-4"
>
<IconPlay className="h-5 w-5 text-white sm:h-5 sm:w-5 lg:h-6 lg:w-6" />
<span className="font-poppins text-base font-medium text-neutral-50 sm:text-lg">
Add To Library
</span>
</button>
</div>
{/* Rating and Runs */}

View File

@@ -2,7 +2,7 @@ import * as React from "react";
import Link from "next/link";
import { Button } from "./Button";
import { Sheet, SheetContent, SheetTrigger } from "@/components/ui/sheet";
import { KeyIcon, Menu } from "lucide-react";
import { Menu } from "lucide-react";
import {
IconDashboardLayout,
IconIntegrations,
@@ -58,15 +58,6 @@ export const Sidebar: React.FC<SidebarProps> = ({ linkGroups }) => {
Integrations
</div>
</Link>
<Link
href="/store/api_keys"
className="inline-flex w-full items-center gap-2.5 rounded-xl px-3 py-3 text-neutral-800 hover:bg-neutral-800 hover:text-white dark:text-neutral-200 dark:hover:bg-neutral-700 dark:hover:text-white"
>
<KeyIcon className="h-6 w-6" />
<div className="p-ui-medium text-base font-medium leading-normal">
API Keys
</div>
</Link>
<Link
href="/store/profile"
className="inline-flex w-full items-center gap-2.5 rounded-xl px-3 py-3 text-neutral-800 hover:bg-neutral-800 hover:text-white dark:text-neutral-200 dark:hover:bg-neutral-700 dark:hover:text-white"
@@ -111,15 +102,6 @@ export const Sidebar: React.FC<SidebarProps> = ({ linkGroups }) => {
Integrations
</div>
</Link>
<Link
href="/store/api_keys"
className="inline-flex w-full items-center gap-2.5 rounded-xl px-3 py-3 text-neutral-800 hover:bg-neutral-800 hover:text-white dark:text-neutral-200 dark:hover:bg-neutral-700 dark:hover:text-white"
>
<KeyIcon className="h-6 w-6" strokeWidth={1} />
<div className="p-ui-medium text-base font-medium leading-normal">
API Keys
</div>
</Link>
<Link
href="/store/profile"
className="inline-flex w-full items-center gap-2.5 rounded-xl px-3 py-3 text-neutral-800 hover:bg-neutral-800 hover:text-white dark:text-neutral-200 dark:hover:bg-neutral-700 dark:hover:text-white"

View File

@@ -1,296 +0,0 @@
"use client";
import { useState, useEffect } from "react";
import { APIKey, APIKeyPermission } from "@/lib/autogpt-server-api/types";
import { LuCopy } from "react-icons/lu";
import { Loader2, MoreVertical } from "lucide-react";
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
import { useToast } from "@/components/ui/use-toast";
import {
Card,
CardContent,
CardDescription,
CardHeader,
CardTitle,
} from "@/components/ui/card";
import {
Dialog,
DialogContent,
DialogDescription,
DialogFooter,
DialogHeader,
DialogTitle,
DialogTrigger,
} from "@/components/ui/dialog";
import { Button } from "@/components/ui/button";
import { Label } from "@/components/ui/label";
import { Input } from "@/components/ui/input";
import { Checkbox } from "@/components/ui/checkbox";
import {
Table,
TableBody,
TableCell,
TableHead,
TableHeader,
TableRow,
} from "@/components/ui/table";
import { Badge } from "@/components/ui/badge";
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuTrigger,
} from "@/components/ui/dropdown-menu";
/**
 * Settings panel for managing AutoGPT Platform API keys.
 *
 * Lists the user's ACTIVE keys, lets them create a new key (name, optional
 * description, permission checkboxes), shows the plain-text key exactly once
 * in a copy dialog after creation, and supports revoking existing keys.
 */
export function APIKeysSection() {
  const [apiKeys, setApiKeys] = useState<APIKey[]>([]);
  const [isLoading, setIsLoading] = useState(true);
  // Dialog visibility: the create form, and the one-time "copy your key" dialog.
  const [isCreateOpen, setIsCreateOpen] = useState(false);
  const [isKeyDialogOpen, setIsKeyDialogOpen] = useState(false);
  // Creation-form fields.
  const [newKeyName, setNewKeyName] = useState("");
  const [newKeyDescription, setNewKeyDescription] = useState("");
  // Plain-text key returned by the backend; only available right after creation.
  const [newApiKey, setNewApiKey] = useState("");
  const [selectedPermissions, setSelectedPermissions] = useState<
    APIKeyPermission[]
  >([]);
  const { toast } = useToast();
  const api = useBackendAPI();

  useEffect(() => {
    loadAPIKeys();
    // Intentionally run once on mount; loadAPIKeys is stable for this component.
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []);

  // Fetch all keys and keep only ACTIVE ones. Fix: previously a failed
  // listAPIKeys() call left an unhandled promise rejection; now it surfaces
  // a toast like the other handlers do.
  const loadAPIKeys = async () => {
    setIsLoading(true);
    try {
      const keys = await api.listAPIKeys();
      setApiKeys(keys.filter((key) => key.status === "ACTIVE"));
    } catch (error) {
      toast({
        title: "Error",
        description: "Failed to load AutoGPT Platform API keys",
        variant: "destructive",
      });
    } finally {
      setIsLoading(false);
    }
  };

  // Create a key from the form state, then swap the create dialog for the
  // one-time key-reveal dialog and refresh the table.
  const handleCreateKey = async () => {
    try {
      const response = await api.createAPIKey(
        newKeyName,
        selectedPermissions,
        newKeyDescription,
      );
      setNewApiKey(response.plain_text_key);
      setIsCreateOpen(false);
      setIsKeyDialogOpen(true);
      loadAPIKeys();
    } catch (error) {
      toast({
        title: "Error",
        description: "Failed to create AutoGPT Platform API key",
        variant: "destructive",
      });
    }
  };

  // Copy the freshly created plain-text key to the clipboard.
  const handleCopyKey = () => {
    navigator.clipboard.writeText(newApiKey);
    toast({
      title: "Copied",
      description: "AutoGPT Platform API key copied to clipboard",
    });
  };

  // Revoke a key and refresh the table (revoked keys are filtered out on load).
  const handleRevokeKey = async (keyId: string) => {
    try {
      await api.revokeAPIKey(keyId);
      toast({
        title: "Success",
        description: "AutoGPT Platform API key revoked successfully",
      });
      loadAPIKeys();
    } catch (error) {
      toast({
        title: "Error",
        description: "Failed to revoke AutoGPT Platform API key",
        variant: "destructive",
      });
    }
  };

  return (
    <Card>
      <CardHeader>
        <CardTitle>AutoGPT Platform API Keys</CardTitle>
        <CardDescription>
          Manage your AutoGPT Platform API keys for programmatic access
        </CardDescription>
      </CardHeader>
      <CardContent>
        <div className="mb-4 flex justify-end">
          {/* Create-key dialog */}
          <Dialog open={isCreateOpen} onOpenChange={setIsCreateOpen}>
            <DialogTrigger asChild>
              <Button>Create Key</Button>
            </DialogTrigger>
            <DialogContent>
              <DialogHeader>
                <DialogTitle>Create New API Key</DialogTitle>
                <DialogDescription>
                  Create a new AutoGPT Platform API key
                </DialogDescription>
              </DialogHeader>
              <div className="grid gap-4 py-4">
                <div className="grid gap-2">
                  <Label htmlFor="name">Name</Label>
                  <Input
                    id="name"
                    value={newKeyName}
                    onChange={(e) => setNewKeyName(e.target.value)}
                    placeholder="My AutoGPT Platform API Key"
                  />
                </div>
                <div className="grid gap-2">
                  <Label htmlFor="description">Description (Optional)</Label>
                  <Input
                    id="description"
                    value={newKeyDescription}
                    onChange={(e) => setNewKeyDescription(e.target.value)}
                    placeholder="Used for..."
                  />
                </div>
                <div className="grid gap-2">
                  <Label>Permissions</Label>
                  {/* One checkbox per permission the platform defines. */}
                  {Object.values(APIKeyPermission).map((permission) => (
                    <div
                      className="flex items-center space-x-2"
                      key={permission}
                    >
                      <Checkbox
                        id={permission}
                        checked={selectedPermissions.includes(permission)}
                        onCheckedChange={(checked) => {
                          setSelectedPermissions(
                            checked
                              ? [...selectedPermissions, permission]
                              : selectedPermissions.filter(
                                  (p) => p !== permission,
                                ),
                          );
                        }}
                      />
                      <Label htmlFor={permission}>{permission}</Label>
                    </div>
                  ))}
                </div>
              </div>
              <DialogFooter>
                <Button
                  variant="outline"
                  onClick={() => setIsCreateOpen(false)}
                >
                  Cancel
                </Button>
                <Button onClick={handleCreateKey}>Create</Button>
              </DialogFooter>
            </DialogContent>
          </Dialog>
          {/* One-time key-reveal dialog; the plain-text key is not shown again. */}
          <Dialog open={isKeyDialogOpen} onOpenChange={setIsKeyDialogOpen}>
            <DialogContent>
              <DialogHeader>
                <DialogTitle>AutoGPT Platform API Key Created</DialogTitle>
                <DialogDescription>
                  Please copy your AutoGPT API key now. You won&apos;t be able
                  to see it again!
                </DialogDescription>
              </DialogHeader>
              <div className="flex items-center space-x-2">
                <code className="flex-1 rounded-md bg-secondary p-2 text-sm">
                  {newApiKey}
                </code>
                <Button size="icon" variant="outline" onClick={handleCopyKey}>
                  <LuCopy className="h-4 w-4" />
                </Button>
              </div>
              <DialogFooter>
                <Button onClick={() => setIsKeyDialogOpen(false)}>Close</Button>
              </DialogFooter>
            </DialogContent>
          </Dialog>
        </div>
        {isLoading ? (
          <div className="flex justify-center p-4">
            <Loader2 className="h-6 w-6 animate-spin" />
          </div>
        ) : (
          apiKeys.length > 0 && (
            <Table>
              <TableHeader>
                <TableRow>
                  <TableHead>Name</TableHead>
                  <TableHead>API Key</TableHead>
                  <TableHead>Status</TableHead>
                  <TableHead>Created</TableHead>
                  <TableHead>Last Used</TableHead>
                  <TableHead></TableHead>
                </TableRow>
              </TableHeader>
              <TableBody>
                {apiKeys.map((key) => (
                  <TableRow key={key.id}>
                    <TableCell>{key.name}</TableCell>
                    <TableCell>
                      {/* Masked display: only prefix and postfix are stored. */}
                      <div className="rounded-md border p-1 px-2 text-xs">
                        {`${key.prefix}******************${key.postfix}`}
                      </div>
                    </TableCell>
                    <TableCell>
                      <Badge
                        variant={
                          key.status === "ACTIVE" ? "default" : "destructive"
                        }
                        className={
                          key.status === "ACTIVE"
                            ? "border-green-600 bg-green-100 text-green-800"
                            : "border-red-600 bg-red-100 text-red-800"
                        }
                      >
                        {key.status}
                      </Badge>
                    </TableCell>
                    <TableCell>
                      {new Date(key.created_at).toLocaleDateString()}
                    </TableCell>
                    <TableCell>
                      {key.last_used_at
                        ? new Date(key.last_used_at).toLocaleDateString()
                        : "Never"}
                    </TableCell>
                    <TableCell>
                      <DropdownMenu>
                        <DropdownMenuTrigger asChild>
                          <Button variant="ghost" size="sm">
                            <MoreVertical className="h-4 w-4" />
                          </Button>
                        </DropdownMenuTrigger>
                        <DropdownMenuContent align="end">
                          <DropdownMenuItem
                            className="text-destructive"
                            onClick={() => handleRevokeKey(key.id)}
                          >
                            Revoke
                          </DropdownMenuItem>
                        </DropdownMenuContent>
                      </DropdownMenu>
                    </TableCell>
                  </TableRow>
                ))}
              </TableBody>
            </Table>
          )
        )}
      </CardContent>
    </Card>
  );
}

View File

@@ -34,8 +34,8 @@ export const AgentsSection: React.FC<AgentsSectionProps> = ({
}) => {
const router = useRouter();
// TODO: Update this when we have pagination
const displayedAgents = allAgents;
// Take only the first 9 agents
const displayedAgents = allAgents.slice(0, 9);
const handleCardClick = (creator: string, slug: string) => {
router.push(

View File

@@ -6,11 +6,9 @@ import {
Carousel,
CarouselContent,
CarouselItem,
CarouselPrevious,
CarouselNext,
CarouselIndicator,
} from "@/components/ui/carousel";
import { useCallback, useState } from "react";
import { IconLeftArrow, IconRightArrow } from "@/components/ui/icons";
import { useRouter } from "next/navigation";
const BACKGROUND_COLORS = [
@@ -65,24 +63,27 @@ export const FeaturedSection: React.FC<FeaturedSectionProps> = ({
return (
<div className="flex w-full flex-col items-center justify-center">
<div className="w-[99vw]">
<h2 className="font-poppins mx-auto mb-8 max-w-[1360px] px-4 text-2xl font-semibold leading-7 text-neutral-800 dark:text-neutral-200">
<div className="w-full">
<h2 className="font-poppins mb-8 text-2xl font-semibold leading-7 text-neutral-800 dark:text-neutral-200">
Featured agents
</h2>
<div className="w-[99vw] pb-[60px]">
<div>
<Carousel
className="mx-auto pb-10"
opts={{
align: "center",
loop: true,
startIndex: currentSlide,
duration: 500,
align: "start",
containScroll: "trimSnaps",
}}
className="w-full overflow-x-hidden"
>
<CarouselContent className="ml-[calc(50vw-690px)]">
<CarouselContent className="transition-transform duration-500">
{featuredAgents.map((agent, index) => (
<CarouselItem
key={index}
className="max-w-[460px] flex-[0_0_auto]"
className="max-w-[460px] flex-[0_0_auto] pr-8"
>
<FeaturedStoreCard
agentName={agent.agent_name}
@@ -98,13 +99,37 @@ export const FeaturedSection: React.FC<FeaturedSectionProps> = ({
</CarouselItem>
))}
</CarouselContent>
<div className="relative mx-auto w-full max-w-[1360px] pl-4">
<CarouselIndicator />
<CarouselPrevious afterClick={handlePrevSlide} />
<CarouselNext afterClick={handleNextSlide} />
</div>
</Carousel>
</div>
<div className="mt-8 flex w-full items-center justify-between">
<div className="flex h-3 items-center gap-2">
{featuredAgents.map((_, index) => (
<div
key={index}
className={`${
currentSlide === index
? "h-3 w-[52px] rounded-[39px] bg-neutral-800 transition-all duration-500 dark:bg-neutral-200"
: "h-3 w-3 rounded-full bg-neutral-300 transition-all duration-500 dark:bg-neutral-600"
}`}
/>
))}
</div>
<div className="mb-[60px] flex items-center gap-3">
<button
onClick={handlePrevSlide}
className="mb:h-12 mb:w-12 flex h-10 w-10 items-center justify-center rounded-full border border-neutral-400 bg-white dark:border-neutral-600 dark:bg-neutral-800"
>
<IconLeftArrow className="h-8 w-8 text-neutral-800 dark:text-neutral-200" />
</button>
<button
onClick={handleNextSlide}
className="mb:h-12 mb:w-12 flex h-10 w-10 items-center justify-center rounded-full border border-neutral-900 bg-white dark:border-neutral-600 dark:bg-neutral-800"
>
<IconRightArrow className="h-8 w-8 text-neutral-800 dark:text-neutral-200" />
</button>
</div>
</div>
</div>
</div>
);

View File

@@ -1,37 +0,0 @@
import { cn } from "@/lib/utils";
import Link from "next/link";
interface Props {
className?: string;
text: string;
linkText?: string;
href?: string;
}
// Footer line for auth cards: static text plus an optional trailing link
// (e.g. "Already a member? Log in").
export default function AuthBottomText({
  className = "",
  text,
  linkText,
  href = "",
}: Props) {
  // Only render the link when link text was provided.
  const link = linkText ? (
    <Link
      href={href}
      className="ml-1 text-sm font-medium leading-normal text-slate-950 underline"
    >
      {linkText}
    </Link>
  ) : null;

  return (
    <div
      className={cn(
        className,
        "mt-8 inline-flex w-full items-center justify-center",
      )}
    >
      <span className="text-sm font-medium leading-normal text-slate-950">
        {text}
      </span>
      {link}
    </div>
  );
}

View File

@@ -1,36 +0,0 @@
import { ReactNode } from "react";
import { Button } from "../ui/button";
import { FaSpinner } from "react-icons/fa";
interface Props {
children?: ReactNode;
onClick: () => void;
isLoading?: boolean;
disabled?: boolean;
type?: "button" | "submit" | "reset";
}
// Full-width submit/action button for auth forms; swaps its label for a
// spinner while a request is in flight and disables itself meanwhile.
export default function AuthButton({
  children,
  onClick,
  isLoading = false,
  disabled = false,
  type = "button",
}: Props) {
  const content = isLoading ? (
    <FaSpinner className="animate-spin" />
  ) : (
    <div className="text-sm font-medium leading-normal text-slate-50">
      {children}
    </div>
  );

  return (
    <Button
      className="mt-2 w-full self-stretch rounded-md bg-slate-900 px-4 py-2"
      type={type}
      disabled={isLoading || disabled}
      onClick={onClick}
    >
      {content}
    </Button>
  );
}

View File

@@ -1,15 +0,0 @@
import { ReactNode } from "react";
interface Props {
children: ReactNode;
}
// Fixed-size centered container used as the shell for login/signup forms.
export default function AuthCard({ children }: Props) {
  const card = (
    <div className="w-full max-w-md rounded-lg bg-white p-6 shadow-md">
      {children}
    </div>
  );

  return (
    <div className="flex h-[80vh] w-[32rem] items-center justify-center">
      {card}
    </div>
  );
}

View File

@@ -1,16 +0,0 @@
interface Props {
message?: string | null;
isError?: boolean;
}
// Inline feedback line under auth forms: red for errors, default for info.
export default function AuthFeedback({ message = "", isError = false }: Props) {
  const toneClass = isError ? "text-red-500" : "text-slate-950";

  return (
    <div className="mt-4 text-center text-sm font-medium leading-normal">
      <div className={toneClass}>{message}</div>
    </div>
  );
}

View File

@@ -1,13 +0,0 @@
import { ReactNode } from "react";
interface Props {
children: ReactNode;
}
// Heading row displayed at the top of an auth card.
export default function AuthHeader({ children }: Props) {
  const headingClass =
    "mb-8 text-2xl font-semibold leading-normal text-slate-950";
  return <div className={headingClass}>{children}</div>;
}

View File

@@ -1,15 +0,0 @@
// Barrel file: expose the auth UI building blocks from a single entry point.
export { default as AuthBottomText } from "./AuthBottomText";
export { default as AuthButton } from "./AuthButton";
export { default as AuthCard } from "./AuthCard";
export { default as AuthFeedback } from "./AuthFeedback";
export { default as AuthHeader } from "./AuthHeader";
export { PasswordInput } from "./PasswordInput";

View File

@@ -4,22 +4,58 @@
transition: border-color 0.3s ease-in-out;
}
.custom-node [data-id="input-handles"],
.custom-node [data-id="input-handles"] {
padding: 0 1.25rem;
margin-bottom: 1rem;
}
.custom-node [data-id="input-handles"] > div > div {
margin-bottom: 1rem;
}
.handle-container {
display: flex;
position: relative;
margin-bottom: 0px;
padding: 0.75rem 1.25rem;
min-height: 44px;
height: 100%;
}
.custom-node input:not([type="checkbox"]),
.custom-node textarea,
.custom-node select,
.custom-node [data-id^="date-picker"],
.custom-node [data-list-container],
.custom-node [data-add-item],
.custom-node [data-content-settings], .array-item-container {
.custom-node select {
width: calc(100% - 2.5rem);
max-width: 400px;
margin: 0.5rem 1.25rem;
}
.custom-node [data-id^="date-picker"] {
margin: 0.5rem 1.25rem;
width: calc(100% - 2.5rem);
}
.custom-node [data-list-container] {
margin: 0.5rem 1.25rem;
width: calc(100% - 2.5rem);
}
.custom-node [data-add-item] {
margin: 0.5rem 1.25rem;
width: calc(100% - 2.5rem);
padding: 0.5rem;
}
.array-item-container {
display: flex;
align-items: center;
min-width: calc(100% - 2.5rem);
max-width: 100%;
margin: 0.5rem 1.25rem;
width: calc(100% - 2.5rem);
}
.custom-node [data-content-settings] {
margin: 0.5rem 1.25rem;
width: calc(100% - 2.5rem);
}
.custom-node .custom-switch {
@@ -32,6 +68,7 @@
.error-message {
color: #d9534f;
font-size: 13px;
margin: 0.25rem 1.25rem;
padding-left: 0.5rem;
}

View File

@@ -1,9 +1,9 @@
/* flow.css or index.css */
body {
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto",
"Oxygen", "Ubuntu", "Cantarell", "Fira Sans", "Droid Sans",
"Helvetica Neue", sans-serif;
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen",
"Ubuntu", "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue",
sans-serif;
}
code {

Some files were not shown because too many files have changed in this diff Show More