Mirror of https://github.com/Significant-Gravitas/AutoGPT.git (synced 2026-01-21 13:08:05 -05:00)

Compare commits: make-old-w...native-aut (24 commits)
Commits (SHA1):

- 87e3d7eaad
- 974c14a7b9
- af014ea19d
- 9ecf8bcb08
- a7a521cedd
- 84244c0b56
- 9e83985b5b
- 4ef3eab89d
- c68b53b6c1
- 23fb3ad8a4
- 175ba13ebe
- a415f471c6
- 3dd6e5cb04
- 3f1e66b317
- 8f722bd9cd
- 65026fc9d3
- af98bc1081
- e92459fc5f
- 1775286f59
- f6af700f1a
- a80b06d459
- 17c9e7c8b4
- f83c9391c8
- 7a0a90e421
.github/copilot-instructions.md (vendored, 8 changes)

@@ -142,7 +142,7 @@ pnpm storybook # Start component development server
 ### Security & Middleware

 **Cache Protection**: Backend includes middleware preventing sensitive data caching in browsers/proxies
-**Authentication**: JWT-based with Supabase integration
+**Authentication**: JWT-based with native authentication
 **User ID Validation**: All data access requires user ID checks - verify this for any `data/*.py` changes

 ### Development Workflow
@@ -168,9 +168,9 @@ pnpm storybook # Start component development server

 - `frontend/src/app/layout.tsx` - Root application layout
 - `frontend/src/app/page.tsx` - Home page
-- `frontend/src/lib/supabase/` - Authentication and database client
+- `frontend/src/lib/auth/` - Authentication client

-**Protected Routes**: Update `frontend/lib/supabase/middleware.ts` when adding protected routes
+**Protected Routes**: Update `frontend/middleware.ts` when adding protected routes

 ### Agent Block System

@@ -194,7 +194,7 @@ Agents are built using a visual block-based system where each block performs a s

 1. **Backend**: `/backend/.env.default` → `/backend/.env` (user overrides)
 2. **Frontend**: `/frontend/.env.default` → `/frontend/.env` (user overrides)
-3. **Platform**: `/.env.default` (Supabase/shared) → `/.env` (user overrides)
+3. **Platform**: `/.env.default` (shared) → `/.env` (user overrides)
 4. Docker Compose `environment:` sections override file-based config
 5. Shell environment variables have highest precedence
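The configuration precedence spelled out in the hunk above (shipped defaults → user overrides → Docker Compose `environment:` entries → shell variables) can be sketched in a few lines of Python. This is an illustrative sketch only, not repository code; it assumes python-dotenv is installed and resolves just the backend pair of files.

    import os
    from dotenv import dotenv_values  # python-dotenv

    def resolve_backend_config() -> dict[str, str]:
        """Merge config sources from lowest to highest precedence."""
        config: dict[str, str] = {}
        # 1. Shipped defaults
        config.update({k: v for k, v in dotenv_values("backend/.env.default").items() if v is not None})
        # 2. User overrides
        config.update({k: v for k, v in dotenv_values("backend/.env").items() if v is not None})
        # 3. Process environment: Docker Compose `environment:` entries and shell
        #    exports both land here, so they win over anything file-based.
        config.update(os.environ)
        return config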
.github/workflows/claude-dependabot.yml (vendored, 6 changes)

@@ -144,11 +144,7 @@ jobs:
   "rabbitmq:management"
   "clamav/clamav-debian:latest"
   "busybox:latest"
-  "kong:2.8.1"
+  "pgvector/pgvector:pg18"
-  "supabase/gotrue:v2.170.0"
-  "supabase/postgres:15.8.1.049"
-  "supabase/postgres-meta:v0.86.1"
-  "supabase/studio:20250224-d10db0f"
   )

   # Check if any cached tar files exist (more reliable than cache-hit)
.github/workflows/claude.yml (vendored, 6 changes)

@@ -160,11 +160,7 @@ jobs:
   "rabbitmq:management"
   "clamav/clamav-debian:latest"
   "busybox:latest"
-  "kong:2.8.1"
+  "pgvector/pgvector:pg18"
-  "supabase/gotrue:v2.170.0"
-  "supabase/postgres:15.8.1.049"
-  "supabase/postgres-meta:v0.86.1"
-  "supabase/studio:20250224-d10db0f"
   )

   # Check if any cached tar files exist (more reliable than cache-hit)
.github/workflows/copilot-setup-steps.yml (vendored, 6 changes)

@@ -142,11 +142,7 @@ jobs:
   "rabbitmq:management"
   "clamav/clamav-debian:latest"
   "busybox:latest"
-  "kong:2.8.1"
+  "pgvector/pgvector:pg18"
-  "supabase/gotrue:v2.170.0"
-  "supabase/postgres:15.8.1.049"
-  "supabase/postgres-meta:v0.86.1"
-  "supabase/studio:20250224-d10db0f"
   )

   # Check if any cached tar files exist (more reliable than cache-hit)
.github/workflows/platform-backend-ci.yml (vendored, 44 changes)

@@ -2,13 +2,13 @@ name: AutoGPT Platform - Backend CI

 on:
   push:
-    branches: [master, dev, ci-test*]
+    branches: [master, dev, ci-test*, native-auth]
     paths:
       - ".github/workflows/platform-backend-ci.yml"
       - "autogpt_platform/backend/**"
       - "autogpt_platform/autogpt_libs/**"
   pull_request:
-    branches: [master, dev, release-*]
+    branches: [master, dev, release-*, native-auth]
     paths:
       - ".github/workflows/platform-backend-ci.yml"
       - "autogpt_platform/backend/**"
@@ -36,6 +36,19 @@ jobs:
     runs-on: ubuntu-latest

     services:
+      postgres:
+        image: pgvector/pgvector:pg18
+        ports:
+          - 5432:5432
+        env:
+          POSTGRES_USER: postgres
+          POSTGRES_PASSWORD: your-super-secret-and-long-postgres-password
+          POSTGRES_DB: postgres
+        options: >-
+          --health-cmd "pg_isready -U postgres"
+          --health-interval 5s
+          --health-timeout 5s
+          --health-retries 10
       redis:
         image: redis:latest
         ports:
@@ -78,11 +91,6 @@ jobs:
       with:
         python-version: ${{ matrix.python-version }}

-      - name: Setup Supabase
-        uses: supabase/setup-cli@v1
-        with:
-          version: 1.178.1
-
      - id: get_date
        name: Get date
        run: echo "date=$(date +'%Y-%m-%d')" >> $GITHUB_OUTPUT
@@ -136,16 +144,6 @@ jobs:
      - name: Generate Prisma Client
        run: poetry run prisma generate

-      - id: supabase
-        name: Start Supabase
-        working-directory: .
-        run: |
-          supabase init
-          supabase start --exclude postgres-meta,realtime,storage-api,imgproxy,inbucket,studio,edge-runtime,logflare,vector,supavisor
-          supabase status -o env | sed 's/="/=/; s/"$//' >> $GITHUB_OUTPUT
-          # outputs:
-          # DB_URL, API_URL, GRAPHQL_URL, ANON_KEY, SERVICE_ROLE_KEY, JWT_SECRET
-
      - name: Wait for ClamAV to be ready
        run: |
          echo "Waiting for ClamAV daemon to start..."
@@ -178,8 +176,8 @@ jobs:
      - name: Run Database Migrations
        run: poetry run prisma migrate dev --name updates
        env:
-          DATABASE_URL: ${{ steps.supabase.outputs.DB_URL }}
+          DATABASE_URL: postgresql://postgres:your-super-secret-and-long-postgres-password@localhost:5432/postgres
-          DIRECT_URL: ${{ steps.supabase.outputs.DB_URL }}
+          DIRECT_URL: postgresql://postgres:your-super-secret-and-long-postgres-password@localhost:5432/postgres

      - id: lint
        name: Run Linter
@@ -195,11 +193,9 @@ jobs:
        if: success() || (failure() && steps.lint.outcome == 'failure')
        env:
          LOG_LEVEL: ${{ runner.debug && 'DEBUG' || 'INFO' }}
-          DATABASE_URL: ${{ steps.supabase.outputs.DB_URL }}
+          DATABASE_URL: postgresql://postgres:your-super-secret-and-long-postgres-password@localhost:5432/postgres
-          DIRECT_URL: ${{ steps.supabase.outputs.DB_URL }}
+          DIRECT_URL: postgresql://postgres:your-super-secret-and-long-postgres-password@localhost:5432/postgres
-          SUPABASE_URL: ${{ steps.supabase.outputs.API_URL }}
+          JWT_SECRET: your-super-secret-jwt-token-with-at-least-32-characters-long
-          SUPABASE_SERVICE_ROLE_KEY: ${{ steps.supabase.outputs.SERVICE_ROLE_KEY }}
-          JWT_VERIFY_KEY: ${{ steps.supabase.outputs.JWT_SECRET }}
          REDIS_HOST: "localhost"
          REDIS_PORT: "6379"
          ENCRYPTION_KEY: "dvziYgz0KSK8FENhju0ZYi8-fRTfAdlz6YLhdB_jhNw=" # DO NOT USE IN PRODUCTION!!
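With the Supabase CLI steps gone, the backend CI job talks straight to the pgvector service container through the static connection string exported above. A minimal connectivity check is sketched below; it is not part of the workflow (which uses Prisma) and assumes psycopg2-binary is available.

    import os

    import psycopg2  # assumed to be installed for this sketch only

    # Same DSN the workflow exports as DATABASE_URL / DIRECT_URL.
    dsn = os.environ.get(
        "DATABASE_URL",
        "postgresql://postgres:your-super-secret-and-long-postgres-password@localhost:5432/postgres",
    )

    conn = psycopg2.connect(dsn)
    try:
        with conn.cursor() as cur:
            cur.execute("SELECT version();")
            print(cur.fetchone()[0])  # server banner from the pgvector/pgvector:pg18 image
    finally:
        conn.close()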
.github/workflows/platform-frontend-ci.yml (vendored, 5 changes)

@@ -2,11 +2,12 @@ name: AutoGPT Platform - Frontend CI

 on:
   push:
-    branches: [master, dev]
+    branches: [master, dev, native-auth]
     paths:
       - ".github/workflows/platform-frontend-ci.yml"
       - "autogpt_platform/frontend/**"
   pull_request:
+    branches: [master, dev, native-auth]
     paths:
       - ".github/workflows/platform-frontend-ci.yml"
       - "autogpt_platform/frontend/**"
@@ -147,7 +148,7 @@ jobs:
      - name: Enable corepack
        run: corepack enable

-      - name: Copy default supabase .env
+      - name: Copy default platform .env
        run: |
          cp ../.env.default ../.env

.github/workflows/platform-fullstack-ci.yml (vendored, 56 changes)

@@ -1,12 +1,13 @@
-name: AutoGPT Platform - Frontend CI
+name: AutoGPT Platform - Fullstack CI

 on:
   push:
-    branches: [master, dev]
+    branches: [master, dev, native-auth]
     paths:
       - ".github/workflows/platform-fullstack-ci.yml"
       - "autogpt_platform/**"
   pull_request:
+    branches: [master, dev, native-auth]
     paths:
       - ".github/workflows/platform-fullstack-ci.yml"
       - "autogpt_platform/**"
@@ -58,14 +59,11 @@ jobs:
   types:
     runs-on: ubuntu-latest
     needs: setup
-    strategy:
-      fail-fast: false
+    timeout-minutes: 10

     steps:
       - name: Checkout repository
         uses: actions/checkout@v4
-        with:
-          submodules: recursive

       - name: Set up Node.js
         uses: actions/setup-node@v4
@@ -75,18 +73,6 @@ jobs:
      - name: Enable corepack
        run: corepack enable

-      - name: Copy default supabase .env
-        run: |
-          cp ../.env.default ../.env
-
-      - name: Copy backend .env
-        run: |
-          cp ../backend/.env.default ../backend/.env
-
-      - name: Run docker compose
-        run: |
-          docker compose -f ../docker-compose.yml --profile local --profile deps_backend up -d
-
      - name: Restore dependencies cache
        uses: actions/cache@v4
        with:
@@ -101,36 +87,12 @@ jobs:
      - name: Setup .env
        run: cp .env.default .env

-      - name: Wait for services to be ready
-        run: |
-          echo "Waiting for rest_server to be ready..."
-          timeout 60 sh -c 'until curl -f http://localhost:8006/health 2>/dev/null; do sleep 2; done' || echo "Rest server health check timeout, continuing..."
-          echo "Waiting for database to be ready..."
-          timeout 60 sh -c 'until docker compose -f ../docker-compose.yml exec -T db pg_isready -U postgres 2>/dev/null; do sleep 2; done' || echo "Database ready check timeout, continuing..."
-
      - name: Generate API queries
-        run: pnpm generate:api:force
+        run: pnpm generate:api

-      - name: Check for API schema changes
-        run: |
-          if ! git diff --exit-code src/app/api/openapi.json; then
-            echo "❌ API schema changes detected in src/app/api/openapi.json"
-            echo ""
-            echo "The openapi.json file has been modified after running 'pnpm generate:api-all'."
-            echo "This usually means changes have been made in the BE endpoints without updating the Frontend."
-            echo "The API schema is now out of sync with the Front-end queries."
-            echo ""
-            echo "To fix this:"
-            echo "1. Pull the backend 'docker compose pull && docker compose up -d --build --force-recreate'"
-            echo "2. Run 'pnpm generate:api' locally"
-            echo "3. Run 'pnpm types' locally"
-            echo "4. Fix any TypeScript errors that may have been introduced"
-            echo "5. Commit and push your changes"
-            echo ""
-            exit 1
-          else
-            echo "✅ No API schema changes detected"
-          fi
-
      - name: Run Typescript checks
        run: pnpm types
+        env:
+          CI: true
+          PLAIN_OUTPUT: True
@@ -49,5 +49,5 @@ Use conventional commit messages for all commits (e.g. `feat(backend): add API`)
 - Keep out-of-scope changes under 20% of the PR.
 - Ensure PR descriptions are complete.
 - For changes touching `data/*.py`, validate user ID checks or explain why not needed.
-- If adding protected frontend routes, update `frontend/lib/supabase/middleware.ts`.
+- If adding protected frontend routes, update `frontend/lib/auth/helpers.ts`.
 - Use the linear ticket branch structure if given codex/open-1668-resume-dropped-runs
@@ -5,12 +5,6 @@

 POSTGRES_PASSWORD=your-super-secret-and-long-postgres-password
 JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
-ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
-SERVICE_ROLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
-DASHBOARD_USERNAME=supabase
-DASHBOARD_PASSWORD=this_password_is_insecure_and_should_be_updated
-SECRET_KEY_BASE=UpNVntn3cDxHJpq99YMc1T1AQgQpc8kfYTuRgBiYa15BLrx8etQoXz3gZv1/u2oq
-VAULT_ENC_KEY=your-encryption-key-32-chars-min


 ############
@@ -24,100 +18,31 @@ POSTGRES_PORT=5432


 ############
-# Supavisor -- Database pooler
+# Auth - Native authentication configuration
-############
-POOLER_PROXY_PORT_TRANSACTION=6543
-POOLER_DEFAULT_POOL_SIZE=20
-POOLER_MAX_CLIENT_CONN=100
-POOLER_TENANT_ID=your-tenant-id
-
-
-############
-# API Proxy - Configuration for the Kong Reverse proxy.
 ############

-KONG_HTTP_PORT=8000
-KONG_HTTPS_PORT=8443
-
-
-############
-# API - Configuration for PostgREST.
-############
-
-PGRST_DB_SCHEMAS=public,storage,graphql_public
-
-
-############
-# Auth - Configuration for the GoTrue authentication server.
-############
-
-## General
 SITE_URL=http://localhost:3000
-ADDITIONAL_REDIRECT_URLS=
-JWT_EXPIRY=3600
-DISABLE_SIGNUP=false
-API_EXTERNAL_URL=http://localhost:8000

-## Mailer Config
+# JWT token configuration
-MAILER_URLPATHS_CONFIRMATION="/auth/v1/verify"
+ACCESS_TOKEN_EXPIRE_MINUTES=15
-MAILER_URLPATHS_INVITE="/auth/v1/verify"
+REFRESH_TOKEN_EXPIRE_DAYS=7
-MAILER_URLPATHS_RECOVERY="/auth/v1/verify"
+JWT_ISSUER=autogpt-platform
-MAILER_URLPATHS_EMAIL_CHANGE="/auth/v1/verify"

-## Email auth
+# Google OAuth (optional)
-ENABLE_EMAIL_SIGNUP=true
+GOOGLE_CLIENT_ID=
-ENABLE_EMAIL_AUTOCONFIRM=false
+GOOGLE_CLIENT_SECRET=
-SMTP_ADMIN_EMAIL=admin@example.com
-SMTP_HOST=supabase-mail
-SMTP_PORT=2500
-SMTP_USER=fake_mail_user
-SMTP_PASS=fake_mail_password
-SMTP_SENDER_NAME=fake_sender
-ENABLE_ANONYMOUS_USERS=false
-
-## Phone auth
-ENABLE_PHONE_SIGNUP=true
-ENABLE_PHONE_AUTOCONFIRM=true


 ############
-# Studio - Configuration for the Dashboard
+# Email configuration (optional)
 ############

-STUDIO_DEFAULT_ORGANIZATION=Default Organization
+SMTP_HOST=
-STUDIO_DEFAULT_PROJECT=Default Project
+SMTP_PORT=587
+SMTP_USER=
+SMTP_PASS=
+SMTP_FROM_EMAIL=noreply@example.com

-STUDIO_PORT=3000
-# replace if you intend to use Studio outside of localhost
-SUPABASE_PUBLIC_URL=http://localhost:8000
-
-# Enable webp support
-IMGPROXY_ENABLE_WEBP_DETECTION=true
-
-# Add your OpenAI API key to enable SQL Editor Assistant
-OPENAI_API_KEY=
-
-
-############
-# Functions - Configuration for Functions
-############
-# NOTE: VERIFY_JWT applies to all functions. Per-function VERIFY_JWT is not supported yet.
-FUNCTIONS_VERIFY_JWT=false
-
-
-############
-# Logs - Configuration for Logflare
-# Please refer to https://supabase.com/docs/reference/self-hosting-analytics/introduction
-############
-
-LOGFLARE_LOGGER_BACKEND_API_KEY=your-super-secret-and-long-logflare-key
-
-# Change vector.toml sinks to reflect this change
-LOGFLARE_API_KEY=your-super-secret-and-long-logflare-key

 # Docker socket location - this value will differ depending on your OS
 DOCKER_SOCKET_LOCATION=/var/run/docker.sock

-# Google Cloud Project details
-GOOGLE_PROJECT_ID=GOOGLE_PROJECT_ID
-GOOGLE_PROJECT_NUMBER=GOOGLE_PROJECT_NUMBER
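The new optional email block is plain SMTP settings plus a from-address. A hedged sketch of how such values could be consumed follows; it is illustrative only, uses the standard-library smtplib and email modules, and the send_verification_email helper is hypothetical (not part of this diff).

    import os
    import smtplib
    from email.message import EmailMessage

    def send_verification_email(to_addr: str, link: str) -> None:
        """Send a plain-text verification mail using the SMTP_* settings above."""
        msg = EmailMessage()
        msg["From"] = os.environ.get("SMTP_FROM_EMAIL", "noreply@example.com")
        msg["To"] = to_addr
        msg["Subject"] = "Verify your email"
        msg.set_content(f"Click to verify: {link}")

        host = os.environ.get("SMTP_HOST", "")
        port = int(os.environ.get("SMTP_PORT", "587"))
        if not host:
            return  # email is optional; skip silently when unconfigured

        with smtplib.SMTP(host, port) as smtp:
            smtp.starttls()
            user = os.environ.get("SMTP_USER", "")
            password = os.environ.get("SMTP_PASS", "")
            if user:
                smtp.login(user, password)
            smtp.send_message(msg)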
@@ -1,6 +1,6 @@
 .PHONY: start-core stop-core logs-core format lint migrate run-backend run-frontend load-store-agents

-# Run just Supabase + Redis + RabbitMQ
+# Run just PostgreSQL + Redis + RabbitMQ + ClamAV
 start-core:
   docker compose up -d deps

@@ -49,7 +49,7 @@ load-store-agents:
 help:
   @echo "Usage: make <target>"
   @echo "Targets:"
-  @echo " start-core - Start just the core services (Supabase, Redis, RabbitMQ) in background"
+  @echo " start-core - Start just the core services (PostgreSQL, Redis, RabbitMQ, ClamAV) in background"
   @echo " stop-core - Stop the core services"
   @echo " reset-db - Reset the database by deleting the volume"
   @echo " logs-core - Tail the logs for core services"
@@ -16,17 +16,37 @@ ALGO_RECOMMENDATION = (
     "We highly recommend using an asymmetric algorithm such as ES256, "
     "because when leaked, a shared secret would allow anyone to "
     "forge valid tokens and impersonate users. "
-    "More info: https://supabase.com/docs/guides/auth/signing-keys#choosing-the-right-signing-algorithm" # noqa
+    "More info: https://pyjwt.readthedocs.io/en/stable/algorithms.html"
 )


 class Settings:
     def __init__(self):
+        # JWT verification key (public key for asymmetric, shared secret for symmetric)
         self.JWT_VERIFY_KEY: str = os.getenv(
             "JWT_VERIFY_KEY", os.getenv("SUPABASE_JWT_SECRET", "")
         ).strip()
+
+        # JWT signing key (private key for asymmetric, shared secret for symmetric)
+        # Falls back to JWT_VERIFY_KEY for symmetric algorithms like HS256
+        self.JWT_SIGN_KEY: str = os.getenv("JWT_SIGN_KEY", self.JWT_VERIFY_KEY).strip()

         self.JWT_ALGORITHM: str = os.getenv("JWT_SIGN_ALGORITHM", "HS256").strip()

+        # Token expiration settings
+        self.ACCESS_TOKEN_EXPIRE_MINUTES: int = int(
+            os.getenv("ACCESS_TOKEN_EXPIRE_MINUTES", "15")
+        )
+        self.REFRESH_TOKEN_EXPIRE_DAYS: int = int(
+            os.getenv("REFRESH_TOKEN_EXPIRE_DAYS", "7")
+        )
+
+        # JWT issuer claim
+        self.JWT_ISSUER: str = os.getenv("JWT_ISSUER", "autogpt-platform").strip()
+
+        # JWT audience claim
+        self.JWT_AUDIENCE: str = os.getenv("JWT_AUDIENCE", "authenticated").strip()
+
         self.validate()

     def validate(self):
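The patched Settings class separates the signing key from the verification key, which is what the ALGO_RECOMMENDATION text argues for. Below is a usage sketch under stated assumptions: the import path is hypothetical (not shown in this hunk), only an HS256 shared secret is configured, and validate() is assumed to accept a 32+-character secret.

    import os

    from auth_config import Settings  # hypothetical import path; the real module is not named in this diff

    os.environ["JWT_VERIFY_KEY"] = "a-shared-secret-of-at-least-32-characters!!"
    os.environ["ACCESS_TOKEN_EXPIRE_MINUTES"] = "30"

    settings = Settings()
    assert settings.JWT_ALGORITHM == "HS256"                 # default algorithm
    assert settings.JWT_SIGN_KEY == settings.JWT_VERIFY_KEY  # symmetric fallback when JWT_SIGN_KEY is unset
    assert settings.ACCESS_TOKEN_EXPIRE_MINUTES == 30
    assert settings.REFRESH_TOKEN_EXPIRE_DAYS == 7           # default
    assert settings.JWT_ISSUER == "autogpt-platform"         # default issuer claim
    assert settings.JWT_AUDIENCE == "authenticated"          # default audience claim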
@@ -1,4 +1,8 @@
+import hashlib
 import logging
+import secrets
+import uuid
+from datetime import datetime, timedelta, timezone
 from typing import Any

 import jwt
@@ -16,6 +20,57 @@ bearer_jwt_auth = HTTPBearer(
 )


+def create_access_token(
+    user_id: str,
+    email: str,
+    role: str = "authenticated",
+    email_verified: bool = False,
+) -> str:
+    """
+    Generate a new JWT access token.
+
+    :param user_id: The user's unique identifier
+    :param email: The user's email address
+    :param role: The user's role (default: "authenticated")
+    :param email_verified: Whether the user's email is verified
+    :return: Encoded JWT token
+    """
+    settings = get_settings()
+    now = datetime.now(timezone.utc)
+
+    payload = {
+        "sub": user_id,
+        "email": email,
+        "role": role,
+        "email_verified": email_verified,
+        "aud": settings.JWT_AUDIENCE,
+        "iss": settings.JWT_ISSUER,
+        "iat": now,
+        "exp": now + timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES),
+        "jti": str(uuid.uuid4()),  # Unique token ID
+    }
+
+    return jwt.encode(payload, settings.JWT_SIGN_KEY, algorithm=settings.JWT_ALGORITHM)
+
+
+def create_refresh_token() -> tuple[str, str]:
+    """
+    Generate a new refresh token.
+
+    Returns a tuple of (raw_token, hashed_token).
+    The raw token should be sent to the client.
+    The hashed token should be stored in the database.
+    """
+    raw_token = secrets.token_urlsafe(64)
+    hashed_token = hashlib.sha256(raw_token.encode()).hexdigest()
+    return raw_token, hashed_token
+
+
+def hash_token(token: str) -> str:
+    """Hash a token using SHA-256."""
+    return hashlib.sha256(token.encode()).hexdigest()
+
+
 async def get_jwt_payload(
     credentials: HTTPAuthorizationCredentials | None = Security(bearer_jwt_auth),
 ) -> dict[str, Any]:
@@ -52,11 +107,19 @@ def parse_jwt_token(token: str) -> dict[str, Any]:
     """
     settings = get_settings()
     try:
+        # Build decode options
+        options = {
+            "verify_aud": True,
+            "verify_iss": bool(settings.JWT_ISSUER),
+        }
+
         payload = jwt.decode(
             token,
             settings.JWT_VERIFY_KEY,
             algorithms=[settings.JWT_ALGORITHM],
-            audience="authenticated",
+            audience=settings.JWT_AUDIENCE,
+            issuer=settings.JWT_ISSUER if settings.JWT_ISSUER else None,
+            options=options,
         )
         return payload
     except jwt.ExpiredSignatureError:
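A self-contained round trip mirroring the new helpers, written against PyJWT directly so it runs without the project's modules. The secret, issuer, and audience values below are placeholders taken from the defaults visible in this diff.

    import hashlib
    import secrets
    import uuid
    from datetime import datetime, timedelta, timezone

    import jwt  # PyJWT

    SIGN_KEY = "a-shared-secret-of-at-least-32-characters!!"  # placeholder HS256 secret
    ISSUER, AUDIENCE = "autogpt-platform", "authenticated"

    now = datetime.now(timezone.utc)
    access_token = jwt.encode(
        {
            "sub": "user-123",
            "email": "user@example.com",
            "role": "authenticated",
            "email_verified": True,
            "aud": AUDIENCE,
            "iss": ISSUER,
            "iat": now,
            "exp": now + timedelta(minutes=15),
            "jti": str(uuid.uuid4()),  # unique token ID, as in create_access_token
        },
        SIGN_KEY,
        algorithm="HS256",
    )

    # Verification mirrors parse_jwt_token: audience and issuer are both checked.
    claims = jwt.decode(access_token, SIGN_KEY, algorithms=["HS256"], audience=AUDIENCE, issuer=ISSUER)
    assert claims["sub"] == "user-123"

    # Refresh tokens are opaque: the client keeps the raw value, the server stores only the hash.
    raw_refresh = secrets.token_urlsafe(64)
    stored_hash = hashlib.sha256(raw_refresh.encode()).hexdigest()
    assert hashlib.sha256(raw_refresh.encode()).hexdigest() == stored_hash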
@@ -11,6 +11,7 @@ class User:
     email: str
     phone_number: str
     role: str
+    email_verified: bool = False

     @classmethod
     def from_payload(cls, payload):
@@ -18,5 +19,6 @@ class User:
             user_id=payload["sub"],
             email=payload.get("email", ""),
             phone_number=payload.get("phone", ""),
-            role=payload["role"],
+            role=payload.get("role", "authenticated"),
+            email_verified=payload.get("email_verified", False),
         )
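For illustration, a minimal mirror of the patched model shows how the new defaults behave for tokens issued before this change (no role or email_verified claim). This mirror is a sketch only, not the project's actual class definition.

    from dataclasses import dataclass

    @dataclass
    class User:  # minimal mirror of the patched model, for illustration
        user_id: str
        email: str
        phone_number: str
        role: str
        email_verified: bool = False

        @classmethod
        def from_payload(cls, payload):
            return cls(
                user_id=payload["sub"],
                email=payload.get("email", ""),
                phone_number=payload.get("phone", ""),
                role=payload.get("role", "authenticated"),
                email_verified=payload.get("email_verified", False),
            )

    # An older token without the new claims still parses; the defaults kick in.
    legacy = User.from_payload({"sub": "user-123", "email": "user@example.com"})
    assert legacy.role == "authenticated" and legacy.email_verified is False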
autogpt_platform/autogpt_libs/poetry.lock (generated, 414 changes)

@@ -48,6 +48,21 @@ files = [
     {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"},
 ]

+[[package]]
+name = "authlib"
+version = "1.6.6"
+description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "authlib-1.6.6-py2.py3-none-any.whl", hash = "sha256:7d9e9bc535c13974313a87f53e8430eb6ea3d1cf6ae4f6efcd793f2e949143fd"},
+    {file = "authlib-1.6.6.tar.gz", hash = "sha256:45770e8e056d0f283451d9996fbb59b70d45722b45d854d58f32878d0a40c38e"},
+]
+
+[package.dependencies]
+cryptography = "*"
+
 [[package]]
 name = "backports-asyncio-runner"
 version = "1.2.0"
@@ -61,6 +76,71 @@ files = [
     {file = "backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162"},
 ]

+[[package]]
+name = "bcrypt"
+version = "4.3.0"
+description = "Modern password hashing for your software and your servers"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+    {file = "bcrypt-4.3.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f01e060f14b6b57bbb72fc5b4a83ac21c443c9a2ee708e04a10e9192f90a6281"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5eeac541cefd0bb887a371ef73c62c3cd78535e4887b310626036a7c0a817bb"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59e1aa0e2cd871b08ca146ed08445038f42ff75968c7ae50d2fdd7860ade2180"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:0042b2e342e9ae3d2ed22727c1262f76cc4f345683b5c1715f0250cf4277294f"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74a8d21a09f5e025a9a23e7c0fd2c7fe8e7503e4d356c0a2c1486ba010619f09"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:0142b2cb84a009f8452c8c5a33ace5e3dfec4159e7735f5afe9a4d50a8ea722d"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:12fa6ce40cde3f0b899729dbd7d5e8811cb892d31b6f7d0334a1f37748b789fd"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:5bd3cca1f2aa5dbcf39e2aa13dd094ea181f48959e1071265de49cc2b82525af"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:335a420cfd63fc5bc27308e929bee231c15c85cc4c496610ffb17923abf7f231"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:0e30e5e67aed0187a1764911af023043b4542e70a7461ad20e837e94d23e1d6c"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b8d62290ebefd49ee0b3ce7500f5dbdcf13b81402c05f6dafab9a1e1b27212f"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2ef6630e0ec01376f59a006dc72918b1bf436c3b571b80fa1968d775fa02fe7d"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-win32.whl", hash = "sha256:7a4be4cbf241afee43f1c3969b9103a41b40bcb3a3f467ab19f891d9bc4642e4"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5c1949bf259a388863ced887c7861da1df681cb2388645766c89fdfd9004c669"},
+    {file = "bcrypt-4.3.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:f81b0ed2639568bf14749112298f9e4e2b28853dab50a8b357e31798686a036d"},
+    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:864f8f19adbe13b7de11ba15d85d4a428c7e2f344bac110f667676a0ff84924b"},
+    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e36506d001e93bffe59754397572f21bb5dc7c83f54454c990c74a468cd589e"},
+    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:842d08d75d9fe9fb94b18b071090220697f9f184d4547179b60734846461ed59"},
+    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7c03296b85cb87db865d91da79bf63d5609284fc0cab9472fdd8367bbd830753"},
+    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:62f26585e8b219cdc909b6a0069efc5e4267e25d4a3770a364ac58024f62a761"},
+    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:beeefe437218a65322fbd0069eb437e7c98137e08f22c4660ac2dc795c31f8bb"},
+    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:97eea7408db3a5bcce4a55d13245ab3fa566e23b4c67cd227062bb49e26c585d"},
+    {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:191354ebfe305e84f344c5964c7cd5f924a3bfc5d405c75ad07f232b6dffb49f"},
+    {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:41261d64150858eeb5ff43c753c4b216991e0ae16614a308a15d909503617732"},
+    {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:33752b1ba962ee793fa2b6321404bf20011fe45b9afd2a842139de3011898fef"},
+    {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:50e6e80a4bfd23a25f5c05b90167c19030cf9f87930f7cb2eacb99f45d1c3304"},
+    {file = "bcrypt-4.3.0-cp38-abi3-win32.whl", hash = "sha256:67a561c4d9fb9465ec866177e7aebcad08fe23aaf6fbd692a6fab69088abfc51"},
+    {file = "bcrypt-4.3.0-cp38-abi3-win_amd64.whl", hash = "sha256:584027857bc2843772114717a7490a37f68da563b3620f78a849bcb54dc11e62"},
+    {file = "bcrypt-4.3.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d3efb1157edebfd9128e4e46e2ac1a64e0c1fe46fb023158a407c7892b0f8c3"},
+    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08bacc884fd302b611226c01014eca277d48f0a05187666bca23aac0dad6fe24"},
+    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6746e6fec103fcd509b96bacdfdaa2fbde9a553245dbada284435173a6f1aef"},
+    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:afe327968aaf13fc143a56a3360cb27d4ad0345e34da12c7290f1b00b8fe9a8b"},
+    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d9af79d322e735b1fc33404b5765108ae0ff232d4b54666d46730f8ac1a43676"},
+    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f1e3ffa1365e8702dc48c8b360fef8d7afeca482809c5e45e653af82ccd088c1"},
+    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3004df1b323d10021fda07a813fd33e0fd57bef0e9a480bb143877f6cba996fe"},
+    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:531457e5c839d8caea9b589a1bcfe3756b0547d7814e9ce3d437f17da75c32b0"},
+    {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:17a854d9a7a476a89dcef6c8bd119ad23e0f82557afbd2c442777a16408e614f"},
+    {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6fb1fd3ab08c0cbc6826a2e0447610c6f09e983a281b919ed721ad32236b8b23"},
+    {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e965a9c1e9a393b8005031ff52583cedc15b7884fce7deb8b0346388837d6cfe"},
+    {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:79e70b8342a33b52b55d93b3a59223a844962bef479f6a0ea318ebbcadf71505"},
+    {file = "bcrypt-4.3.0-cp39-abi3-win32.whl", hash = "sha256:b4d4e57f0a63fd0b358eb765063ff661328f69a04494427265950c71b992a39a"},
+    {file = "bcrypt-4.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:e53e074b120f2877a35cc6c736b8eb161377caae8925c17688bd46ba56daaa5b"},
+    {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c950d682f0952bafcceaf709761da0a32a942272fad381081b51096ffa46cea1"},
+    {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:107d53b5c67e0bbc3f03ebf5b030e0403d24dda980f8e244795335ba7b4a027d"},
+    {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:b693dbb82b3c27a1604a3dff5bfc5418a7e6a781bb795288141e5f80cf3a3492"},
+    {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:b6354d3760fcd31994a14c89659dee887f1351a06e5dac3c1142307172a79f90"},
+    {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a839320bf27d474e52ef8cb16449bb2ce0ba03ca9f44daba6d93fa1d8828e48a"},
+    {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bdc6a24e754a555d7316fa4774e64c6c3997d27ed2d1964d55920c7c227bc4ce"},
+    {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:55a935b8e9a1d2def0626c4269db3fcd26728cbff1e84f0341465c31c4ee56d8"},
+    {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57967b7a28d855313a963aaea51bf6df89f833db4320da458e5b3c5ab6d4c938"},
+    {file = "bcrypt-4.3.0.tar.gz", hash = "sha256:3a3fd2204178b6d2adcf09cb4f6426ffef54762577a7c9b54c159008cb288c18"},
+]
+
+[package.extras]
+tests = ["pytest (>=3.2.1,!=3.3.0)"]
+typecheck = ["mypy"]
+
 [[package]]
 name = "cachetools"
 version = "5.5.2"
@@ -459,21 +539,6 @@ ssh = ["bcrypt (>=3.1.5)"]
 test = ["certifi (>=2024)", "cryptography-vectors (==45.0.6)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
 test-randomorder = ["pytest-randomly"]

-[[package]]
-name = "deprecation"
-version = "2.1.0"
-description = "A library to handle automated deprecations"
-optional = false
-python-versions = "*"
-groups = ["main"]
-files = [
-    {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"},
-    {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"},
-]
-
-[package.dependencies]
-packaging = "*"
-
 [[package]]
 name = "exceptiongroup"
 version = "1.3.0"
@@ -695,23 +760,6 @@ protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4
 [package.extras]
 grpc = ["grpcio (>=1.44.0,<2.0.0)"]

-[[package]]
-name = "gotrue"
-version = "2.12.3"
-description = "Python Client Library for Supabase Auth"
-optional = false
-python-versions = "<4.0,>=3.9"
-groups = ["main"]
-files = [
-    {file = "gotrue-2.12.3-py3-none-any.whl", hash = "sha256:b1a3c6a5fe3f92e854a026c4c19de58706a96fd5fbdcc3d620b2802f6a46a26b"},
-    {file = "gotrue-2.12.3.tar.gz", hash = "sha256:f874cf9d0b2f0335bfbd0d6e29e3f7aff79998cd1c14d2ad814db8c06cee3852"},
-]
-
-[package.dependencies]
-httpx = {version = ">=0.26,<0.29", extras = ["http2"]}
-pydantic = ">=1.10,<3"
-pyjwt = ">=2.10.1,<3.0.0"
-
 [[package]]
 name = "grpc-google-iam-v1"
 version = "0.14.2"
@@ -822,94 +870,6 @@ files = [
 {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"},
 ]

-[[package]]
-name = "h2"
-version = "4.2.0"
-description = "Pure-Python HTTP/2 protocol implementation"
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
-    {file = "h2-4.2.0-py3-none-any.whl", hash = "sha256:479a53ad425bb29af087f3458a61d30780bc818e4ebcf01f0b536ba916462ed0"},
-    {file = "h2-4.2.0.tar.gz", hash = "sha256:c8a52129695e88b1a0578d8d2cc6842bbd79128ac685463b887ee278126ad01f"},
-]
-
-[package.dependencies]
-hpack = ">=4.1,<5"
-hyperframe = ">=6.1,<7"
-
-[[package]]
-name = "hpack"
-version = "4.1.0"
-description = "Pure-Python HPACK header encoding"
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
-    {file = "hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496"},
-    {file = "hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca"},
-]
-
-[[package]]
-name = "httpcore"
-version = "1.0.9"
-description = "A minimal low-level HTTP client."
-optional = false
-python-versions = ">=3.8"
-groups = ["main"]
-files = [
-    {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"},
-    {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"},
-]
-
-[package.dependencies]
-certifi = "*"
-h11 = ">=0.16"
-
-[package.extras]
-asyncio = ["anyio (>=4.0,<5.0)"]
-http2 = ["h2 (>=3,<5)"]
-socks = ["socksio (==1.*)"]
-trio = ["trio (>=0.22.0,<1.0)"]
-
-[[package]]
-name = "httpx"
-version = "0.28.1"
-description = "The next generation HTTP client."
-optional = false
-python-versions = ">=3.8"
-groups = ["main"]
-files = [
-    {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"},
-    {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"},
-]
-
-[package.dependencies]
-anyio = "*"
-certifi = "*"
-h2 = {version = ">=3,<5", optional = true, markers = "extra == \"http2\""}
-httpcore = "==1.*"
-idna = "*"
-
-[package.extras]
-brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""]
-cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"]
-http2 = ["h2 (>=3,<5)"]
-socks = ["socksio (==1.*)"]
-zstd = ["zstandard (>=0.18.0)"]
-
-[[package]]
-name = "hyperframe"
-version = "6.1.0"
-description = "Pure-Python HTTP/2 framing"
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
-    {file = "hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5"},
-    {file = "hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08"},
-]
-
 [[package]]
 name = "idna"
 version = "3.10"
@@ -1036,7 +996,7 @@ version = "25.0"
 description = "Core utilities for Python packages"
 optional = false
 python-versions = ">=3.8"
-groups = ["main", "dev"]
+groups = ["dev"]
 files = [
     {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"},
     {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"},
@@ -1058,24 +1018,6 @@ files = [
 dev = ["pre-commit", "tox"]
 testing = ["coverage", "pytest", "pytest-benchmark"]

-[[package]]
-name = "postgrest"
-version = "1.1.1"
-description = "PostgREST client for Python. This library provides an ORM interface to PostgREST."
-optional = false
-python-versions = "<4.0,>=3.9"
-groups = ["main"]
-files = [
-    {file = "postgrest-1.1.1-py3-none-any.whl", hash = "sha256:98a6035ee1d14288484bfe36235942c5fb2d26af6d8120dfe3efbe007859251a"},
-    {file = "postgrest-1.1.1.tar.gz", hash = "sha256:f3bb3e8c4602775c75c844a31f565f5f3dd584df4d36d683f0b67d01a86be322"},
-]
-
-[package.dependencies]
-deprecation = ">=2.1.0,<3.0.0"
-httpx = {version = ">=0.26,<0.29", extras = ["http2"]}
-pydantic = ">=1.9,<3.0"
-strenum = {version = ">=0.4.9,<0.5.0", markers = "python_version < \"3.11\""}
-
 [[package]]
 name = "proto-plus"
 version = "1.26.1"
@@ -1462,21 +1404,6 @@ pytest = ">=6.2.5"
 [package.extras]
 dev = ["pre-commit", "pytest-asyncio", "tox"]

-[[package]]
-name = "python-dateutil"
-version = "2.9.0.post0"
-description = "Extensions to the standard Python datetime module"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
-groups = ["main"]
-files = [
-    {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
-    {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
-]
-
-[package.dependencies]
-six = ">=1.5"
-
 [[package]]
 name = "python-dotenv"
 version = "1.1.1"
@@ -1492,22 +1419,6 @@ files = [
 [package.extras]
 cli = ["click (>=5.0)"]

-[[package]]
-name = "realtime"
-version = "2.5.3"
-description = ""
-optional = false
-python-versions = "<4.0,>=3.9"
-groups = ["main"]
-files = [
-    {file = "realtime-2.5.3-py3-none-any.whl", hash = "sha256:eb0994636946eff04c4c7f044f980c8c633c7eb632994f549f61053a474ac970"},
-    {file = "realtime-2.5.3.tar.gz", hash = "sha256:0587594f3bc1c84bf007ff625075b86db6528843e03250dc84f4f2808be3d99a"},
-]
-
-[package.dependencies]
-typing-extensions = ">=4.14.0,<5.0.0"
-websockets = ">=11,<16"
-
 [[package]]
 name = "redis"
 version = "6.2.0"
@@ -1606,18 +1517,6 @@ files = [
 {file = "semver-3.0.4.tar.gz", hash = "sha256:afc7d8c584a5ed0a11033af086e8af226a9c0b206f313e0301f8dd7b6b589602"},
 ]

-[[package]]
-name = "six"
-version = "1.17.0"
-description = "Python 2 and 3 compatibility utilities"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
-groups = ["main"]
-files = [
-    {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"},
-    {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"},
-]
-
 [[package]]
 name = "sniffio"
 version = "1.3.1"
@@ -1649,76 +1548,6 @@ typing-extensions = {version = ">=4.10.0", markers = "python_version < \"3.13\""
 [package.extras]
 full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"]

-[[package]]
-name = "storage3"
-version = "0.12.0"
-description = "Supabase Storage client for Python."
-optional = false
-python-versions = "<4.0,>=3.9"
-groups = ["main"]
-files = [
-    {file = "storage3-0.12.0-py3-none-any.whl", hash = "sha256:1c4585693ca42243ded1512b58e54c697111e91a20916cd14783eebc37e7c87d"},
-    {file = "storage3-0.12.0.tar.gz", hash = "sha256:94243f20922d57738bf42e96b9f5582b4d166e8bf209eccf20b146909f3f71b0"},
-]
-
-[package.dependencies]
-deprecation = ">=2.1.0,<3.0.0"
-httpx = {version = ">=0.26,<0.29", extras = ["http2"]}
-python-dateutil = ">=2.8.2,<3.0.0"
-
-[[package]]
-name = "strenum"
-version = "0.4.15"
-description = "An Enum that inherits from str."
-optional = false
-python-versions = "*"
-groups = ["main"]
-files = [
-    {file = "StrEnum-0.4.15-py3-none-any.whl", hash = "sha256:a30cda4af7cc6b5bf52c8055bc4bf4b2b6b14a93b574626da33df53cf7740659"},
-    {file = "StrEnum-0.4.15.tar.gz", hash = "sha256:878fb5ab705442070e4dd1929bb5e2249511c0bcf2b0eeacf3bcd80875c82eff"},
-]
-
-[package.extras]
-docs = ["myst-parser[linkify]", "sphinx", "sphinx-rtd-theme"]
-release = ["twine"]
-test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"]
-
-[[package]]
-name = "supabase"
-version = "2.16.0"
-description = "Supabase client for Python."
-optional = false
-python-versions = "<4.0,>=3.9"
-groups = ["main"]
-files = [
-    {file = "supabase-2.16.0-py3-none-any.whl", hash = "sha256:99065caab3d90a56650bf39fbd0e49740995da3738ab28706c61bd7f2401db55"},
-    {file = "supabase-2.16.0.tar.gz", hash = "sha256:98f3810158012d4ec0e3083f2e5515f5e10b32bd71e7d458662140e963c1d164"},
-]
-
-[package.dependencies]
-gotrue = ">=2.11.0,<3.0.0"
-httpx = ">=0.26,<0.29"
-postgrest = ">0.19,<1.2"
-realtime = ">=2.4.0,<2.6.0"
-storage3 = ">=0.10,<0.13"
-supafunc = ">=0.9,<0.11"
-
-[[package]]
-name = "supafunc"
-version = "0.10.1"
-description = "Library for Supabase Functions"
-optional = false
-python-versions = "<4.0,>=3.9"
-groups = ["main"]
-files = [
-    {file = "supafunc-0.10.1-py3-none-any.whl", hash = "sha256:26df9bd25ff2ef56cb5bfb8962de98f43331f7f8ff69572bac3ed9c3a9672040"},
-    {file = "supafunc-0.10.1.tar.gz", hash = "sha256:a5b33c8baecb6b5297d25da29a2503e2ec67ee6986f3d44c137e651b8a59a17d"},
-]
-
-[package.dependencies]
-httpx = {version = ">=0.26,<0.29", extras = ["http2"]}
-strenum = ">=0.4.15,<0.5.0"
-
 [[package]]
 name = "tomli"
 version = "2.2.1"
@@ -1827,85 +1656,6 @@ typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""}
|
|||||||
[package.extras]
|
[package.extras]
|
||||||
standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"]
|
standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"]
|
||||||
|
|
||||||
-[[package]]
-name = "websockets"
-version = "15.0.1"
-description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
-    {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b"},
-    {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205"},
-    {file = "websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5756779642579d902eed757b21b0164cd6fe338506a8083eb58af5c372e39d9a"},
-    {file = "websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdfe3e2a29e4db3659dbd5bbf04560cea53dd9610273917799f1cde46aa725e"},
-    {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c2529b320eb9e35af0fa3016c187dffb84a3ecc572bcee7c3ce302bfeba52bf"},
-    {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac1e5c9054fe23226fb11e05a6e630837f074174c4c2f0fe442996112a6de4fb"},
-    {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5df592cd503496351d6dc14f7cdad49f268d8e618f80dce0cd5a36b93c3fc08d"},
-    {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a34631031a8f05657e8e90903e656959234f3a04552259458aac0b0f9ae6fd9"},
-    {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d00075aa65772e7ce9e990cab3ff1de702aa09be3940d1dc88d5abf1ab8a09c"},
-    {file = "websockets-15.0.1-cp310-cp310-win32.whl", hash = "sha256:1234d4ef35db82f5446dca8e35a7da7964d02c127b095e172e54397fb6a6c256"},
-    {file = "websockets-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:39c1fec2c11dc8d89bba6b2bf1556af381611a173ac2b511cf7231622058af41"},
-    {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431"},
-    {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57"},
-    {file = "websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905"},
-    {file = "websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562"},
-    {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792"},
-    {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413"},
-    {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8"},
-    {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3"},
-    {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf"},
-    {file = "websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85"},
-    {file = "websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065"},
-    {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3"},
-    {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665"},
-    {file = "websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2"},
-    {file = "websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215"},
-    {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5"},
-    {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65"},
-    {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe"},
-    {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4"},
-    {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597"},
-    {file = "websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9"},
-    {file = "websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7"},
-    {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931"},
-    {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675"},
-    {file = "websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151"},
-    {file = "websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22"},
-    {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f"},
-    {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8"},
-    {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375"},
-    {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d"},
-    {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4"},
-    {file = "websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa"},
-    {file = "websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561"},
-    {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f4c04ead5aed67c8a1a20491d54cdfba5884507a48dd798ecaf13c74c4489f5"},
-    {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abdc0c6c8c648b4805c5eacd131910d2a7f6455dfd3becab248ef108e89ab16a"},
-    {file = "websockets-15.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a625e06551975f4b7ea7102bc43895b90742746797e2e14b70ed61c43a90f09b"},
-    {file = "websockets-15.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d591f8de75824cbb7acad4e05d2d710484f15f29d4a915092675ad3456f11770"},
-    {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47819cea040f31d670cc8d324bb6435c6f133b8c7a19ec3d61634e62f8d8f9eb"},
-    {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac017dd64572e5c3bd01939121e4d16cf30e5d7e110a119399cf3133b63ad054"},
-    {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4a9fac8e469d04ce6c25bb2610dc535235bd4aa14996b4e6dbebf5e007eba5ee"},
-    {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363c6f671b761efcb30608d24925a382497c12c506b51661883c3e22337265ed"},
-    {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2034693ad3097d5355bfdacfffcbd3ef5694f9718ab7f29c29689a9eae841880"},
-    {file = "websockets-15.0.1-cp39-cp39-win32.whl", hash = "sha256:3b1ac0d3e594bf121308112697cf4b32be538fb1444468fb0a6ae4feebc83411"},
-    {file = "websockets-15.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7643a03db5c95c799b89b31c036d5f27eeb4d259c798e878d6937d71832b1e4"},
-    {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3"},
-    {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1"},
-    {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475"},
-    {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9"},
-    {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04"},
-    {file = "websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122"},
-    {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7f493881579c90fc262d9cdbaa05a6b54b3811c2f300766748db79f098db9940"},
-    {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:47b099e1f4fbc95b701b6e85768e1fcdaf1630f3cbe4765fa216596f12310e2e"},
-    {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67f2b6de947f8c757db2db9c71527933ad0019737ec374a8a6be9a956786aaf9"},
-    {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d08eb4c2b7d6c41da6ca0600c077e93f5adcfd979cd777d747e9ee624556da4b"},
-    {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b826973a4a2ae47ba357e4e82fa44a463b8f168e1ca775ac64521442b19e87f"},
-    {file = "websockets-15.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:21c1fa28a6a7e3cbdc171c694398b6df4744613ce9b36b1a498e816787e28123"},
-    {file = "websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f"},
-    {file = "websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee"},
-]
[[package]]
name = "zipp"
version = "3.23.0"
@@ -1929,4 +1679,4 @@ type = ["pytest-mypy"]

[metadata]
lock-version = "2.1"
python-versions = ">=3.10,<4.0"
-content-hash = "0c40b63c3c921846cf05ccfb4e685d4959854b29c2c302245f9832e20aac6954"
+content-hash = "de209c97aa0feb29d669a20e4422d51bdf3a0872ec37e85ce9b88ce726fcee7a"
@@ -18,7 +18,8 @@ pydantic = "^2.11.7"
pydantic-settings = "^2.10.1"
pyjwt = { version = "^2.10.1", extras = ["crypto"] }
redis = "^6.2.0"
-supabase = "^2.16.0"
+bcrypt = "^4.1.0"
+authlib = "^1.3.0"
uvicorn = "^0.35.0"

[tool.poetry.group.dev.dependencies]
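The dependency swap above removes the Supabase client and brings in bcrypt (password hashing) and authlib (OAuth flows) for the native auth stack. As a rough, hedged illustration of what the bcrypt dependency is typically used for — the helper names below are illustrative, not taken from the platform code — hashing and verifying a password looks like this:

# Illustrative sketch only; function names are not from the AutoGPT codebase.
import bcrypt


def hash_password(plaintext: str) -> bytes:
    # gensalt() embeds the cost factor and salt into the resulting hash
    return bcrypt.hashpw(plaintext.encode("utf-8"), bcrypt.gensalt())


def verify_password(plaintext: str, hashed: bytes) -> bool:
    # checkpw re-derives the hash using the salt stored inside `hashed`
    return bcrypt.checkpw(plaintext.encode("utf-8"), hashed)


if __name__ == "__main__":
    stored = hash_password("correct horse battery staple")
    assert verify_password("correct horse battery staple", stored)
    assert not verify_password("wrong password", stored)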
@@ -27,10 +27,15 @@ REDIS_PORT=6379
RABBITMQ_DEFAULT_USER=rabbitmq_user_default
RABBITMQ_DEFAULT_PASS=k0VMxyIJF9S35f3x2uaw5IWAl6Y536O7

-# Supabase Authentication
-SUPABASE_URL=http://localhost:8000
-SUPABASE_SERVICE_ROLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
+# JWT Authentication
+# Generate a secure random key: python -c "import secrets; print(secrets.token_urlsafe(32))"
+JWT_SIGN_KEY=your-super-secret-jwt-token-with-at-least-32-characters-long
JWT_VERIFY_KEY=your-super-secret-jwt-token-with-at-least-32-characters-long
+JWT_SIGN_ALGORITHM=HS256
+ACCESS_TOKEN_EXPIRE_MINUTES=15
+REFRESH_TOKEN_EXPIRE_DAYS=7
+JWT_ISSUER=autogpt-platform
+JWT_AUDIENCE=authenticated

## ===== REQUIRED SECURITY KEYS ===== ##
# Generate using: from cryptography.fernet import Fernet;Fernet.generate_key().decode()
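The new JWT settings above pair with the pyjwt dependency already declared in pyproject.toml. A minimal sketch of how such symmetric-key settings could be consumed — only the environment variable names come from the diff; the platform's real token helpers may differ:

# Hedged sketch: issue and verify an HS256 token from the env vars above.
import os
from datetime import datetime, timedelta, timezone

import jwt  # PyJWT

SIGN_KEY = os.environ["JWT_SIGN_KEY"]
VERIFY_KEY = os.environ["JWT_VERIFY_KEY"]
ALGORITHM = os.environ.get("JWT_SIGN_ALGORITHM", "HS256")
ISSUER = os.environ.get("JWT_ISSUER", "autogpt-platform")
AUDIENCE = os.environ.get("JWT_AUDIENCE", "authenticated")
ACCESS_TTL = timedelta(minutes=int(os.environ.get("ACCESS_TOKEN_EXPIRE_MINUTES", "15")))


def issue_access_token(user_id: str) -> str:
    now = datetime.now(timezone.utc)
    payload = {
        "sub": user_id,
        "iss": ISSUER,
        "aud": AUDIENCE,
        "iat": now,
        "exp": now + ACCESS_TTL,
    }
    return jwt.encode(payload, SIGN_KEY, algorithm=ALGORITHM)


def verify_access_token(token: str) -> dict:
    # Raises jwt.PyJWTError on expiry, bad signature, or wrong issuer/audience.
    return jwt.decode(
        token, VERIFY_KEY, algorithms=[ALGORITHM], audience=AUDIENCE, issuer=ISSUER
    )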
3
autogpt_platform/backend/.gitignore
vendored
@@ -18,3 +18,6 @@ load-tests/results/
load-tests/*.json
load-tests/*.log
load-tests/node_modules/*
+
+# Migration backups (contain user data)
+migration_backups/
@@ -319,7 +319,7 @@ class CostDollars(BaseModel):

# Helper functions for payload processing
def process_text_field(
-    text: Union[bool, TextEnabled, TextDisabled, TextAdvanced, None]
+    text: Union[bool, TextEnabled, TextDisabled, TextAdvanced, None],
) -> Optional[Union[bool, Dict[str, Any]]]:
    """Process text field for API payload."""
    if text is None:
@@ -400,7 +400,7 @@ def process_contents_settings(contents: Optional[ContentSettings]) -> Dict[str,


def process_context_field(
-    context: Union[bool, dict, ContextEnabled, ContextDisabled, ContextAdvanced, None]
+    context: Union[bool, dict, ContextEnabled, ContextDisabled, ContextAdvanced, None],
) -> Optional[Union[bool, Dict[str, int]]]:
    """Process context field for API payload."""
    if context is None:
@@ -36,12 +36,13 @@ import secrets
import sys
import uuid
from datetime import datetime
-from typing import Optional
+from typing import Optional, cast
from urllib.parse import urlparse

import click
from autogpt_libs.api_key.keysmith import APIKeySmith
from prisma.enums import APIKeyPermission
+from prisma.types import OAuthApplicationCreateInput

keysmith = APIKeySmith()
@@ -834,19 +835,22 @@ async def create_test_app_in_db(

    # Insert into database
    app = await OAuthApplication.prisma().create(
-        data={
-            "id": creds["id"],
-            "name": creds["name"],
-            "description": creds["description"],
-            "clientId": creds["client_id"],
-            "clientSecret": creds["client_secret_hash"],
-            "clientSecretSalt": creds["client_secret_salt"],
-            "redirectUris": creds["redirect_uris"],
-            "grantTypes": creds["grant_types"],
-            "scopes": creds["scopes"],
-            "ownerId": owner_id,
-            "isActive": True,
-        }
+        data=cast(
+            OAuthApplicationCreateInput,
+            {
+                "id": creds["id"],
+                "name": creds["name"],
+                "description": creds["description"],
+                "clientId": creds["client_id"],
+                "clientSecret": creds["client_secret_hash"],
+                "clientSecretSalt": creds["client_secret_salt"],
+                "redirectUris": creds["redirect_uris"],
+                "grantTypes": creds["grant_types"],
+                "scopes": creds["scopes"],
+                "ownerId": owner_id,
+                "isActive": True,
+            },
+        )
    )

    click.echo(f"✓ Created test OAuth application: {app.clientId}")
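The data=cast(OAuthApplicationCreateInput, {...}) change above is the pattern repeated throughout the rest of this comparison: Prisma's generated *CreateInput / *UpsertInput types are TypedDicts, and typing.cast tells the type checker to treat a plain dict literal as that type while being a no-op at runtime. A self-contained sketch of the idea — the UserCreateInput here is a stand-in TypedDict, not the generated Prisma type:

# Illustrative only: why cast() satisfies the type checker without changing behaviour.
from typing import TypedDict, cast


class UserCreateInput(TypedDict, total=False):
    id: str
    email: str
    name: str


def create_user(data: UserCreateInput) -> UserCreateInput:
    # cast() performs no conversion; it only narrows the static type.
    return data


payload = cast(
    UserCreateInput,
    {
        "id": "user-123",
        "email": "user@example.com",
        "name": "Example User",
    },
)
print(create_user(payload))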
@@ -1,12 +1,12 @@
import logging
import uuid
from datetime import datetime, timezone
-from typing import Literal, Optional
+from typing import Literal, Optional, cast

from autogpt_libs.api_key.keysmith import APIKeySmith
from prisma.enums import APIKeyPermission, APIKeyStatus
from prisma.models import APIKey as PrismaAPIKey
-from prisma.types import APIKeyWhereUniqueInput
+from prisma.types import APIKeyCreateInput, APIKeyWhereUniqueInput
from pydantic import Field

from backend.data.includes import MAX_USER_API_KEYS_FETCH
@@ -82,17 +82,20 @@ async def create_api_key(
    generated_key = keysmith.generate_key()

    saved_key_obj = await PrismaAPIKey.prisma().create(
-        data={
-            "id": str(uuid.uuid4()),
-            "name": name,
-            "head": generated_key.head,
-            "tail": generated_key.tail,
-            "hash": generated_key.hash,
-            "salt": generated_key.salt,
-            "permissions": [p for p in permissions],
-            "description": description,
-            "userId": user_id,
-        }
+        data=cast(
+            APIKeyCreateInput,
+            {
+                "id": str(uuid.uuid4()),
+                "name": name,
+                "head": generated_key.head,
+                "tail": generated_key.tail,
+                "hash": generated_key.hash,
+                "salt": generated_key.salt,
+                "permissions": [p for p in permissions],
+                "description": description,
+                "userId": user_id,
+            },
+        )
    )

    return APIKeyInfo.from_db(saved_key_obj), generated_key.key
@@ -14,7 +14,7 @@ import logging
import secrets
import uuid
from datetime import datetime, timedelta, timezone
-from typing import Literal, Optional
+from typing import Literal, Optional, cast

from autogpt_libs.api_key.keysmith import APIKeySmith
from prisma.enums import APIKeyPermission as APIPermission
@@ -22,7 +22,12 @@ from prisma.models import OAuthAccessToken as PrismaOAuthAccessToken
from prisma.models import OAuthApplication as PrismaOAuthApplication
from prisma.models import OAuthAuthorizationCode as PrismaOAuthAuthorizationCode
from prisma.models import OAuthRefreshToken as PrismaOAuthRefreshToken
-from prisma.types import OAuthApplicationUpdateInput
+from prisma.types import (
+    OAuthAccessTokenCreateInput,
+    OAuthApplicationUpdateInput,
+    OAuthAuthorizationCodeCreateInput,
+    OAuthRefreshTokenCreateInput,
+)
from pydantic import BaseModel, Field, SecretStr

from .base import APIAuthorizationInfo
@@ -359,17 +364,20 @@ async def create_authorization_code(
    expires_at = now + AUTHORIZATION_CODE_TTL

    saved_code = await PrismaOAuthAuthorizationCode.prisma().create(
-        data={
-            "id": str(uuid.uuid4()),
-            "code": code,
-            "expiresAt": expires_at,
-            "applicationId": application_id,
-            "userId": user_id,
-            "scopes": [s for s in scopes],
-            "redirectUri": redirect_uri,
-            "codeChallenge": code_challenge,
-            "codeChallengeMethod": code_challenge_method,
-        }
+        data=cast(
+            OAuthAuthorizationCodeCreateInput,
+            {
+                "id": str(uuid.uuid4()),
+                "code": code,
+                "expiresAt": expires_at,
+                "applicationId": application_id,
+                "userId": user_id,
+                "scopes": [s for s in scopes],
+                "redirectUri": redirect_uri,
+                "codeChallenge": code_challenge,
+                "codeChallengeMethod": code_challenge_method,
+            },
+        )
    )

    return OAuthAuthorizationCodeInfo.from_db(saved_code)
@@ -490,14 +498,17 @@ async def create_access_token(
    expires_at = now + ACCESS_TOKEN_TTL

    saved_token = await PrismaOAuthAccessToken.prisma().create(
-        data={
-            "id": str(uuid.uuid4()),
-            "token": token_hash,  # SHA256 hash for direct lookup
-            "expiresAt": expires_at,
-            "applicationId": application_id,
-            "userId": user_id,
-            "scopes": [s for s in scopes],
-        }
+        data=cast(
+            OAuthAccessTokenCreateInput,
+            {
+                "id": str(uuid.uuid4()),
+                "token": token_hash,  # SHA256 hash for direct lookup
+                "expiresAt": expires_at,
+                "applicationId": application_id,
+                "userId": user_id,
+                "scopes": [s for s in scopes],
+            },
+        )
    )

    return OAuthAccessToken.from_db(saved_token, plaintext_token=plaintext_token)
@@ -607,14 +618,17 @@ async def create_refresh_token(
    expires_at = now + REFRESH_TOKEN_TTL

    saved_token = await PrismaOAuthRefreshToken.prisma().create(
-        data={
-            "id": str(uuid.uuid4()),
-            "token": token_hash,  # SHA256 hash for direct lookup
-            "expiresAt": expires_at,
-            "applicationId": application_id,
-            "userId": user_id,
-            "scopes": [s for s in scopes],
-        }
+        data=cast(
+            OAuthRefreshTokenCreateInput,
+            {
+                "id": str(uuid.uuid4()),
+                "token": token_hash,  # SHA256 hash for direct lookup
+                "expiresAt": expires_at,
+                "applicationId": application_id,
+                "userId": user_id,
+                "scopes": [s for s in scopes],
+            },
+        )
    )

    return OAuthRefreshToken.from_db(saved_token, plaintext_token=plaintext_token)
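The access- and refresh-token creates above persist token_hash with the comment "SHA256 hash for direct lookup". A hedged sketch of that general approach (helper names are illustrative, not from the platform code): generate an opaque token, store only its SHA-256 digest, and look it up later by recomputing the digest of whatever the client presents.

# Sketch of storing only a SHA-256 digest of an opaque token.
import hashlib
import secrets


def new_token() -> tuple[str, str]:
    plaintext = secrets.token_urlsafe(32)
    digest = hashlib.sha256(plaintext.encode("utf-8")).hexdigest()
    # Persist `digest`; hand `plaintext` to the client exactly once.
    return plaintext, digest


def lookup_key(presented_token: str) -> str:
    # A deterministic digest allows a direct indexed lookup by hash.
    return hashlib.sha256(presented_token.encode("utf-8")).hexdigest()


plaintext, stored_digest = new_token()
assert lookup_key(plaintext) == stored_digest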
@@ -5,12 +5,14 @@ This test was added to cover a previously untested code path that could lead to
incorrect balance capping behavior.
"""

+from typing import cast
from uuid import uuid4

import pytest
from prisma.enums import CreditTransactionType
from prisma.errors import UniqueViolationError
from prisma.models import CreditTransaction, User, UserBalance
+from prisma.types import UserBalanceUpsertInput, UserCreateInput

from backend.data.credit import UserCredit
from backend.util.json import SafeJson
@@ -21,11 +23,14 @@ async def create_test_user(user_id: str) -> None:
    """Create a test user for ceiling tests."""
    try:
        await User.prisma().create(
-            data={
-                "id": user_id,
-                "email": f"test-{user_id}@example.com",
-                "name": f"Test User {user_id[:8]}",
-            }
+            data=cast(
+                UserCreateInput,
+                {
+                    "id": user_id,
+                    "email": f"test-{user_id}@example.com",
+                    "name": f"Test User {user_id[:8]}",
+                },
+            )
        )
    except UniqueViolationError:
        # User already exists, continue
@@ -33,7 +38,10 @@ async def create_test_user(user_id: str) -> None:

    await UserBalance.prisma().upsert(
        where={"userId": user_id},
-        data={"create": {"userId": user_id, "balance": 0}, "update": {"balance": 0}},
+        data=cast(
+            UserBalanceUpsertInput,
+            {"create": {"userId": user_id, "balance": 0}, "update": {"balance": 0}},
+        ),
    )
@@ -7,6 +7,7 @@ without race conditions, deadlocks, or inconsistent state.

import asyncio
import random
+from typing import cast
from uuid import uuid4

import prisma.enums
@@ -14,6 +15,7 @@ import pytest
from prisma.enums import CreditTransactionType
from prisma.errors import UniqueViolationError
from prisma.models import CreditTransaction, User, UserBalance
+from prisma.types import UserBalanceUpsertInput, UserCreateInput

from backend.data.credit import POSTGRES_INT_MAX, UsageTransactionMetadata, UserCredit
from backend.util.exceptions import InsufficientBalanceError
@@ -28,11 +30,14 @@ async def create_test_user(user_id: str) -> None:
    """Create a test user with initial balance."""
    try:
        await User.prisma().create(
-            data={
-                "id": user_id,
-                "email": f"test-{user_id}@example.com",
-                "name": f"Test User {user_id[:8]}",
-            }
+            data=cast(
+                UserCreateInput,
+                {
+                    "id": user_id,
+                    "email": f"test-{user_id}@example.com",
+                    "name": f"Test User {user_id[:8]}",
+                },
+            )
        )
    except UniqueViolationError:
        # User already exists, continue
@@ -41,7 +46,10 @@ async def create_test_user(user_id: str) -> None:
    # Ensure UserBalance record exists
    await UserBalance.prisma().upsert(
        where={"userId": user_id},
-        data={"create": {"userId": user_id, "balance": 0}, "update": {"balance": 0}},
+        data=cast(
+            UserBalanceUpsertInput,
+            {"create": {"userId": user_id, "balance": 0}, "update": {"balance": 0}},
+        ),
    )

@@ -342,10 +350,13 @@ async def test_integer_overflow_protection(server: SpinTestServer):
    # First, set balance near max
    await UserBalance.prisma().upsert(
        where={"userId": user_id},
-        data={
-            "create": {"userId": user_id, "balance": max_int - 100},
-            "update": {"balance": max_int - 100},
-        },
+        data=cast(
+            UserBalanceUpsertInput,
+            {
+                "create": {"userId": user_id, "balance": max_int - 100},
+                "update": {"balance": max_int - 100},
+            },
+        ),
    )

    # Try to add more than possible - should clamp to POSTGRES_INT_MAX
@@ -5,9 +5,12 @@ These tests run actual database operations to ensure SQL queries work correctly,
which would have caught the CreditTransactionType enum casting bug.
"""

+from typing import cast
+
import pytest
from prisma.enums import CreditTransactionType
from prisma.models import CreditTransaction, User, UserBalance
+from prisma.types import UserCreateInput

from backend.data.credit import (
    AutoTopUpConfig,
@@ -29,12 +32,15 @@ async def cleanup_test_user():
    # Create the user first
    try:
        await User.prisma().create(
-            data={
-                "id": user_id,
-                "email": f"test-{user_id}@example.com",
-                "topUpConfig": SafeJson({}),
-                "timezone": "UTC",
-            }
+            data=cast(
+                UserCreateInput,
+                {
+                    "id": user_id,
+                    "email": f"test-{user_id}@example.com",
+                    "topUpConfig": SafeJson({}),
+                    "timezone": "UTC",
+                },
+            )
        )
    except Exception:
        # User might already exist, that's fine
@@ -6,12 +6,19 @@ are atomic and maintain data consistency.
"""

from datetime import datetime, timezone
+from typing import cast
from unittest.mock import MagicMock, patch

import pytest
import stripe
from prisma.enums import CreditTransactionType
from prisma.models import CreditRefundRequest, CreditTransaction, User, UserBalance
+from prisma.types import (
+    CreditRefundRequestCreateInput,
+    CreditTransactionCreateInput,
+    UserBalanceCreateInput,
+    UserCreateInput,
+)

from backend.data.credit import UserCredit
from backend.util.json import SafeJson
@@ -35,32 +42,41 @@ async def setup_test_user_with_topup():

    # Create user
    await User.prisma().create(
-        data={
-            "id": REFUND_TEST_USER_ID,
-            "email": f"{REFUND_TEST_USER_ID}@example.com",
-            "name": "Refund Test User",
-        }
+        data=cast(
+            UserCreateInput,
+            {
+                "id": REFUND_TEST_USER_ID,
+                "email": f"{REFUND_TEST_USER_ID}@example.com",
+                "name": "Refund Test User",
+            },
+        )
    )

    # Create user balance
    await UserBalance.prisma().create(
-        data={
-            "userId": REFUND_TEST_USER_ID,
-            "balance": 1000,  # $10
-        }
+        data=cast(
+            UserBalanceCreateInput,
+            {
+                "userId": REFUND_TEST_USER_ID,
+                "balance": 1000,  # $10
+            },
+        )
    )

    # Create a top-up transaction that can be refunded
    topup_tx = await CreditTransaction.prisma().create(
-        data={
-            "userId": REFUND_TEST_USER_ID,
-            "amount": 1000,
-            "type": CreditTransactionType.TOP_UP,
-            "transactionKey": "pi_test_12345",
-            "runningBalance": 1000,
-            "isActive": True,
-            "metadata": SafeJson({"stripe_payment_intent": "pi_test_12345"}),
-        }
+        data=cast(
+            CreditTransactionCreateInput,
+            {
+                "userId": REFUND_TEST_USER_ID,
+                "amount": 1000,
+                "type": CreditTransactionType.TOP_UP,
+                "transactionKey": "pi_test_12345",
+                "runningBalance": 1000,
+                "isActive": True,
+                "metadata": SafeJson({"stripe_payment_intent": "pi_test_12345"}),
+            },
+        )
    )

    return topup_tx
@@ -93,12 +109,15 @@ async def test_deduct_credits_atomic(server: SpinTestServer):

    # Create refund request record (simulating webhook flow)
    await CreditRefundRequest.prisma().create(
-        data={
-            "userId": REFUND_TEST_USER_ID,
-            "amount": 500,
-            "transactionKey": topup_tx.transactionKey,  # Should match the original transaction
-            "reason": "Test refund",
-        }
+        data=cast(
+            CreditRefundRequestCreateInput,
+            {
+                "userId": REFUND_TEST_USER_ID,
+                "amount": 500,
+                "transactionKey": topup_tx.transactionKey,  # Should match the original transaction
+                "reason": "Test refund",
+            },
+        )
    )

    # Call deduct_credits
@@ -286,12 +305,15 @@ async def test_concurrent_refunds(server: SpinTestServer):
    refund_requests = []
    for i in range(5):
        req = await CreditRefundRequest.prisma().create(
-            data={
-                "userId": REFUND_TEST_USER_ID,
-                "amount": 100,  # $1 each
-                "transactionKey": topup_tx.transactionKey,
-                "reason": f"Test refund {i}",
-            }
+            data=cast(
+                CreditRefundRequestCreateInput,
+                {
+                    "userId": REFUND_TEST_USER_ID,
+                    "amount": 100,  # $1 each
+                    "transactionKey": topup_tx.transactionKey,
+                    "reason": f"Test refund {i}",
+                },
+            )
        )
        refund_requests.append(req)
@@ -1,8 +1,10 @@
from datetime import datetime, timedelta, timezone
+from typing import cast

import pytest
from prisma.enums import CreditTransactionType
from prisma.models import CreditTransaction, UserBalance
+from prisma.types import CreditTransactionCreateInput, UserBalanceUpsertInput

from backend.blocks.llm import AITextGeneratorBlock
from backend.data.block import get_block
@@ -23,10 +25,13 @@ async def disable_test_user_transactions():
    old_date = datetime.now(timezone.utc) - timedelta(days=35)  # More than a month ago
    await UserBalance.prisma().upsert(
        where={"userId": DEFAULT_USER_ID},
-        data={
-            "create": {"userId": DEFAULT_USER_ID, "balance": 0},
-            "update": {"balance": 0, "updatedAt": old_date},
-        },
+        data=cast(
+            UserBalanceUpsertInput,
+            {
+                "create": {"userId": DEFAULT_USER_ID, "balance": 0},
+                "update": {"balance": 0, "updatedAt": old_date},
+            },
+        ),
    )

@@ -140,23 +145,29 @@ async def test_block_credit_reset(server: SpinTestServer):

    # Manually create a transaction with month 1 timestamp to establish history
    await CreditTransaction.prisma().create(
-        data={
-            "userId": DEFAULT_USER_ID,
-            "amount": 100,
-            "type": CreditTransactionType.TOP_UP,
-            "runningBalance": 1100,
-            "isActive": True,
-            "createdAt": month1,  # Set specific timestamp
-        }
+        data=cast(
+            CreditTransactionCreateInput,
+            {
+                "userId": DEFAULT_USER_ID,
+                "amount": 100,
+                "type": CreditTransactionType.TOP_UP,
+                "runningBalance": 1100,
+                "isActive": True,
+                "createdAt": month1,  # Set specific timestamp
+            },
+        )
    )

    # Update user balance to match
    await UserBalance.prisma().upsert(
        where={"userId": DEFAULT_USER_ID},
-        data={
-            "create": {"userId": DEFAULT_USER_ID, "balance": 1100},
-            "update": {"balance": 1100},
-        },
+        data=cast(
+            UserBalanceUpsertInput,
+            {
+                "create": {"userId": DEFAULT_USER_ID, "balance": 1100},
+                "update": {"balance": 1100},
+            },
+        ),
    )

    # Now test month 2 behavior
@@ -175,14 +186,17 @@ async def test_block_credit_reset(server: SpinTestServer):

    # Create a month 2 transaction to update the last transaction time
    await CreditTransaction.prisma().create(
-        data={
-            "userId": DEFAULT_USER_ID,
-            "amount": -700,  # Spent 700 to get to 400
-            "type": CreditTransactionType.USAGE,
-            "runningBalance": 400,
-            "isActive": True,
-            "createdAt": month2,
-        }
+        data=cast(
+            CreditTransactionCreateInput,
+            {
+                "userId": DEFAULT_USER_ID,
+                "amount": -700,  # Spent 700 to get to 400
+                "type": CreditTransactionType.USAGE,
+                "runningBalance": 400,
+                "isActive": True,
+                "createdAt": month2,
+            },
+        )
    )

    # Move to month 3
@@ -6,12 +6,14 @@ doesn't underflow below POSTGRES_INT_MIN, which could cause integer wraparound i
"""

import asyncio
+from typing import cast
from uuid import uuid4

import pytest
from prisma.enums import CreditTransactionType
from prisma.errors import UniqueViolationError
from prisma.models import CreditTransaction, User, UserBalance
+from prisma.types import UserBalanceUpsertInput, UserCreateInput

from backend.data.credit import POSTGRES_INT_MIN, UserCredit
from backend.util.test import SpinTestServer
@@ -21,11 +23,14 @@ async def create_test_user(user_id: str) -> None:
    """Create a test user for underflow tests."""
    try:
        await User.prisma().create(
-            data={
-                "id": user_id,
-                "email": f"test-{user_id}@example.com",
-                "name": f"Test User {user_id[:8]}",
-            }
+            data=cast(
+                UserCreateInput,
+                {
+                    "id": user_id,
+                    "email": f"test-{user_id}@example.com",
+                    "name": f"Test User {user_id[:8]}",
+                },
+            )
        )
    except UniqueViolationError:
        # User already exists, continue
@@ -33,7 +38,10 @@ async def create_test_user(user_id: str) -> None:

    await UserBalance.prisma().upsert(
        where={"userId": user_id},
-        data={"create": {"userId": user_id, "balance": 0}, "update": {"balance": 0}},
+        data=cast(
+            UserBalanceUpsertInput,
+            {"create": {"userId": user_id, "balance": 0}, "update": {"balance": 0}},
+        ),
    )

@@ -70,10 +78,13 @@ async def test_debug_underflow_step_by_step(server: SpinTestServer):

    await UserBalance.prisma().upsert(
        where={"userId": user_id},
-        data={
-            "create": {"userId": user_id, "balance": initial_balance_target},
-            "update": {"balance": initial_balance_target},
-        },
+        data=cast(
+            UserBalanceUpsertInput,
+            {
+                "create": {"userId": user_id, "balance": initial_balance_target},
+                "update": {"balance": initial_balance_target},
+            },
+        ),
    )

    current_balance = await credit_system.get_credits(user_id)
@@ -110,10 +121,13 @@ async def test_debug_underflow_step_by_step(server: SpinTestServer):
    # Set balance to exactly POSTGRES_INT_MIN
    await UserBalance.prisma().upsert(
        where={"userId": user_id},
-        data={
-            "create": {"userId": user_id, "balance": POSTGRES_INT_MIN},
-            "update": {"balance": POSTGRES_INT_MIN},
-        },
+        data=cast(
+            UserBalanceUpsertInput,
+            {
+                "create": {"userId": user_id, "balance": POSTGRES_INT_MIN},
+                "update": {"balance": POSTGRES_INT_MIN},
+            },
+        ),
    )

    edge_balance = await credit_system.get_credits(user_id)
@@ -152,10 +166,13 @@ async def test_underflow_protection_large_refunds(server: SpinTestServer):
    test_balance = POSTGRES_INT_MIN + 1000
    await UserBalance.prisma().upsert(
        where={"userId": user_id},
-        data={
-            "create": {"userId": user_id, "balance": test_balance},
-            "update": {"balance": test_balance},
-        },
+        data=cast(
+            UserBalanceUpsertInput,
+            {
+                "create": {"userId": user_id, "balance": test_balance},
+                "update": {"balance": test_balance},
+            },
+        ),
    )

    current_balance = await credit_system.get_credits(user_id)
@@ -217,10 +234,13 @@ async def test_multiple_large_refunds_cumulative_underflow(server: SpinTestServe
    initial_balance = POSTGRES_INT_MIN + 500  # Close to minimum but with some room
    await UserBalance.prisma().upsert(
        where={"userId": user_id},
-        data={
-            "create": {"userId": user_id, "balance": initial_balance},
-            "update": {"balance": initial_balance},
-        },
+        data=cast(
+            UserBalanceUpsertInput,
+            {
+                "create": {"userId": user_id, "balance": initial_balance},
+                "update": {"balance": initial_balance},
+            },
+        ),
    )

    # Apply multiple refunds that would cumulatively underflow
@@ -295,10 +315,13 @@ async def test_concurrent_large_refunds_no_underflow(server: SpinTestServer):
    initial_balance = POSTGRES_INT_MIN + 1000  # Close to minimum
    await UserBalance.prisma().upsert(
        where={"userId": user_id},
-        data={
-            "create": {"userId": user_id, "balance": initial_balance},
-            "update": {"balance": initial_balance},
-        },
+        data=cast(
+            UserBalanceUpsertInput,
+            {
+                "create": {"userId": user_id, "balance": initial_balance},
+                "update": {"balance": initial_balance},
+            },
+        ),
    )

    async def large_refund(amount: int, label: str):
@@ -9,11 +9,13 @@ This test ensures that:

import asyncio
from datetime import datetime
+from typing import cast

import pytest
from prisma.enums import CreditTransactionType
from prisma.errors import UniqueViolationError
from prisma.models import CreditTransaction, User, UserBalance
+from prisma.types import UserBalanceCreateInput, UserCreateInput

from backend.data.credit import UsageTransactionMetadata, UserCredit
from backend.util.json import SafeJson
@@ -24,11 +26,14 @@ async def create_test_user(user_id: str) -> None:
    """Create a test user for migration tests."""
    try:
        await User.prisma().create(
-            data={
-                "id": user_id,
-                "email": f"test-{user_id}@example.com",
-                "name": f"Test User {user_id[:8]}",
-            }
+            data=cast(
+                UserCreateInput,
+                {
+                    "id": user_id,
+                    "email": f"test-{user_id}@example.com",
+                    "name": f"Test User {user_id[:8]}",
+                },
+            )
        )
    except UniqueViolationError:
        # User already exists, continue
@@ -121,7 +126,9 @@ async def test_detect_stale_user_balance_queries(server: SpinTestServer):
    try:
        # Create UserBalance with specific value
        await UserBalance.prisma().create(
-            data={"userId": user_id, "balance": 5000}  # $50
+            data=cast(
+                UserBalanceCreateInput, {"userId": user_id, "balance": 5000}
+            )  # $50
        )

        # Verify that get_credits returns UserBalance value (5000), not any stale User.balance value
@@ -160,7 +167,9 @@ async def test_concurrent_operations_use_userbalance_only(server: SpinTestServer

    try:
        # Set initial balance in UserBalance
-        await UserBalance.prisma().create(data={"userId": user_id, "balance": 1000})
+        await UserBalance.prisma().create(
+            data=cast(UserBalanceCreateInput, {"userId": user_id, "balance": 1000})
+        )

        # Run concurrent operations to ensure they all use UserBalance atomic operations
        async def concurrent_spend(amount: int, label: str):
@@ -28,6 +28,7 @@ from prisma.models (
    AgentNodeExecutionKeyValueData,
)
from prisma.types import (
+    AgentGraphExecutionCreateInput,
    AgentGraphExecutionUpdateManyMutationInput,
    AgentGraphExecutionWhereInput,
    AgentNodeExecutionCreateInput,
@@ -35,7 +36,6 @@ from prisma.types (
    AgentNodeExecutionKeyValueDataCreateInput,
    AgentNodeExecutionUpdateInput,
    AgentNodeExecutionWhereInput,
-    AgentNodeExecutionWhereUniqueInput,
)
from pydantic import BaseModel, ConfigDict, JsonValue, ValidationError
from pydantic.fields import Field
@@ -709,37 +709,40 @@ async def create_graph_execution(
        The id of the AgentGraphExecution and the list of ExecutionResult for each node.
    """
    result = await AgentGraphExecution.prisma().create(
-        data={
-            "agentGraphId": graph_id,
-            "agentGraphVersion": graph_version,
-            "executionStatus": ExecutionStatus.INCOMPLETE,
-            "inputs": SafeJson(inputs),
-            "credentialInputs": (
-                SafeJson(credential_inputs) if credential_inputs else Json({})
-            ),
-            "nodesInputMasks": (
-                SafeJson(nodes_input_masks) if nodes_input_masks else Json({})
-            ),
-            "NodeExecutions": {
-                "create": [
-                    AgentNodeExecutionCreateInput(
-                        agentNodeId=node_id,
-                        executionStatus=ExecutionStatus.QUEUED,
-                        queuedTime=datetime.now(tz=timezone.utc),
-                        Input={
-                            "create": [
-                                {"name": name, "data": SafeJson(data)}
-                                for name, data in node_input.items()
-                            ]
-                        },
-                    )
-                    for node_id, node_input in starting_nodes_input
-                ]
-            },
-            "userId": user_id,
-            "agentPresetId": preset_id,
-            "parentGraphExecutionId": parent_graph_exec_id,
-        },
+        data=cast(
+            AgentGraphExecutionCreateInput,
+            {
+                "agentGraphId": graph_id,
+                "agentGraphVersion": graph_version,
+                "executionStatus": ExecutionStatus.INCOMPLETE,
+                "inputs": SafeJson(inputs),
+                "credentialInputs": (
+                    SafeJson(credential_inputs) if credential_inputs else Json({})
+                ),
+                "nodesInputMasks": (
+                    SafeJson(nodes_input_masks) if nodes_input_masks else Json({})
+                ),
+                "NodeExecutions": {
+                    "create": [
+                        AgentNodeExecutionCreateInput(
+                            agentNodeId=node_id,
+                            executionStatus=ExecutionStatus.QUEUED,
+                            queuedTime=datetime.now(tz=timezone.utc),
+                            Input={
+                                "create": [
+                                    {"name": name, "data": SafeJson(data)}
+                                    for name, data in node_input.items()
+                                ]
+                            },
+                        )
+                        for node_id, node_input in starting_nodes_input
+                    ]
+                },
+                "userId": user_id,
+                "agentPresetId": preset_id,
+                "parentGraphExecutionId": parent_graph_exec_id,
+            },
+        ),
        include=GRAPH_EXECUTION_INCLUDE_WITH_NODES,
    )
@@ -831,10 +834,13 @@ async def upsert_execution_output(
    """
    Insert AgentNodeExecutionInputOutput record for as one of AgentNodeExecution.Output.
    """
-    data: AgentNodeExecutionInputOutputCreateInput = {
-        "name": output_name,
-        "referencedByOutputExecId": node_exec_id,
-    }
+    data: AgentNodeExecutionInputOutputCreateInput = cast(
+        AgentNodeExecutionInputOutputCreateInput,
+        {
+            "name": output_name,
+            "referencedByOutputExecId": node_exec_id,
+        },
+    )
    if output_data is not None:
        data["data"] = SafeJson(output_data)
    await AgentNodeExecutionInputOutput.prisma().create(data=data)
@@ -974,25 +980,30 @@ async def update_node_execution_status(
            f"Invalid status transition: {status} has no valid source statuses"
        )

-    if res := await AgentNodeExecution.prisma().update(
-        where=cast(
-            AgentNodeExecutionWhereUniqueInput,
-            {
-                "id": node_exec_id,
-                "executionStatus": {"in": [s.value for s in allowed_from]},
-            },
-        ),
-        data=_get_update_status_data(status, execution_data, stats),
-        include=EXECUTION_RESULT_INCLUDE,
-    ):
-        return NodeExecutionResult.from_db(res)
-
-    if res := await AgentNodeExecution.prisma().find_unique(
-        where={"id": node_exec_id}, include=EXECUTION_RESULT_INCLUDE
-    ):
-        return NodeExecutionResult.from_db(res)
-
-    raise ValueError(f"Execution {node_exec_id} not found.")
+    # First verify the current status allows this transition
+    current_exec = await AgentNodeExecution.prisma().find_unique(
+        where={"id": node_exec_id}, include=EXECUTION_RESULT_INCLUDE
+    )
+
+    if not current_exec:
+        raise ValueError(f"Execution {node_exec_id} not found.")
+
+    # Check if current status allows the requested transition
+    if current_exec.executionStatus not in allowed_from:
+        # Status transition not allowed, return current state without updating
+        return NodeExecutionResult.from_db(current_exec)
+
+    # Status transition is valid, perform the update
+    updated_exec = await AgentNodeExecution.prisma().update(
+        where={"id": node_exec_id},
+        data=_get_update_status_data(status, execution_data, stats),
+        include=EXECUTION_RESULT_INCLUDE,
+    )
+
+    if not updated_exec:
+        raise ValueError(f"Failed to update execution {node_exec_id}.")
+
+    return NodeExecutionResult.from_db(updated_exec)


def _get_update_status_data(
@@ -6,11 +6,11 @@ Handles all database operations for pending human reviews.
import asyncio
import logging
from datetime import datetime, timezone
-from typing import Optional
+from typing import Optional, cast

from prisma.enums import ReviewStatus
from prisma.models import PendingHumanReview
-from prisma.types import PendingHumanReviewUpdateInput
+from prisma.types import PendingHumanReviewUpdateInput, PendingHumanReviewUpsertInput
from pydantic import BaseModel

from backend.server.v2.executions.review.model import (
@@ -66,20 +66,23 @@ async def get_or_create_human_review(
    # Upsert - get existing or create new review
    review = await PendingHumanReview.prisma().upsert(
        where={"nodeExecId": node_exec_id},
-        data={
-            "create": {
-                "userId": user_id,
-                "nodeExecId": node_exec_id,
-                "graphExecId": graph_exec_id,
-                "graphId": graph_id,
-                "graphVersion": graph_version,
-                "payload": SafeJson(input_data),
-                "instructions": message,
-                "editable": editable,
-                "status": ReviewStatus.WAITING,
-            },
-            "update": {},  # Do nothing on update - keep existing review as is
-        },
+        data=cast(
+            PendingHumanReviewUpsertInput,
+            {
+                "create": {
+                    "userId": user_id,
+                    "nodeExecId": node_exec_id,
+                    "graphExecId": graph_exec_id,
+                    "graphId": graph_id,
+                    "graphVersion": graph_version,
+                    "payload": SafeJson(input_data),
+                    "instructions": message,
+                    "editable": editable,
+                    "status": ReviewStatus.WAITING,
+                },
+                "update": {},  # Do nothing on update - keep existing review as is
+            },
+        ),
    )

    logger.info(
@@ -1,13 +1,17 @@
 import re
 from datetime import datetime, timedelta, timezone
-from typing import Any, Literal, Optional
+from typing import Any, Literal, Optional, cast
 from zoneinfo import ZoneInfo
 
 import prisma
 import pydantic
 from prisma.enums import OnboardingStep
 from prisma.models import UserOnboarding
-from prisma.types import UserOnboardingCreateInput, UserOnboardingUpdateInput
+from prisma.types import (
+    UserOnboardingCreateInput,
+    UserOnboardingUpdateInput,
+    UserOnboardingUpsertInput,
+)
 
 from backend.data import execution as execution_db
 from backend.data.credit import get_user_credit_model
@@ -112,10 +116,13 @@ async def update_user_onboarding(user_id: str, data: UserOnboardingUpdate):
 
     return await UserOnboarding.prisma().upsert(
         where={"userId": user_id},
-        data={
-            "create": {"userId": user_id, **update},
-            "update": update,
-        },
+        data=cast(
+            UserOnboardingUpsertInput,
+            {
+                "create": {"userId": user_id, **update},
+                "update": update,
+            },
+        ),
     )
 
 
@@ -49,11 +49,10 @@
       </p>
       <ol style="margin-bottom: 10px;">
         <li>
-          Visit the Supabase Dashboard:
-          https://supabase.com/dashboard/project/bgwpwdsxblryihinutbx/editor
+          Connect to the database using your preferred database client.
         </li>
         <li>
-          Navigate to the <strong>RefundRequest</strong> table.
+          Navigate to the <strong>RefundRequest</strong> table in the <strong>platform</strong> schema.
         </li>
         <li>
           Filter the <code>transactionKey</code> column with the Transaction ID: <strong>{{ data.transaction_id }}</strong>.
13  autogpt_platform/backend/backend/server/auth/__init__.py  Normal file
@@ -0,0 +1,13 @@
"""
Authentication module for the AutoGPT Platform.

This module provides FastAPI-based authentication supporting:
- Email/password authentication with bcrypt hashing
- Google OAuth authentication
- JWT token management (access + refresh tokens)
"""

from .routes import router as auth_router
from .service import AuthService

__all__ = ["auth_router", "AuthService"]
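A sketch of how these exports could be wired into a FastAPI app; the diff does not show where the router is actually registered, so the app object and mounting below are assumptions:

from fastapi import FastAPI

from backend.server.auth import AuthService, auth_router  # exports listed above

app = FastAPI()
app.include_router(auth_router)  # exposes /auth/register, /auth/login, /auth/refresh, ...
auth_service = AuthService()     # the service can also be used directly, outside the HTTP layer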
170  autogpt_platform/backend/backend/server/auth/email.py  Normal file
@@ -0,0 +1,170 @@
"""
Direct email sending for authentication flows.

This module bypasses the notification queue system to ensure auth emails
(password reset, email verification) are sent immediately in all environments.
"""

import logging
import pathlib
from typing import Optional

from jinja2 import Environment, FileSystemLoader
from postmarker.core import PostmarkClient

from backend.util.settings import Settings

logger = logging.getLogger(__name__)
settings = Settings()

# Template directory
TEMPLATE_DIR = pathlib.Path(__file__).parent / "templates"


class AuthEmailSender:
    """Handles direct email sending for authentication flows."""

    def __init__(self):
        if settings.secrets.postmark_server_api_token:
            self.postmark = PostmarkClient(
                server_token=settings.secrets.postmark_server_api_token
            )
        else:
            logger.warning(
                "Postmark server API token not found, auth email sending disabled"
            )
            self.postmark = None

        # Set up Jinja2 environment for templates
        self.jinja_env: Optional[Environment] = None
        if TEMPLATE_DIR.exists():
            self.jinja_env = Environment(
                loader=FileSystemLoader(str(TEMPLATE_DIR)),
                autoescape=True,
            )
        else:
            logger.warning(f"Auth email templates directory not found: {TEMPLATE_DIR}")

    def _get_frontend_url(self) -> str:
        """Get the frontend base URL for email links."""
        return (
            settings.config.frontend_base_url
            or settings.config.platform_base_url
            or "http://localhost:3000"
        )

    def _render_template(
        self, template_name: str, subject: str, **context
    ) -> tuple[str, str]:
        """Render an email template with the base template wrapper."""
        if not self.jinja_env:
            raise RuntimeError("Email templates not available")

        # Render the content template
        content_template = self.jinja_env.get_template(template_name)
        content = content_template.render(**context)

        # Render with base template
        base_template = self.jinja_env.get_template("base.html.jinja2")
        html_body = base_template.render(
            data={"title": subject, "message": content, "unsubscribe_link": None}
        )

        return subject, html_body

    def _send_email(self, to_email: str, subject: str, html_body: str) -> bool:
        """Send an email directly via Postmark."""
        if not self.postmark:
            logger.warning(
                f"Postmark not configured. Would send email to {to_email}: {subject}"
            )
            return False

        try:
            self.postmark.emails.send(  # type: ignore[attr-defined]
                From=settings.config.postmark_sender_email,
                To=to_email,
                Subject=subject,
                HtmlBody=html_body,
            )
            logger.info(f"Auth email sent to {to_email}: {subject}")
            return True
        except Exception as e:
            logger.error(f"Failed to send auth email to {to_email}: {e}")
            return False

    def send_password_reset_email(
        self, to_email: str, reset_token: str, user_name: Optional[str] = None
    ) -> bool:
        """
        Send a password reset email.

        Args:
            to_email: Recipient email address
            reset_token: The raw password reset token
            user_name: Optional user name for personalization

        Returns:
            True if email was sent successfully, False otherwise
        """
        try:
            frontend_url = self._get_frontend_url()
            reset_link = f"{frontend_url}/reset-password?token={reset_token}"

            subject, html_body = self._render_template(
                "password_reset.html.jinja2",
                subject="Reset Your AutoGPT Password",
                reset_link=reset_link,
                user_name=user_name,
                frontend_url=frontend_url,
            )

            return self._send_email(to_email, subject, html_body)
        except Exception as e:
            logger.error(f"Failed to send password reset email to {to_email}: {e}")
            return False

    def send_email_verification(
        self, to_email: str, verification_token: str, user_name: Optional[str] = None
    ) -> bool:
        """
        Send an email verification email.

        Args:
            to_email: Recipient email address
            verification_token: The raw verification token
            user_name: Optional user name for personalization

        Returns:
            True if email was sent successfully, False otherwise
        """
        try:
            frontend_url = self._get_frontend_url()
            verification_link = (
                f"{frontend_url}/verify-email?token={verification_token}"
            )

            subject, html_body = self._render_template(
                "email_verification.html.jinja2",
                subject="Verify Your AutoGPT Email",
                verification_link=verification_link,
                user_name=user_name,
                frontend_url=frontend_url,
            )

            return self._send_email(to_email, subject, html_body)
        except Exception as e:
            logger.error(f"Failed to send verification email to {to_email}: {e}")
            return False


# Singleton instance
_auth_email_sender: Optional[AuthEmailSender] = None


def get_auth_email_sender() -> AuthEmailSender:
    """Get or create the auth email sender singleton."""
    global _auth_email_sender
    if _auth_email_sender is None:
        _auth_email_sender = AuthEmailSender()
    return _auth_email_sender
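A hedged usage sketch for the sender above; the recipient address and token value are placeholders, and the call degrades to a warning plus False when Postmark is not configured:

from backend.server.auth.email import get_auth_email_sender

sender = get_auth_email_sender()
sent = sender.send_password_reset_email(
    to_email="user@example.com",              # placeholder recipient
    reset_token="raw-reset-token-goes-here",  # placeholder token from the auth service
)
print("sent" if sent else "not sent (Postmark missing or send failed)")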
505  autogpt_platform/backend/backend/server/auth/routes.py  Normal file
@@ -0,0 +1,505 @@
"""
Authentication API routes.

Provides endpoints for:
- User registration and login
- Token refresh and logout
- Password reset
- Email verification
- Google OAuth
"""

import logging
import secrets
import time
from typing import Optional

from fastapi import APIRouter, BackgroundTasks, HTTPException, Request
from pydantic import BaseModel, EmailStr, Field

from backend.util.settings import Settings

from .email import get_auth_email_sender
from .service import AuthService

logger = logging.getLogger(__name__)

router = APIRouter(prefix="/auth", tags=["auth"])

# Singleton auth service instance
_auth_service: Optional[AuthService] = None

# In-memory state storage for OAuth CSRF protection
# Format: {state_token: {"created_at": timestamp, "redirect_uri": optional_uri}}
# In production, use Redis for distributed state management
_oauth_states: dict[str, dict] = {}
_STATE_TTL_SECONDS = 600  # 10 minutes


def _cleanup_expired_states() -> None:
    """Remove expired OAuth states."""
    now = time.time()
    expired = [
        k
        for k, v in _oauth_states.items()
        if now - v["created_at"] > _STATE_TTL_SECONDS
    ]
    for k in expired:
        del _oauth_states[k]


def _generate_state() -> str:
    """Generate a cryptographically secure state token."""
    _cleanup_expired_states()
    state = secrets.token_urlsafe(32)
    _oauth_states[state] = {"created_at": time.time()}
    return state


def _validate_state(state: str) -> bool:
    """Validate and consume a state token."""
    if state not in _oauth_states:
        return False
    state_data = _oauth_states.pop(state)
    if time.time() - state_data["created_at"] > _STATE_TTL_SECONDS:
        return False
    return True


def get_auth_service() -> AuthService:
    """Get or create the auth service singleton."""
    global _auth_service
    if _auth_service is None:
        _auth_service = AuthService()
    return _auth_service


# ============= Request/Response Models =============


class RegisterRequest(BaseModel):
    """Request model for user registration."""

    email: EmailStr
    password: str = Field(..., min_length=8)
    name: Optional[str] = None


class LoginRequest(BaseModel):
    """Request model for user login."""

    email: EmailStr
    password: str


class TokenResponse(BaseModel):
    """Response model for authentication tokens."""

    access_token: str
    refresh_token: str
    token_type: str = "bearer"
    expires_in: int


class RefreshRequest(BaseModel):
    """Request model for token refresh."""

    refresh_token: str


class LogoutRequest(BaseModel):
    """Request model for logout."""

    refresh_token: str


class PasswordResetRequest(BaseModel):
    """Request model for password reset request."""

    email: EmailStr


class PasswordResetConfirm(BaseModel):
    """Request model for password reset confirmation."""

    token: str
    new_password: str = Field(..., min_length=8)


class MessageResponse(BaseModel):
    """Generic message response."""

    message: str


class UserResponse(BaseModel):
    """Response model for user info."""

    id: str
    email: str
    name: Optional[str]
    email_verified: bool
    role: str


# ============= Auth Endpoints =============


@router.post("/register", response_model=TokenResponse)
async def register(request: RegisterRequest, background_tasks: BackgroundTasks):
    """
    Register a new user with email and password.

    Returns access and refresh tokens on successful registration.
    Sends a verification email in the background.
    """
    auth_service = get_auth_service()

    try:
        user = await auth_service.register_user(
            email=request.email,
            password=request.password,
            name=request.name,
        )

        # Create verification token and send email in background
        # This is non-critical - don't fail registration if email fails
        try:
            verification_token = await auth_service.create_email_verification_token(
                user.id
            )
            email_sender = get_auth_email_sender()
            background_tasks.add_task(
                email_sender.send_email_verification,
                to_email=user.email,
                verification_token=verification_token,
                user_name=user.name,
            )
        except Exception as e:
            logger.warning(f"Failed to queue verification email for {user.email}: {e}")

        tokens = await auth_service.create_tokens(user)
        return TokenResponse(**tokens)
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))


@router.post("/login", response_model=TokenResponse)
async def login(request: LoginRequest):
    """
    Login with email and password.

    Returns access and refresh tokens on successful authentication.
    """
    auth_service = get_auth_service()

    user = await auth_service.authenticate_user(request.email, request.password)
    if not user:
        raise HTTPException(status_code=401, detail="Invalid email or password")

    tokens = await auth_service.create_tokens(user)
    return TokenResponse(**tokens)


@router.post("/logout", response_model=MessageResponse)
async def logout(request: LogoutRequest):
    """
    Logout by revoking the refresh token.

    This invalidates the refresh token so it cannot be used to get new access tokens.
    """
    auth_service = get_auth_service()

    revoked = await auth_service.revoke_refresh_token(request.refresh_token)
    if not revoked:
        raise HTTPException(status_code=400, detail="Invalid refresh token")

    return MessageResponse(message="Successfully logged out")


@router.post("/refresh", response_model=TokenResponse)
async def refresh_tokens(request: RefreshRequest):
    """
    Refresh access token using a refresh token.

    The old refresh token is invalidated and a new one is returned (token rotation).
    """
    auth_service = get_auth_service()

    tokens = await auth_service.refresh_access_token(request.refresh_token)
    if not tokens:
        raise HTTPException(status_code=401, detail="Invalid or expired refresh token")

    return TokenResponse(**tokens)


@router.post("/password-reset/request", response_model=MessageResponse)
async def request_password_reset(
    request: PasswordResetRequest, background_tasks: BackgroundTasks
):
    """
    Request a password reset email.

    Always returns success to prevent email enumeration attacks.
    If the email exists, a password reset email will be sent.
    """
    auth_service = get_auth_service()

    user = await auth_service.get_user_by_email(request.email)
    if user:
        token = await auth_service.create_password_reset_token(user.id)
        email_sender = get_auth_email_sender()
        background_tasks.add_task(
            email_sender.send_password_reset_email,
            to_email=user.email,
            reset_token=token,
            user_name=user.name,
        )
        logger.info(f"Password reset email queued for user {user.id}")

    # Always return success to prevent email enumeration
    return MessageResponse(
        message="If the email exists, a password reset link has been sent"
    )


@router.post("/password-reset/confirm", response_model=MessageResponse)
async def confirm_password_reset(request: PasswordResetConfirm):
    """
    Reset password using a password reset token.

    All existing sessions (refresh tokens) will be invalidated.
    """
    auth_service = get_auth_service()

    success = await auth_service.reset_password(request.token, request.new_password)
    if not success:
        raise HTTPException(status_code=400, detail="Invalid or expired reset token")

    return MessageResponse(message="Password has been reset successfully")


# ============= Email Verification Endpoints =============


class EmailVerificationRequest(BaseModel):
    """Request model for email verification."""

    token: str


class ResendVerificationRequest(BaseModel):
    """Request model for resending verification email."""

    email: EmailStr


@router.post("/email/verify", response_model=MessageResponse)
async def verify_email(request: EmailVerificationRequest):
    """
    Verify email address using a verification token.

    Marks the user's email as verified if the token is valid.
    """
    auth_service = get_auth_service()

    success = await auth_service.verify_email_token(request.token)
    if not success:
        raise HTTPException(
            status_code=400, detail="Invalid or expired verification token"
        )

    return MessageResponse(message="Email verified successfully")


@router.post("/email/resend-verification", response_model=MessageResponse)
async def resend_verification_email(
    request: ResendVerificationRequest, background_tasks: BackgroundTasks
):
    """
    Resend email verification email.

    Always returns success to prevent email enumeration attacks.
    If the email exists and is not verified, a new verification email will be sent.
    """
    auth_service = get_auth_service()

    user = await auth_service.get_user_by_email(request.email)
    if user and not user.emailVerified:
        token = await auth_service.create_email_verification_token(user.id)
        email_sender = get_auth_email_sender()
        background_tasks.add_task(
            email_sender.send_email_verification,
            to_email=user.email,
            verification_token=token,
            user_name=user.name,
        )
        logger.info(f"Verification email queued for user {user.id}")

    # Always return success to prevent email enumeration
    return MessageResponse(
        message="If the email exists and is not verified, a verification link has been sent"
    )


# ============= Google OAuth Endpoints =============

# Google userinfo endpoint for fetching user profile
GOOGLE_USERINFO_ENDPOINT = "https://www.googleapis.com/oauth2/v2/userinfo"


class GoogleLoginResponse(BaseModel):
    """Response model for Google OAuth login initiation."""

    url: str


def _get_google_oauth_handler():
    """Get a configured GoogleOAuthHandler instance."""
    # Lazy import to avoid circular imports
    from backend.integrations.oauth.google import GoogleOAuthHandler

    settings = Settings()

    client_id = settings.secrets.google_client_id
    client_secret = settings.secrets.google_client_secret

    if not client_id or not client_secret:
        raise HTTPException(
            status_code=500,
            detail="Google OAuth is not configured. Set GOOGLE_CLIENT_ID and GOOGLE_CLIENT_SECRET.",
        )

    # Construct the redirect URI - this should point to the frontend's callback
    # which will then call our /auth/google/callback endpoint
    frontend_base_url = settings.config.frontend_base_url or "http://localhost:3000"
    redirect_uri = f"{frontend_base_url}/auth/callback"

    return GoogleOAuthHandler(
        client_id=client_id,
        client_secret=client_secret,
        redirect_uri=redirect_uri,
    )


@router.get("/google/login", response_model=GoogleLoginResponse)
async def google_login(request: Request):
    """
    Initiate Google OAuth flow.

    Returns the Google OAuth authorization URL to redirect the user to.
    """
    try:
        handler = _get_google_oauth_handler()
        state = _generate_state()

        # Get the authorization URL with default scopes (email, profile, openid)
        auth_url = handler.get_login_url(
            scopes=[],  # Will use DEFAULT_SCOPES from handler
            state=state,
            code_challenge=None,  # Not using PKCE for server-side flow
        )

        logger.info(f"Generated Google OAuth URL for state: {state[:8]}...")
        return GoogleLoginResponse(url=auth_url)

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to initiate Google OAuth: {e}")
        raise HTTPException(status_code=500, detail="Failed to initiate Google OAuth")


@router.get("/google/callback", response_model=TokenResponse)
async def google_callback(request: Request, code: str, state: Optional[str] = None):
    """
    Handle Google OAuth callback.

    Exchanges the authorization code for user info and creates/updates the user.
    Returns access and refresh tokens.
    """
    # Validate state to prevent CSRF attacks
    if not state or not _validate_state(state):
        logger.warning(
            f"Invalid or missing OAuth state: {state[:8] if state else 'None'}..."
        )
        raise HTTPException(status_code=400, detail="Invalid or expired OAuth state")

    try:
        handler = _get_google_oauth_handler()

        # Exchange the authorization code for Google credentials
        logger.info("Exchanging authorization code for tokens...")
        google_creds = await handler.exchange_code_for_tokens(
            code=code,
            scopes=[],  # Will use the scopes from the initial request
            code_verifier=None,
        )

        # The handler returns OAuth2Credentials with email in username field
        email = google_creds.username
        if not email:
            raise HTTPException(
                status_code=400, detail="Failed to retrieve email from Google"
            )

        # Fetch full user info to get Google user ID and name
        # Lazy import to avoid circular imports
        from google.auth.transport.requests import AuthorizedSession
        from google.oauth2.credentials import Credentials

        # We need to create Google Credentials object to use with AuthorizedSession
        creds = Credentials(
            token=google_creds.access_token.get_secret_value(),
            refresh_token=(
                google_creds.refresh_token.get_secret_value()
                if google_creds.refresh_token
                else None
            ),
            token_uri="https://oauth2.googleapis.com/token",
            client_id=handler.client_id,
            client_secret=handler.client_secret,
        )

        session = AuthorizedSession(creds)
        userinfo_response = session.get(GOOGLE_USERINFO_ENDPOINT)

        if not userinfo_response.ok:
            logger.error(
                f"Failed to fetch Google userinfo: {userinfo_response.status_code}"
            )
            raise HTTPException(
                status_code=400, detail="Failed to fetch user info from Google"
            )

        userinfo = userinfo_response.json()
        google_id = userinfo.get("id")
        name = userinfo.get("name")
        email_verified = userinfo.get("verified_email", False)

        if not google_id:
            raise HTTPException(
                status_code=400, detail="Failed to retrieve Google user ID"
            )

        logger.info(f"Google OAuth successful for user: {email}")

        # Create or update the user in our database
        auth_service = get_auth_service()
        user = await auth_service.create_or_update_google_user(
            google_id=google_id,
            email=email,
            name=name,
            email_verified=email_verified,
        )

        # Generate our JWT tokens
        tokens = await auth_service.create_tokens(user)

        return TokenResponse(**tokens)

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Google OAuth callback failed: {e}")
        raise HTTPException(status_code=500, detail="Failed to complete Google OAuth")
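A client-side sketch of the new endpoints using httpx; the base URL (and any API prefix in front of /auth) depends on where this router is mounted, so the values below are assumptions:

import httpx

BASE_URL = "http://localhost:8006"  # assumed backend address

with httpx.Client(base_url=BASE_URL) as client:
    client.post("/auth/register", json={"email": "me@example.com", "password": "s3cretpass"})
    tokens = client.post(
        "/auth/login", json={"email": "me@example.com", "password": "s3cretpass"}
    ).json()
    # Rotation: the old refresh token is revoked server-side and a new pair is returned.
    tokens = client.post("/auth/refresh", json={"refresh_token": tokens["refresh_token"]}).json()
    client.post("/auth/logout", json={"refresh_token": tokens["refresh_token"]})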
499  autogpt_platform/backend/backend/server/auth/service.py  Normal file
@@ -0,0 +1,499 @@
"""
Core authentication service for password verification and token management.
"""

import logging
import re
from datetime import datetime, timedelta, timezone
from typing import Optional, cast

import bcrypt
from autogpt_libs.auth.config import get_settings
from autogpt_libs.auth.jwt_utils import (
    create_access_token,
    create_refresh_token,
    hash_token,
)
from prisma.models import User as PrismaUser
from prisma.types import (
    EmailVerificationTokenCreateInput,
    PasswordResetTokenCreateInput,
    ProfileCreateInput,
    RefreshTokenCreateInput,
    UserCreateInput,
)

from backend.data.db import prisma

logger = logging.getLogger(__name__)


class AuthService:
    """Handles authentication operations including password verification and token management."""

    def __init__(self):
        self.settings = get_settings()

    def hash_password(self, password: str) -> str:
        """Hash a password using bcrypt."""
        return bcrypt.hashpw(password.encode(), bcrypt.gensalt()).decode()

    def verify_password(self, password: str, hashed: str) -> bool:
        """Verify a password against a bcrypt hash."""
        try:
            return bcrypt.checkpw(password.encode(), hashed.encode())
        except Exception as e:
            logger.warning(f"Password verification failed: {e}")
            return False

    async def register_user(
        self,
        email: str,
        password: str,
        name: Optional[str] = None,
    ) -> PrismaUser:
        """
        Register a new user with email and password.

        Creates both a User record and a Profile record.

        :param email: User's email address
        :param password: User's password (will be hashed)
        :param name: Optional display name
        :return: Created user record
        :raises ValueError: If email is already registered
        """
        # Check if user already exists
        existing = await prisma.user.find_unique(where={"email": email})
        if existing:
            raise ValueError("Email already registered")

        password_hash = self.hash_password(password)

        # Generate a unique username from email
        base_username = email.split("@")[0].lower()
        # Remove any characters that aren't alphanumeric or underscore
        base_username = re.sub(r"[^a-z0-9_]", "", base_username)
        if not base_username:
            base_username = "user"

        # Check if username is unique, if not add a number suffix
        username = base_username
        counter = 1
        while await prisma.profile.find_unique(where={"username": username}):
            username = f"{base_username}{counter}"
            counter += 1

        user = await prisma.user.create(
            data=cast(
                UserCreateInput,
                {
                    "email": email,
                    "passwordHash": password_hash,
                    "name": name,
                    "emailVerified": False,
                    "role": "authenticated",
                },
            )
        )

        # Create profile for the user
        display_name = name or base_username
        await prisma.profile.create(
            data=cast(
                ProfileCreateInput,
                {
                    "userId": user.id,
                    "name": display_name,
                    "username": username,
                    "description": "",
                    "links": [],
                },
            )
        )

        logger.info(f"Registered new user: {user.id} with profile username: {username}")
        return user

    async def authenticate_user(
        self, email: str, password: str
    ) -> Optional[PrismaUser]:
        """
        Authenticate a user with email and password.

        :param email: User's email address
        :param password: User's password
        :return: User record if authentication successful, None otherwise
        """
        user = await prisma.user.find_unique(where={"email": email})

        if not user:
            logger.debug(f"Authentication failed: user not found for email {email}")
            return None

        if not user.passwordHash:
            logger.debug(
                f"Authentication failed: no password set for user {user.id} "
                "(likely OAuth-only user)"
            )
            return None

        if self.verify_password(password, user.passwordHash):
            logger.debug(f"Authentication successful for user {user.id}")
            return user

        logger.debug(f"Authentication failed: invalid password for user {user.id}")
        return None

    async def create_tokens(self, user: PrismaUser) -> dict:
        """
        Create access and refresh tokens for a user.

        :param user: The user to create tokens for
        :return: Dictionary with access_token, refresh_token, token_type, and expires_in
        """
        # Create access token
        access_token = create_access_token(
            user_id=user.id,
            email=user.email,
            role=user.role or "authenticated",
            email_verified=user.emailVerified,
        )

        # Create and store refresh token
        raw_refresh_token, hashed_refresh_token = create_refresh_token()
        expires_at = datetime.now(timezone.utc) + timedelta(
            days=self.settings.REFRESH_TOKEN_EXPIRE_DAYS
        )

        await prisma.refreshtoken.create(
            data=cast(
                RefreshTokenCreateInput,
                {
                    "token": hashed_refresh_token,
                    "userId": user.id,
                    "expiresAt": expires_at,
                },
            )
        )

        logger.debug(f"Created tokens for user {user.id}")

        return {
            "access_token": access_token,
            "refresh_token": raw_refresh_token,
            "token_type": "bearer",
            "expires_in": self.settings.ACCESS_TOKEN_EXPIRE_MINUTES * 60,
        }

    async def refresh_access_token(self, refresh_token: str) -> Optional[dict]:
        """
        Refresh an access token using a refresh token.

        Implements token rotation: the old refresh token is revoked and a new one is issued.

        :param refresh_token: The refresh token
        :return: New tokens if successful, None if refresh token is invalid/expired
        """
        hashed_token = hash_token(refresh_token)

        # Find the refresh token
        stored_token = await prisma.refreshtoken.find_first(
            where={
                "token": hashed_token,
                "revokedAt": None,
                "expiresAt": {"gt": datetime.now(timezone.utc)},
            },
            include={"User": True},
        )

        if not stored_token or not stored_token.User:
            logger.debug("Refresh token not found or expired")
            return None

        # Revoke the old token (token rotation)
        await prisma.refreshtoken.update(
            where={"id": stored_token.id},
            data={"revokedAt": datetime.now(timezone.utc)},
        )

        logger.debug(f"Refreshed tokens for user {stored_token.User.id}")

        # Create new tokens
        return await self.create_tokens(stored_token.User)

    async def revoke_refresh_token(self, refresh_token: str) -> bool:
        """
        Revoke a refresh token (logout).

        :param refresh_token: The refresh token to revoke
        :return: True if token was found and revoked, False otherwise
        """
        hashed_token = hash_token(refresh_token)

        result = await prisma.refreshtoken.update_many(
            where={"token": hashed_token, "revokedAt": None},
            data={"revokedAt": datetime.now(timezone.utc)},
        )

        if result > 0:
            logger.debug("Refresh token revoked")
            return True

        logger.debug("Refresh token not found or already revoked")
        return False

    async def revoke_all_user_tokens(self, user_id: str) -> int:
        """
        Revoke all refresh tokens for a user (logout from all devices).

        :param user_id: The user's ID
        :return: Number of tokens revoked
        """
        result = await prisma.refreshtoken.update_many(
            where={"userId": user_id, "revokedAt": None},
            data={"revokedAt": datetime.now(timezone.utc)},
        )

        logger.debug(f"Revoked {result} tokens for user {user_id}")
        return result

    async def get_user_by_google_id(self, google_id: str) -> Optional[PrismaUser]:
        """Get a user by their Google OAuth ID."""
        return await prisma.user.find_unique(where={"googleId": google_id})

    async def get_user_by_email(self, email: str) -> Optional[PrismaUser]:
        """Get a user by their email address."""
        return await prisma.user.find_unique(where={"email": email})

    async def create_or_update_google_user(
        self,
        google_id: str,
        email: str,
        name: Optional[str] = None,
        email_verified: bool = False,
    ) -> PrismaUser:
        """
        Create or update a user from Google OAuth.

        If a user with the Google ID exists, return them.
        If a user with the email exists but no Google ID, link the account.
        Otherwise, create a new user.

        :param google_id: Google's unique user ID
        :param email: User's email from Google
        :param name: User's name from Google
        :param email_verified: Whether Google has verified the email
        :return: The user record
        """
        # Check if user exists with this Google ID
        user = await self.get_user_by_google_id(google_id)
        if user:
            return user

        # Check if user exists with this email
        user = await self.get_user_by_email(email)
        if user:
            # Link Google account to existing user
            updated_user = await prisma.user.update(
                where={"id": user.id},
                data={
                    "googleId": google_id,
                    "emailVerified": email_verified or user.emailVerified,
                },
            )
            if updated_user:
                logger.info(f"Linked Google account to existing user {updated_user.id}")
                return updated_user
            return user

        # Create new user with profile
        # Generate a unique username from email
        base_username = email.split("@")[0].lower()
        base_username = re.sub(r"[^a-z0-9_]", "", base_username)
        if not base_username:
            base_username = "user"

        username = base_username
        counter = 1
        while await prisma.profile.find_unique(where={"username": username}):
            username = f"{base_username}{counter}"
            counter += 1

        user = await prisma.user.create(
            data=cast(
                UserCreateInput,
                {
                    "email": email,
                    "googleId": google_id,
                    "name": name,
                    "emailVerified": email_verified,
                    "role": "authenticated",
                },
            )
        )

        # Create profile for the user
        display_name = name or base_username
        await prisma.profile.create(
            data=cast(
                ProfileCreateInput,
                {
                    "userId": user.id,
                    "name": display_name,
                    "username": username,
                    "description": "",
                    "links": [],
                },
            )
        )

        logger.info(
            f"Created new user from Google OAuth: {user.id} with profile: {username}"
        )
        return user

    async def create_password_reset_token(self, user_id: str) -> str:
        """
        Create a password reset token for a user.

        :param user_id: The user's ID
        :return: The raw token to send to the user
        """
        raw_token, hashed_token = create_refresh_token()  # Reuse token generation
        expires_at = datetime.now(timezone.utc) + timedelta(hours=1)

        await prisma.passwordresettoken.create(
            data=cast(
                PasswordResetTokenCreateInput,
                {
                    "token": hashed_token,
                    "userId": user_id,
                    "expiresAt": expires_at,
                },
            )
        )

        return raw_token

    async def create_email_verification_token(self, user_id: str) -> str:
        """
        Create an email verification token for a user.

        :param user_id: The user's ID
        :return: The raw token to send to the user
        """
        raw_token, hashed_token = create_refresh_token()  # Reuse token generation
        expires_at = datetime.now(timezone.utc) + timedelta(hours=24)

        await prisma.emailverificationtoken.create(
            data=cast(
                EmailVerificationTokenCreateInput,
                {
                    "token": hashed_token,
                    "userId": user_id,
                    "expiresAt": expires_at,
                },
            )
        )

        return raw_token

    async def verify_email_token(self, token: str) -> bool:
        """
        Verify an email verification token and mark the user's email as verified.

        :param token: The raw token from the user
        :return: True if successful, False if token is invalid
        """
        hashed_token = hash_token(token)

        # Find and validate token
        stored_token = await prisma.emailverificationtoken.find_first(
            where={
                "token": hashed_token,
                "usedAt": None,
                "expiresAt": {"gt": datetime.now(timezone.utc)},
            }
        )

        if not stored_token:
            return False

        # Mark email as verified
        await prisma.user.update(
            where={"id": stored_token.userId},
            data={"emailVerified": True},
        )

        # Mark token as used
        await prisma.emailverificationtoken.update(
            where={"id": stored_token.id},
            data={"usedAt": datetime.now(timezone.utc)},
        )

        logger.info(f"Email verified for user {stored_token.userId}")
        return True

    async def verify_password_reset_token(self, token: str) -> Optional[str]:
        """
        Verify a password reset token and return the user ID.

        :param token: The raw token from the user
        :return: User ID if valid, None otherwise
        """
        hashed_token = hash_token(token)

        stored_token = await prisma.passwordresettoken.find_first(
            where={
                "token": hashed_token,
                "usedAt": None,
                "expiresAt": {"gt": datetime.now(timezone.utc)},
            }
        )

        if not stored_token:
            return None

        return stored_token.userId

    async def reset_password(self, token: str, new_password: str) -> bool:
        """
        Reset a user's password using a password reset token.

        :param token: The password reset token
        :param new_password: The new password
        :return: True if successful, False if token is invalid
        """
        hashed_token = hash_token(token)

        # Find and validate token
        stored_token = await prisma.passwordresettoken.find_first(
            where={
                "token": hashed_token,
                "usedAt": None,
                "expiresAt": {"gt": datetime.now(timezone.utc)},
            }
        )

        if not stored_token:
            return False

        # Update password
        password_hash = self.hash_password(new_password)
        await prisma.user.update(
            where={"id": stored_token.userId},
            data={"passwordHash": password_hash},
        )

        # Mark token as used
        await prisma.passwordresettoken.update(
            where={"id": stored_token.id},
            data={"usedAt": datetime.now(timezone.utc)},
        )

        # Revoke all refresh tokens for security
        await self.revoke_all_user_tokens(stored_token.userId)

        logger.info(f"Password reset for user {stored_token.userId}")
        return True
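A sketch of driving AuthService directly, e.g. from a script or test; it assumes the Prisma client imported from backend.data.db is already connected to a database, and the email/password values are placeholders:

import asyncio

from backend.server.auth.service import AuthService

async def demo() -> None:
    svc = AuthService()
    user = await svc.register_user(email="demo@example.com", password="s3cretpass")
    assert await svc.authenticate_user("demo@example.com", "s3cretpass") is not None

    tokens = await svc.create_tokens(user)
    rotated = await svc.refresh_access_token(tokens["refresh_token"])  # old token is revoked
    assert rotated is not None
    await svc.revoke_all_user_tokens(user.id)  # "log out everywhere"

asyncio.run(demo())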
@@ -0,0 +1,302 @@
{# Base Template for Auth Emails #}
{# Template variables:
    data.message: the message to display in the email
    data.title: the title of the email
    data.unsubscribe_link: the link to unsubscribe from the email (optional for auth emails)
#}
<!doctype html>
<html lang="ltr" xmlns:v="urn:schemas-microsoft-com:vml" xmlns:o="urn:schemas-microsoft-com:office:office">

<head>
  <meta charset="utf-8">
  <meta http-equiv="X-UA-Compatible" content="IE=edge">
  <meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=yes">
  <meta name="format-detection" content="telephone=no, date=no, address=no, email=no, url=no">
  <meta name="x-apple-disable-message-reformatting">
  <!--[if !mso]>
  <meta http-equiv="X-UA-Compatible" content="IE=edge">
  <![endif]-->
  <!--[if mso]>
  <style>
    * { font-family: sans-serif !important; }
  </style>
  <noscript>
    <xml>
      <o:OfficeDocumentSettings>
        <o:PixelsPerInch>96</o:PixelsPerInch>
      </o:OfficeDocumentSettings>
    </xml>
  </noscript>
  <![endif]-->
  <style type="text/css">
    /* RESET STYLES */
    html,
    body {
      margin: 0 !important;
      padding: 0 !important;
      width: 100% !important;
      height: 100% !important;
    }

    body {
      -webkit-font-smoothing: antialiased;
      -moz-osx-font-smoothing: grayscale;
      text-rendering: optimizeLegibility;
    }

    .document {
      margin: 0 !important;
      padding: 0 !important;
      width: 100% !important;
    }

    img {
      border: 0;
      outline: none;
      text-decoration: none;
      -ms-interpolation-mode: bicubic;
    }

    table {
      border-collapse: collapse;
    }

    table,
    td {
      mso-table-lspace: 0pt;
      mso-table-rspace: 0pt;
    }

    body,
    table,
    td,
    a {
      -webkit-text-size-adjust: 100%;
      -ms-text-size-adjust: 100%;
    }

    h1,
    h2,
    h3,
    h4,
    h5,
    p {
      margin: 0;
      word-break: break-word;
    }

    /* iOS BLUE LINKS */
    a[x-apple-data-detectors] {
      color: inherit !important;
      text-decoration: none !important;
      font-size: inherit !important;
      font-family: inherit !important;
      font-weight: inherit !important;
      line-height: inherit !important;
    }

    /* ANDROID CENTER FIX */
    div[style*="margin: 16px 0;"] {
      margin: 0 !important;
    }

    /* MEDIA QUERIES */
    @media all and (max-width:639px) {
      .wrapper {
        width: 100% !important;
      }

      .container {
        width: 100% !important;
        min-width: 100% !important;
        padding: 0 !important;
      }

      .row {
        padding-left: 20px !important;
        padding-right: 20px !important;
      }

      .col-mobile {
        width: 20px !important;
      }

      .col {
        display: block !important;
        width: 100% !important;
      }

      .mobile-center {
        text-align: center !important;
        float: none !important;
      }

      .mobile-mx-auto {
        margin: 0 auto !important;
        float: none !important;
      }

      .mobile-left {
        text-align: center !important;
        float: left !important;
      }

      .mobile-hide {
        display: none !important;
      }

      .img {
        width: 100% !important;
        height: auto !important;
      }

      .ml-btn {
        width: 100% !important;
        max-width: 100% !important;
      }

      .ml-btn-container {
        width: 100% !important;
        max-width: 100% !important;
      }
    }
  </style>
  <style type="text/css">
    @import url("https://assets.mlcdn.com/fonts-v2.css?version=1729862");
  </style>
  <style type="text/css">
    @media screen {
      body {
        font-family: 'Poppins', sans-serif;
      }
    }
  </style>
  <title>{{data.title}}</title>
</head>

<body style="margin: 0 !important; padding: 0 !important; background-color:#070629;">
  <div class="document" role="article" aria-roledescription="email" aria-label lang dir="ltr"
    style="background-color:#070629; line-height: 100%; font-size:medium; font-size:max(16px, 1rem);">
    <!-- Main Content -->
    <table width="100%" align="center" cellspacing="0" cellpadding="0" border="0">
      <tr>
        <td class="background" bgcolor="#070629" align="center" valign="top" style="padding: 0 8px;">
          <!-- Email Content -->
          <table class="container" align="center" width="640" cellpadding="0" cellspacing="0" border="0"
            style="max-width: 640px;">
            <tr>
              <td align="center">
                <!-- Logo Section -->
                <table class="container ml-4 ml-default-border" width="640" bgcolor="#E2ECFD" align="center" border="0"
                  cellspacing="0" cellpadding="0" style="width: 640px; min-width: 640px;">
                  <tr>
                    <td class="ml-default-border container" height="40" style="line-height: 40px; min-width: 640px;">
                    </td>
                  </tr>
                  <tr>
                    <td>
                      <table align="center" width="100%" border="0" cellspacing="0" cellpadding="0">
                        <tr>
                          <td class="row" align="center" style="padding: 0 50px;">
                            <img
                              src="https://storage.mlcdn.com/account_image/597379/8QJ8kOjXakVvfe1kJLY2wWCObU1mp5EiDLfBlbQa.png"
                              border="0" alt="" width="120" class="logo"
                              style="max-width: 120px; display: inline-block;">
                          </td>
                        </tr>
                      </table>
                    </td>
                  </tr>
                </table>

                <!-- Main Content Section -->
                <table class="container ml-6 ml-default-border" width="640" bgcolor="#E2ECFD" align="center" border="0"
                  cellspacing="0" cellpadding="0" style="color: #070629; width: 640px; min-width: 640px;">
                  <tr>
                    <td class="row" style="padding: 0 50px;">
                      {{data.message|safe}}
                    </td>
                  </tr>
                </table>

                <!-- Footer Section -->
                <table class="container ml-10 ml-default-border" width="640" bgcolor="#ffffff" align="center" border="0"
                  cellspacing="0" cellpadding="0" style="width: 640px; min-width: 640px;">
                  <tr>
                    <td class="row" style="padding: 0 50px;">
                      <table align="center" width="100%" border="0" cellspacing="0" cellpadding="0">
                        <tr>
                          <td height="20" style="line-height: 20px;"></td>
                        </tr>
                        <tr>
                          <td>
                            <!-- Footer Content -->
                            <table align="center" width="100%" border="0" cellspacing="0" cellpadding="0">
                              <tr>
                                <td class="col" align="left" valign="middle" width="120">
                                  <img
                                    src="https://storage.mlcdn.com/account_image/597379/8QJ8kOjXakVvfe1kJLY2wWCObU1mp5EiDLfBlbQa.png"
                                    border="0" alt="" width="120" class="logo"
                                    style="max-width: 120px; display: inline-block;">
                                </td>
                                <td class="col" width="40" height="30" style="line-height: 30px;"></td>
                                <td class="col mobile-left" align="right" valign="middle" width="250">
                                  <table role="presentation" cellpadding="0" cellspacing="0" border="0">
                                    <tr>
                                      <td align="center" valign="middle" width="18" style="padding: 0 5px 0 0;">
                                        <a href="https://x.com/auto_gpt" target="blank" style="text-decoration: none;">
                                          <img
                                            src="https://assets.mlcdn.com/ml/images/icons/default/rounded_corners/black/x.png"
                                            width="18" alt="x">
                                        </a>
                                      </td>
                                      <td align="center" valign="middle" width="18" style="padding: 0 5px;">
                                        <a href="https://discord.gg/autogpt" target="blank"
                                          style="text-decoration: none;">
                                          <img
                                            src="https://assets.mlcdn.com/ml/images/icons/default/rounded_corners/black/discord.png"
                                            width="18" alt="discord">
                                        </a>
                                      </td>
                                      <td align="center" valign="middle" width="18" style="padding: 0 0 0 5px;">
                                        <a href="https://agpt.co/" target="blank" style="text-decoration: none;">
                                          <img
                                            src="https://assets.mlcdn.com/ml/images/icons/default/rounded_corners/black/website.png"
                                            width="18" alt="website">
                                        </a>
                                      </td>
                                    </tr>
                                  </table>
                                </td>
                              </tr>
                            </table>
                          </td>
                        </tr>
                        <tr>
                          <td height="15" style="line-height: 15px;"></td>
                        </tr>
                        <tr>
                          <td align="center" style="text-align: left!important;">
                            <p
                              style="font-family: 'Poppins', sans-serif; color: #070629; font-size: 12px; line-height: 150%; display: inline-block; margin-bottom: 0;">
                              This is an automated security email from AutoGPT. If you did not request this action, please ignore this email or contact support if you have concerns.
                            </p>
                          </td>
                        </tr>
                        <tr>
                          <td height="20" style="line-height: 20px;"></td>
                        </tr>
                      </table>
                    </td>
                  </tr>
                </table>
              </td>
            </tr>
          </table>
        </td>
      </tr>
    </table>
  </div>
</body>

</html>
@@ -0,0 +1,65 @@
|
|||||||
|
{# Email Verification Template #}
|
||||||
|
{# Variables:
|
||||||
|
verification_link: URL for email verification
|
||||||
|
user_name: Optional user name for personalization
|
||||||
|
frontend_url: Base frontend URL
|
||||||
|
#}
|
||||||
|
<table align="center" width="100%" border="0" cellspacing="0" cellpadding="0">
|
||||||
|
<tr>
|
||||||
|
<td height="30" style="line-height: 30px;"></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td align="center">
|
||||||
|
<h1 style="font-family: 'Poppins', sans-serif; color: #070629; font-size: 28px; line-height: 125%; font-weight: bold; margin-bottom: 20px;">
|
||||||
|
Verify Your Email
|
||||||
|
</h1>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td align="left">
|
||||||
|
<p style="font-family: 'Poppins', sans-serif; color: #070629; font-size: 16px; line-height: 165%; margin-bottom: 20px;">
|
||||||
|
{% if user_name %}Hi {{ user_name }},{% else %}Hi,{% endif %}
|
||||||
|
</p>
|
||||||
|
<p style="font-family: 'Poppins', sans-serif; color: #070629; font-size: 16px; line-height: 165%; margin-bottom: 20px;">
|
||||||
|
Welcome to AutoGPT! Please verify your email address by clicking the button below:
|
||||||
|
</p>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td align="center" style="padding: 20px 0;">
|
||||||
|
<table border="0" cellspacing="0" cellpadding="0">
|
||||||
|
<tr>
|
||||||
|
<td align="center" bgcolor="#4285F4" style="border-radius: 8px;">
|
||||||
|
<a href="{{ verification_link }}" target="_blank"
|
||||||
|
style="display: inline-block; padding: 16px 36px; font-family: 'Poppins', sans-serif; font-size: 16px; font-weight: 600; color: #ffffff; text-decoration: none; border-radius: 8px;">
|
||||||
|
Verify Email
|
||||||
|
</a>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
</table>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td align="left">
|
||||||
|
<p style="font-family: 'Poppins', sans-serif; color: #070629; font-size: 16px; line-height: 165%; margin-bottom: 20px;">
|
||||||
|
This link will expire in <strong>24 hours</strong>.
|
||||||
|
</p>
|
||||||
|
<p style="font-family: 'Poppins', sans-serif; color: #070629; font-size: 16px; line-height: 165%; margin-bottom: 20px;">
|
||||||
|
If you didn't create an account with AutoGPT, you can safely ignore this email.
|
||||||
|
</p>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td align="left">
|
||||||
|
<p style="font-family: 'Poppins', sans-serif; color: #888888; font-size: 14px; line-height: 165%; margin-bottom: 10px;">
|
||||||
|
If the button doesn't work, copy and paste this link into your browser:
|
||||||
|
</p>
|
||||||
|
<p style="font-family: 'Poppins', sans-serif; color: #4285F4; font-size: 14px; line-height: 165%; word-break: break-all;">
|
||||||
|
<a href="{{ verification_link }}" style="color: #4285F4; text-decoration: underline;">{{ verification_link }}</a>
|
||||||
|
</p>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td height="30" style="line-height: 30px;"></td>
|
||||||
|
</tr>
|
||||||
|
</table>
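
For reference, a minimal sketch of how the variables documented at the top of this template ({{ verification_link }}, {{ user_name }}) resolve at render time. It uses an inline Jinja2 snippet that mirrors the template's conditional greeting; the values and URL are illustrative, not the platform's actual mailer code.

```python
# Hedged illustration only: renders a fragment equivalent to the greeting and
# button link above. Values are placeholders, not real platform data.
from jinja2 import Template

fragment = Template(
    "{% if user_name %}Hi {{ user_name }},{% else %}Hi,{% endif %} "
    "verify your email here: {{ verification_link }}"
)

link = "https://example.com/verify?token=abc123"
print(fragment.render(user_name="Ada", verification_link=link))
print(fragment.render(verification_link=link))  # no user_name -> "Hi,"
```
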
|
||||||
@@ -0,0 +1,65 @@
|
|||||||
|
{# Password Reset Email Template #}
|
||||||
|
{# Variables:
|
||||||
|
reset_link: URL for password reset
|
||||||
|
user_name: Optional user name for personalization
|
||||||
|
frontend_url: Base frontend URL
|
||||||
|
#}
|
||||||
|
<table align="center" width="100%" border="0" cellspacing="0" cellpadding="0">
|
||||||
|
<tr>
|
||||||
|
<td height="30" style="line-height: 30px;"></td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td align="center">
|
||||||
|
<h1 style="font-family: 'Poppins', sans-serif; color: #070629; font-size: 28px; line-height: 125%; font-weight: bold; margin-bottom: 20px;">
|
||||||
|
Reset Your Password
|
||||||
|
</h1>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td align="left">
|
||||||
|
<p style="font-family: 'Poppins', sans-serif; color: #070629; font-size: 16px; line-height: 165%; margin-bottom: 20px;">
|
||||||
|
{% if user_name %}Hi {{ user_name }},{% else %}Hi,{% endif %}
|
||||||
|
</p>
|
||||||
|
<p style="font-family: 'Poppins', sans-serif; color: #070629; font-size: 16px; line-height: 165%; margin-bottom: 20px;">
|
||||||
|
We received a request to reset your password for your AutoGPT account. Click the button below to create a new password:
|
||||||
|
</p>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td align="center" style="padding: 20px 0;">
|
||||||
|
<table border="0" cellspacing="0" cellpadding="0">
|
||||||
|
<tr>
|
||||||
|
<td align="center" bgcolor="#4285F4" style="border-radius: 8px;">
|
||||||
|
<a href="{{ reset_link }}" target="_blank"
|
||||||
|
style="display: inline-block; padding: 16px 36px; font-family: 'Poppins', sans-serif; font-size: 16px; font-weight: 600; color: #ffffff; text-decoration: none; border-radius: 8px;">
|
||||||
|
Reset Password
|
||||||
|
</a>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
</table>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td align="left">
|
||||||
|
<p style="font-family: 'Poppins', sans-serif; color: #070629; font-size: 16px; line-height: 165%; margin-bottom: 20px;">
|
||||||
|
This link will expire in <strong>1 hour</strong> for security reasons.
|
||||||
|
</p>
|
||||||
|
<p style="font-family: 'Poppins', sans-serif; color: #070629; font-size: 16px; line-height: 165%; margin-bottom: 20px;">
|
||||||
|
If you didn't request a password reset, you can safely ignore this email. Your password will remain unchanged.
|
||||||
|
</p>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td align="left">
|
||||||
|
<p style="font-family: 'Poppins', sans-serif; color: #888888; font-size: 14px; line-height: 165%; margin-bottom: 10px;">
|
||||||
|
If the button doesn't work, copy and paste this link into your browser:
|
||||||
|
</p>
|
||||||
|
<p style="font-family: 'Poppins', sans-serif; color: #4285F4; font-size: 14px; line-height: 165%; word-break: break-all;">
|
||||||
|
<a href="{{ reset_link }}" style="color: #4285F4; text-decoration: underline;">{{ reset_link }}</a>
|
||||||
|
</p>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td height="30" style="line-height: 30px;"></td>
|
||||||
|
</tr>
|
||||||
|
</table>
|
||||||
@@ -21,6 +21,7 @@ import backend.data.db
|
|||||||
import backend.data.graph
|
import backend.data.graph
|
||||||
import backend.data.user
|
import backend.data.user
|
||||||
import backend.integrations.webhooks.utils
|
import backend.integrations.webhooks.utils
|
||||||
|
import backend.server.auth
|
||||||
import backend.server.routers.oauth
|
import backend.server.routers.oauth
|
||||||
import backend.server.routers.postmark.postmark
|
import backend.server.routers.postmark.postmark
|
||||||
import backend.server.routers.v1
|
import backend.server.routers.v1
|
||||||
@@ -255,6 +256,7 @@ app.add_exception_handler(ValueError, handle_internal_http_error(400))
|
|||||||
app.add_exception_handler(Exception, handle_internal_http_error(500))
|
app.add_exception_handler(Exception, handle_internal_http_error(500))
|
||||||
|
|
||||||
app.include_router(backend.server.routers.v1.v1_router, tags=["v1"], prefix="/api")
|
app.include_router(backend.server.routers.v1.v1_router, tags=["v1"], prefix="/api")
|
||||||
|
app.include_router(backend.server.auth.auth_router, tags=["auth"], prefix="/api")
|
||||||
app.include_router(
|
app.include_router(
|
||||||
backend.server.v2.store.routes.router, tags=["v2"], prefix="/api/store"
|
backend.server.v2.store.routes.router, tags=["v2"], prefix="/api/store"
|
||||||
)
|
)
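
As a rough sketch of what wiring in the new auth router looks like in FastAPI (the router body below is a placeholder stub, not the platform's actual backend.server.auth implementation):

```python
# Minimal, self-contained sketch of the include_router pattern; the login
# endpoint here is a stub and only illustrates the /api prefix + tags wiring.
from fastapi import APIRouter, FastAPI

auth_router = APIRouter()


@auth_router.post("/auth/login")
async def login() -> dict[str, str]:
    # A real handler would validate credentials and mint a JWT.
    return {"access_token": "stub-token", "token_type": "bearer"}


app = FastAPI()
app.include_router(auth_router, tags=["auth"], prefix="/api")
# The stub route is now served at POST /api/auth/login.
```
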
|
||||||
|
|||||||
@@ -16,7 +16,7 @@ import base64
|
|||||||
import hashlib
|
import hashlib
|
||||||
import secrets
|
import secrets
|
||||||
import uuid
|
import uuid
|
||||||
from typing import AsyncGenerator
|
from typing import AsyncGenerator, cast
|
||||||
|
|
||||||
import httpx
|
import httpx
|
||||||
import pytest
|
import pytest
|
||||||
@@ -27,6 +27,13 @@ from prisma.models import OAuthApplication as PrismaOAuthApplication
|
|||||||
from prisma.models import OAuthAuthorizationCode as PrismaOAuthAuthorizationCode
|
from prisma.models import OAuthAuthorizationCode as PrismaOAuthAuthorizationCode
|
||||||
from prisma.models import OAuthRefreshToken as PrismaOAuthRefreshToken
|
from prisma.models import OAuthRefreshToken as PrismaOAuthRefreshToken
|
||||||
from prisma.models import User as PrismaUser
|
from prisma.models import User as PrismaUser
|
||||||
|
from prisma.types import (
|
||||||
|
OAuthAccessTokenCreateInput,
|
||||||
|
OAuthApplicationCreateInput,
|
||||||
|
OAuthAuthorizationCodeCreateInput,
|
||||||
|
OAuthRefreshTokenCreateInput,
|
||||||
|
UserCreateInput,
|
||||||
|
)
|
||||||
|
|
||||||
from backend.server.rest_api import app
|
from backend.server.rest_api import app
|
||||||
|
|
||||||
@@ -48,11 +55,14 @@ def test_user_id() -> str:
|
|||||||
async def test_user(server, test_user_id: str):
|
async def test_user(server, test_user_id: str):
|
||||||
"""Create a test user in the database."""
|
"""Create a test user in the database."""
|
||||||
await PrismaUser.prisma().create(
|
await PrismaUser.prisma().create(
|
||||||
data={
|
data=cast(
|
||||||
"id": test_user_id,
|
UserCreateInput,
|
||||||
"email": f"oauth-test-{test_user_id}@example.com",
|
{
|
||||||
"name": "OAuth Test User",
|
"id": test_user_id,
|
||||||
}
|
"email": f"oauth-test-{test_user_id}@example.com",
|
||||||
|
"name": "OAuth Test User",
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
yield test_user_id
|
yield test_user_id
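
The fixture changes in this hunk (and the ones that follow) all apply the same pattern: the plain dict passed to Prisma's create() is wrapped in typing.cast with the corresponding generated *CreateInput type. A simplified sketch of why that satisfies the type checker without changing runtime behaviour (the TypedDict below is a stand-in, not the real generated prisma.types.UserCreateInput):

```python
# Simplified stand-in type for illustration; prisma-client-py generates the
# real *CreateInput TypedDicts from the schema.
from typing import TypedDict, cast


class UserCreateInput(TypedDict, total=False):
    id: str
    email: str
    name: str


def build_user_input(user_id: str) -> UserCreateInput:
    # cast() is a no-op at runtime; it only asserts to the type checker that
    # this dict literal conforms to the expected CreateInput shape.
    return cast(
        UserCreateInput,
        {
            "id": user_id,
            "email": f"oauth-test-{user_id}@example.com",
            "name": "OAuth Test User",
        },
    )


print(build_user_input("123e4567-e89b-12d3-a456-426614174000"))
```
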
|
||||||
@@ -77,22 +87,25 @@ async def test_oauth_app(test_user: str):
|
|||||||
client_secret_hash, client_secret_salt = keysmith.hash_key(client_secret_plaintext)
|
client_secret_hash, client_secret_salt = keysmith.hash_key(client_secret_plaintext)
|
||||||
|
|
||||||
await PrismaOAuthApplication.prisma().create(
|
await PrismaOAuthApplication.prisma().create(
|
||||||
data={
|
data=cast(
|
||||||
"id": app_id,
|
OAuthApplicationCreateInput,
|
||||||
"name": "Test OAuth App",
|
{
|
||||||
"description": "Test application for integration tests",
|
"id": app_id,
|
||||||
"clientId": client_id,
|
"name": "Test OAuth App",
|
||||||
"clientSecret": client_secret_hash,
|
"description": "Test application for integration tests",
|
||||||
"clientSecretSalt": client_secret_salt,
|
"clientId": client_id,
|
||||||
"redirectUris": [
|
"clientSecret": client_secret_hash,
|
||||||
"https://example.com/callback",
|
"clientSecretSalt": client_secret_salt,
|
||||||
"http://localhost:3000/callback",
|
"redirectUris": [
|
||||||
],
|
"https://example.com/callback",
|
||||||
"grantTypes": ["authorization_code", "refresh_token"],
|
"http://localhost:3000/callback",
|
||||||
"scopes": [APIKeyPermission.EXECUTE_GRAPH, APIKeyPermission.READ_GRAPH],
|
],
|
||||||
"ownerId": test_user,
|
"grantTypes": ["authorization_code", "refresh_token"],
|
||||||
"isActive": True,
|
"scopes": [APIKeyPermission.EXECUTE_GRAPH, APIKeyPermission.READ_GRAPH],
|
||||||
}
|
"ownerId": test_user,
|
||||||
|
"isActive": True,
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
yield {
|
yield {
|
||||||
@@ -296,19 +309,22 @@ async def inactive_oauth_app(test_user: str):
|
|||||||
client_secret_hash, client_secret_salt = keysmith.hash_key(client_secret_plaintext)
|
client_secret_hash, client_secret_salt = keysmith.hash_key(client_secret_plaintext)
|
||||||
|
|
||||||
await PrismaOAuthApplication.prisma().create(
|
await PrismaOAuthApplication.prisma().create(
|
||||||
data={
|
data=cast(
|
||||||
"id": app_id,
|
OAuthApplicationCreateInput,
|
||||||
"name": "Inactive OAuth App",
|
{
|
||||||
"description": "Inactive test application",
|
"id": app_id,
|
||||||
"clientId": client_id,
|
"name": "Inactive OAuth App",
|
||||||
"clientSecret": client_secret_hash,
|
"description": "Inactive test application",
|
||||||
"clientSecretSalt": client_secret_salt,
|
"clientId": client_id,
|
||||||
"redirectUris": ["https://example.com/callback"],
|
"clientSecret": client_secret_hash,
|
||||||
"grantTypes": ["authorization_code", "refresh_token"],
|
"clientSecretSalt": client_secret_salt,
|
||||||
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
|
"redirectUris": ["https://example.com/callback"],
|
||||||
"ownerId": test_user,
|
"grantTypes": ["authorization_code", "refresh_token"],
|
||||||
"isActive": False, # Inactive!
|
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
|
||||||
}
|
"ownerId": test_user,
|
||||||
|
"isActive": False, # Inactive!
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
yield {
|
yield {
|
||||||
@@ -699,14 +715,17 @@ async def test_token_authorization_code_expired(
|
|||||||
now = datetime.now(timezone.utc)
|
now = datetime.now(timezone.utc)
|
||||||
|
|
||||||
await PrismaOAuthAuthorizationCode.prisma().create(
|
await PrismaOAuthAuthorizationCode.prisma().create(
|
||||||
data={
|
data=cast(
|
||||||
"code": expired_code,
|
OAuthAuthorizationCodeCreateInput,
|
||||||
"applicationId": test_oauth_app["id"],
|
{
|
||||||
"userId": test_user,
|
"code": expired_code,
|
||||||
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
|
"applicationId": test_oauth_app["id"],
|
||||||
"redirectUri": test_oauth_app["redirect_uri"],
|
"userId": test_user,
|
||||||
"expiresAt": now - timedelta(hours=1), # Already expired
|
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
|
||||||
}
|
"redirectUri": test_oauth_app["redirect_uri"],
|
||||||
|
"expiresAt": now - timedelta(hours=1), # Already expired
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
response = await client.post(
|
response = await client.post(
|
||||||
@@ -942,13 +961,16 @@ async def test_token_refresh_expired(
|
|||||||
now = datetime.now(timezone.utc)
|
now = datetime.now(timezone.utc)
|
||||||
|
|
||||||
await PrismaOAuthRefreshToken.prisma().create(
|
await PrismaOAuthRefreshToken.prisma().create(
|
||||||
data={
|
data=cast(
|
||||||
"token": expired_token_hash,
|
OAuthRefreshTokenCreateInput,
|
||||||
"applicationId": test_oauth_app["id"],
|
{
|
||||||
"userId": test_user,
|
"token": expired_token_hash,
|
||||||
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
|
"applicationId": test_oauth_app["id"],
|
||||||
"expiresAt": now - timedelta(days=1), # Already expired
|
"userId": test_user,
|
||||||
}
|
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
|
||||||
|
"expiresAt": now - timedelta(days=1), # Already expired
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
response = await client.post(
|
response = await client.post(
|
||||||
@@ -980,14 +1002,17 @@ async def test_token_refresh_revoked(
|
|||||||
now = datetime.now(timezone.utc)
|
now = datetime.now(timezone.utc)
|
||||||
|
|
||||||
await PrismaOAuthRefreshToken.prisma().create(
|
await PrismaOAuthRefreshToken.prisma().create(
|
||||||
data={
|
data=cast(
|
||||||
"token": revoked_token_hash,
|
OAuthRefreshTokenCreateInput,
|
||||||
"applicationId": test_oauth_app["id"],
|
{
|
||||||
"userId": test_user,
|
"token": revoked_token_hash,
|
||||||
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
|
"applicationId": test_oauth_app["id"],
|
||||||
"expiresAt": now + timedelta(days=30), # Not expired
|
"userId": test_user,
|
||||||
"revokedAt": now - timedelta(hours=1), # But revoked
|
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
|
||||||
}
|
"expiresAt": now + timedelta(days=30), # Not expired
|
||||||
|
"revokedAt": now - timedelta(hours=1), # But revoked
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
response = await client.post(
|
response = await client.post(
|
||||||
@@ -1013,19 +1038,22 @@ async def other_oauth_app(test_user: str):
|
|||||||
client_secret_hash, client_secret_salt = keysmith.hash_key(client_secret_plaintext)
|
client_secret_hash, client_secret_salt = keysmith.hash_key(client_secret_plaintext)
|
||||||
|
|
||||||
await PrismaOAuthApplication.prisma().create(
|
await PrismaOAuthApplication.prisma().create(
|
||||||
data={
|
data=cast(
|
||||||
"id": app_id,
|
OAuthApplicationCreateInput,
|
||||||
"name": "Other OAuth App",
|
{
|
||||||
"description": "Second test application",
|
"id": app_id,
|
||||||
"clientId": client_id,
|
"name": "Other OAuth App",
|
||||||
"clientSecret": client_secret_hash,
|
"description": "Second test application",
|
||||||
"clientSecretSalt": client_secret_salt,
|
"clientId": client_id,
|
||||||
"redirectUris": ["https://other.example.com/callback"],
|
"clientSecret": client_secret_hash,
|
||||||
"grantTypes": ["authorization_code", "refresh_token"],
|
"clientSecretSalt": client_secret_salt,
|
||||||
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
|
"redirectUris": ["https://other.example.com/callback"],
|
||||||
"ownerId": test_user,
|
"grantTypes": ["authorization_code", "refresh_token"],
|
||||||
"isActive": True,
|
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
|
||||||
}
|
"ownerId": test_user,
|
||||||
|
"isActive": True,
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
yield {
|
yield {
|
||||||
@@ -1052,13 +1080,16 @@ async def test_token_refresh_wrong_application(
|
|||||||
now = datetime.now(timezone.utc)
|
now = datetime.now(timezone.utc)
|
||||||
|
|
||||||
await PrismaOAuthRefreshToken.prisma().create(
|
await PrismaOAuthRefreshToken.prisma().create(
|
||||||
data={
|
data=cast(
|
||||||
"token": token_hash,
|
OAuthRefreshTokenCreateInput,
|
||||||
"applicationId": test_oauth_app["id"], # Belongs to test_oauth_app
|
{
|
||||||
"userId": test_user,
|
"token": token_hash,
|
||||||
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
|
"applicationId": test_oauth_app["id"], # Belongs to test_oauth_app
|
||||||
"expiresAt": now + timedelta(days=30),
|
"userId": test_user,
|
||||||
}
|
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
|
||||||
|
"expiresAt": now + timedelta(days=30),
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
# Try to use it with `other_oauth_app`
|
# Try to use it with `other_oauth_app`
|
||||||
@@ -1267,19 +1298,22 @@ async def test_validate_access_token_fails_when_app_disabled(
|
|||||||
client_secret_hash, client_secret_salt = keysmith.hash_key(client_secret_plaintext)
|
client_secret_hash, client_secret_salt = keysmith.hash_key(client_secret_plaintext)
|
||||||
|
|
||||||
await PrismaOAuthApplication.prisma().create(
|
await PrismaOAuthApplication.prisma().create(
|
||||||
data={
|
data=cast(
|
||||||
"id": app_id,
|
OAuthApplicationCreateInput,
|
||||||
"name": "App To Be Disabled",
|
{
|
||||||
"description": "Test app for disabled validation",
|
"id": app_id,
|
||||||
"clientId": client_id,
|
"name": "App To Be Disabled",
|
||||||
"clientSecret": client_secret_hash,
|
"description": "Test app for disabled validation",
|
||||||
"clientSecretSalt": client_secret_salt,
|
"clientId": client_id,
|
||||||
"redirectUris": ["https://example.com/callback"],
|
"clientSecret": client_secret_hash,
|
||||||
"grantTypes": ["authorization_code"],
|
"clientSecretSalt": client_secret_salt,
|
||||||
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
|
"redirectUris": ["https://example.com/callback"],
|
||||||
"ownerId": test_user,
|
"grantTypes": ["authorization_code"],
|
||||||
"isActive": True,
|
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
|
||||||
}
|
"ownerId": test_user,
|
||||||
|
"isActive": True,
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
# Create an access token directly in the database
|
# Create an access token directly in the database
|
||||||
@@ -1288,13 +1322,16 @@ async def test_validate_access_token_fails_when_app_disabled(
|
|||||||
now = datetime.now(timezone.utc)
|
now = datetime.now(timezone.utc)
|
||||||
|
|
||||||
await PrismaOAuthAccessToken.prisma().create(
|
await PrismaOAuthAccessToken.prisma().create(
|
||||||
data={
|
data=cast(
|
||||||
"token": token_hash,
|
OAuthAccessTokenCreateInput,
|
||||||
"applicationId": app_id,
|
{
|
||||||
"userId": test_user,
|
"token": token_hash,
|
||||||
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
|
"applicationId": app_id,
|
||||||
"expiresAt": now + timedelta(hours=1),
|
"userId": test_user,
|
||||||
}
|
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
|
||||||
|
"expiresAt": now + timedelta(hours=1),
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
# Token should be valid while app is active
|
# Token should be valid while app is active
|
||||||
@@ -1561,19 +1598,22 @@ async def test_revoke_token_from_different_app_fails_silently(
|
|||||||
)
|
)
|
||||||
|
|
||||||
await PrismaOAuthApplication.prisma().create(
|
await PrismaOAuthApplication.prisma().create(
|
||||||
data={
|
data=cast(
|
||||||
"id": app2_id,
|
OAuthApplicationCreateInput,
|
||||||
"name": "Second Test OAuth App",
|
{
|
||||||
"description": "Second test application for cross-app revocation test",
|
"id": app2_id,
|
||||||
"clientId": app2_client_id,
|
"name": "Second Test OAuth App",
|
||||||
"clientSecret": app2_client_secret_hash,
|
"description": "Second test application for cross-app revocation test",
|
||||||
"clientSecretSalt": app2_client_secret_salt,
|
"clientId": app2_client_id,
|
||||||
"redirectUris": ["https://other-app.com/callback"],
|
"clientSecret": app2_client_secret_hash,
|
||||||
"grantTypes": ["authorization_code", "refresh_token"],
|
"clientSecretSalt": app2_client_secret_salt,
|
||||||
"scopes": [APIKeyPermission.EXECUTE_GRAPH, APIKeyPermission.READ_GRAPH],
|
"redirectUris": ["https://other-app.com/callback"],
|
||||||
"ownerId": test_user,
|
"grantTypes": ["authorization_code", "refresh_token"],
|
||||||
"isActive": True,
|
"scopes": [APIKeyPermission.EXECUTE_GRAPH, APIKeyPermission.READ_GRAPH],
|
||||||
}
|
"ownerId": test_user,
|
||||||
|
"isActive": True,
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
# App 2 tries to revoke App 1's access token
|
# App 2 tries to revoke App 1's access token
|
||||||
|
|||||||
@@ -56,7 +56,7 @@ async def postmark_webhook_handler(
|
|||||||
webhook: Annotated[
|
webhook: Annotated[
|
||||||
PostmarkWebhook,
|
PostmarkWebhook,
|
||||||
Body(discriminator="RecordType"),
|
Body(discriminator="RecordType"),
|
||||||
]
|
],
|
||||||
):
|
):
|
||||||
logger.info(f"Received webhook from Postmark: {webhook}")
|
logger.info(f"Received webhook from Postmark: {webhook}")
|
||||||
match webhook:
|
match webhook:
|
||||||
|
|||||||
@@ -522,8 +522,8 @@ async def test_api_keys_with_newline_variations(mock_request):
|
|||||||
"valid\r\ntoken", # Windows newline
|
"valid\r\ntoken", # Windows newline
|
||||||
"valid\rtoken", # Mac newline
|
"valid\rtoken", # Mac newline
|
||||||
"valid\x85token", # NEL (Next Line)
|
"valid\x85token", # NEL (Next Line)
|
||||||
"valid\x0Btoken", # Vertical Tab
|
"valid\x0btoken", # Vertical Tab
|
||||||
"valid\x0Ctoken", # Form Feed
|
"valid\x0ctoken", # Form Feed
|
||||||
]
|
]
|
||||||
|
|
||||||
for api_key in newline_variations:
|
for api_key in newline_variations:
|
||||||
|
|||||||
@@ -1,8 +1,10 @@
|
|||||||
import uuid
|
import uuid
|
||||||
from datetime import UTC, datetime
|
from datetime import UTC, datetime
|
||||||
from os import getenv
|
from os import getenv
|
||||||
|
from typing import cast
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
from prisma.types import ProfileCreateInput
|
||||||
from pydantic import SecretStr
|
from pydantic import SecretStr
|
||||||
|
|
||||||
from backend.blocks.firecrawl.scrape import FirecrawlScrapeBlock
|
from backend.blocks.firecrawl.scrape import FirecrawlScrapeBlock
|
||||||
@@ -49,13 +51,16 @@ async def setup_test_data():
|
|||||||
# 1b. Create a profile with username for the user (required for store agent lookup)
|
# 1b. Create a profile with username for the user (required for store agent lookup)
|
||||||
username = user.email.split("@")[0]
|
username = user.email.split("@")[0]
|
||||||
await prisma.profile.create(
|
await prisma.profile.create(
|
||||||
data={
|
data=cast(
|
||||||
"userId": user.id,
|
ProfileCreateInput,
|
||||||
"username": username,
|
{
|
||||||
"name": f"Test User {username}",
|
"userId": user.id,
|
||||||
"description": "Test user profile",
|
"username": username,
|
||||||
"links": [], # Required field - empty array for test profiles
|
"name": f"Test User {username}",
|
||||||
}
|
"description": "Test user profile",
|
||||||
|
"links": [], # Required field - empty array for test profiles
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
# 2. Create a test graph with agent input -> agent output
|
# 2. Create a test graph with agent input -> agent output
|
||||||
@@ -172,13 +177,16 @@ async def setup_llm_test_data():
|
|||||||
# 1b. Create a profile with username for the user (required for store agent lookup)
|
# 1b. Create a profile with username for the user (required for store agent lookup)
|
||||||
username = user.email.split("@")[0]
|
username = user.email.split("@")[0]
|
||||||
await prisma.profile.create(
|
await prisma.profile.create(
|
||||||
data={
|
data=cast(
|
||||||
"userId": user.id,
|
ProfileCreateInput,
|
||||||
"username": username,
|
{
|
||||||
"name": f"Test User {username}",
|
"userId": user.id,
|
||||||
"description": "Test user profile for LLM tests",
|
"username": username,
|
||||||
"links": [], # Required field - empty array for test profiles
|
"name": f"Test User {username}",
|
||||||
}
|
"description": "Test user profile for LLM tests",
|
||||||
|
"links": [], # Required field - empty array for test profiles
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
# 2. Create test OpenAI credentials for the user
|
# 2. Create test OpenAI credentials for the user
|
||||||
@@ -332,13 +340,16 @@ async def setup_firecrawl_test_data():
|
|||||||
# 1b. Create a profile with username for the user (required for store agent lookup)
|
# 1b. Create a profile with username for the user (required for store agent lookup)
|
||||||
username = user.email.split("@")[0]
|
username = user.email.split("@")[0]
|
||||||
await prisma.profile.create(
|
await prisma.profile.create(
|
||||||
data={
|
data=cast(
|
||||||
"userId": user.id,
|
ProfileCreateInput,
|
||||||
"username": username,
|
{
|
||||||
"name": f"Test User {username}",
|
"userId": user.id,
|
||||||
"description": "Test user profile for Firecrawl tests",
|
"username": username,
|
||||||
"links": [], # Required field - empty array for test profiles
|
"name": f"Test User {username}",
|
||||||
}
|
"description": "Test user profile for Firecrawl tests",
|
||||||
|
"links": [], # Required field - empty array for test profiles
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
# NOTE: We deliberately do NOT create Firecrawl credentials for this user
|
# NOTE: We deliberately do NOT create Firecrawl credentials for this user
|
||||||
|
|||||||
@@ -1,12 +1,13 @@
|
|||||||
import asyncio
|
import asyncio
|
||||||
import logging
|
import logging
|
||||||
from typing import Literal, Optional
|
from typing import Literal, Optional, cast
|
||||||
|
|
||||||
import fastapi
|
import fastapi
|
||||||
import prisma.errors
|
import prisma.errors
|
||||||
import prisma.fields
|
import prisma.fields
|
||||||
import prisma.models
|
import prisma.models
|
||||||
import prisma.types
|
import prisma.types
|
||||||
|
from prisma.types import LibraryAgentCreateInput
|
||||||
|
|
||||||
import backend.data.graph as graph_db
|
import backend.data.graph as graph_db
|
||||||
import backend.data.integrations as integrations_db
|
import backend.data.integrations as integrations_db
|
||||||
@@ -802,18 +803,21 @@ async def add_store_agent_to_library(
|
|||||||
|
|
||||||
# Create LibraryAgent entry
|
# Create LibraryAgent entry
|
||||||
added_agent = await prisma.models.LibraryAgent.prisma().create(
|
added_agent = await prisma.models.LibraryAgent.prisma().create(
|
||||||
data={
|
data=cast(
|
||||||
"User": {"connect": {"id": user_id}},
|
LibraryAgentCreateInput,
|
||||||
"AgentGraph": {
|
{
|
||||||
"connect": {
|
"User": {"connect": {"id": user_id}},
|
||||||
"graphVersionId": {"id": graph.id, "version": graph.version}
|
"AgentGraph": {
|
||||||
}
|
"connect": {
|
||||||
|
"graphVersionId": {"id": graph.id, "version": graph.version}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"isCreatedByUser": False,
|
||||||
|
"settings": SafeJson(
|
||||||
|
_initialize_graph_settings(graph_model).model_dump()
|
||||||
|
),
|
||||||
},
|
},
|
||||||
"isCreatedByUser": False,
|
),
|
||||||
"settings": SafeJson(
|
|
||||||
_initialize_graph_settings(graph_model).model_dump()
|
|
||||||
),
|
|
||||||
},
|
|
||||||
include=library_agent_include(
|
include=library_agent_include(
|
||||||
user_id, include_nodes=False, include_executions=False
|
user_id, include_nodes=False, include_executions=False
|
||||||
),
|
),
|
||||||
|
|||||||
@@ -2,13 +2,14 @@ import asyncio
|
|||||||
import logging
|
import logging
|
||||||
import typing
|
import typing
|
||||||
from datetime import datetime, timezone
|
from datetime import datetime, timezone
|
||||||
from typing import Literal
|
from typing import Literal, cast
|
||||||
|
|
||||||
import fastapi
|
import fastapi
|
||||||
import prisma.enums
|
import prisma.enums
|
||||||
import prisma.errors
|
import prisma.errors
|
||||||
import prisma.models
|
import prisma.models
|
||||||
import prisma.types
|
import prisma.types
|
||||||
|
from prisma.types import SearchTermsCreateInput, StoreListingVersionCreateInput
|
||||||
|
|
||||||
import backend.server.v2.store.exceptions
|
import backend.server.v2.store.exceptions
|
||||||
import backend.server.v2.store.model
|
import backend.server.v2.store.model
|
||||||
@@ -248,7 +249,10 @@ async def log_search_term(search_query: str):
|
|||||||
date = datetime.now(timezone.utc).replace(hour=0, minute=0, second=0, microsecond=0)
|
date = datetime.now(timezone.utc).replace(hour=0, minute=0, second=0, microsecond=0)
|
||||||
try:
|
try:
|
||||||
await prisma.models.SearchTerms.prisma().create(
|
await prisma.models.SearchTerms.prisma().create(
|
||||||
data={"searchTerm": search_query, "createdDate": date}
|
data=cast(
|
||||||
|
SearchTermsCreateInput,
|
||||||
|
{"searchTerm": search_query, "createdDate": date},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
# Fail silently here so that logging search terms doesn't break the app
|
# Fail silently here so that logging search terms doesn't break the app
|
||||||
@@ -1431,11 +1435,14 @@ async def _approve_sub_agent(
|
|||||||
# Create new version if no matching version found
|
# Create new version if no matching version found
|
||||||
next_version = max((v.version for v in listing.Versions or []), default=0) + 1
|
next_version = max((v.version for v in listing.Versions or []), default=0) + 1
|
||||||
await prisma.models.StoreListingVersion.prisma(tx).create(
|
await prisma.models.StoreListingVersion.prisma(tx).create(
|
||||||
data={
|
data=cast(
|
||||||
**_create_sub_agent_version_data(sub_graph, heading, main_agent_name),
|
StoreListingVersionCreateInput,
|
||||||
"version": next_version,
|
{
|
||||||
"storeListingId": listing.id,
|
**_create_sub_agent_version_data(sub_graph, heading, main_agent_name),
|
||||||
}
|
"version": next_version,
|
||||||
|
"storeListingId": listing.id,
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
await prisma.models.StoreListing.prisma(tx).update(
|
await prisma.models.StoreListing.prisma(tx).update(
|
||||||
where={"id": listing.id}, data={"hasApprovedVersion": True}
|
where={"id": listing.id}, data={"hasApprovedVersion": True}
|
||||||
|
|||||||
@@ -45,7 +45,7 @@ def mock_storage_client(mocker):
|
|||||||
|
|
||||||
async def test_upload_media_success(mock_settings, mock_storage_client):
|
async def test_upload_media_success(mock_settings, mock_storage_client):
|
||||||
# Create test JPEG data with valid signature
|
# Create test JPEG data with valid signature
|
||||||
test_data = b"\xFF\xD8\xFF" + b"test data"
|
test_data = b"\xff\xd8\xff" + b"test data"
|
||||||
|
|
||||||
test_file = fastapi.UploadFile(
|
test_file = fastapi.UploadFile(
|
||||||
filename="laptop.jpeg",
|
filename="laptop.jpeg",
|
||||||
@@ -83,7 +83,7 @@ async def test_upload_media_missing_credentials(monkeypatch):
|
|||||||
|
|
||||||
test_file = fastapi.UploadFile(
|
test_file = fastapi.UploadFile(
|
||||||
filename="laptop.jpeg",
|
filename="laptop.jpeg",
|
||||||
file=io.BytesIO(b"\xFF\xD8\xFF" + b"test data"), # Valid JPEG signature
|
file=io.BytesIO(b"\xff\xd8\xff" + b"test data"), # Valid JPEG signature
|
||||||
headers=starlette.datastructures.Headers({"content-type": "image/jpeg"}),
|
headers=starlette.datastructures.Headers({"content-type": "image/jpeg"}),
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -108,7 +108,7 @@ async def test_upload_media_video_type(mock_settings, mock_storage_client):
|
|||||||
|
|
||||||
|
|
||||||
async def test_upload_media_file_too_large(mock_settings, mock_storage_client):
|
async def test_upload_media_file_too_large(mock_settings, mock_storage_client):
|
||||||
large_data = b"\xFF\xD8\xFF" + b"x" * (
|
large_data = b"\xff\xd8\xff" + b"x" * (
|
||||||
50 * 1024 * 1024 + 1
|
50 * 1024 * 1024 + 1
|
||||||
) # 50MB + 1 byte with valid JPEG signature
|
) # 50MB + 1 byte with valid JPEG signature
|
||||||
test_file = fastapi.UploadFile(
|
test_file = fastapi.UploadFile(
|
||||||
|
|||||||
@@ -4,14 +4,12 @@ Centralized service client helpers with thread caching.
|
|||||||
|
|
||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
from backend.util.cache import cached, thread_cached
|
from backend.util.cache import thread_cached
|
||||||
from backend.util.settings import Settings
|
from backend.util.settings import Settings
|
||||||
|
|
||||||
settings = Settings()
|
settings = Settings()
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from supabase import AClient, Client
|
|
||||||
|
|
||||||
from backend.data.execution import (
|
from backend.data.execution import (
|
||||||
AsyncRedisExecutionEventBus,
|
AsyncRedisExecutionEventBus,
|
||||||
RedisExecutionEventBus,
|
RedisExecutionEventBus,
|
||||||
@@ -116,29 +114,6 @@ def get_integration_credentials_store() -> "IntegrationCredentialsStore":
|
|||||||
return IntegrationCredentialsStore()
|
return IntegrationCredentialsStore()
|
||||||
|
|
||||||
|
|
||||||
# ============ Supabase Clients ============ #
|
|
||||||
|
|
||||||
|
|
||||||
@cached(ttl_seconds=3600)
|
|
||||||
def get_supabase() -> "Client":
|
|
||||||
"""Get a process-cached synchronous Supabase client instance."""
|
|
||||||
from supabase import create_client
|
|
||||||
|
|
||||||
return create_client(
|
|
||||||
settings.secrets.supabase_url, settings.secrets.supabase_service_role_key
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@cached(ttl_seconds=3600)
|
|
||||||
async def get_async_supabase() -> "AClient":
|
|
||||||
"""Get a process-cached asynchronous Supabase client instance."""
|
|
||||||
from supabase import create_async_client
|
|
||||||
|
|
||||||
return await create_async_client(
|
|
||||||
settings.secrets.supabase_url, settings.secrets.supabase_service_role_key
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
# ============ Notification Queue Helpers ============ #
|
# ============ Notification Queue Helpers ============ #
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -83,7 +83,7 @@ def shutdown_launchdarkly() -> None:
|
|||||||
@cached(maxsize=1000, ttl_seconds=86400) # 1000 entries, 24 hours TTL
|
@cached(maxsize=1000, ttl_seconds=86400) # 1000 entries, 24 hours TTL
|
||||||
async def _fetch_user_context_data(user_id: str) -> Context:
|
async def _fetch_user_context_data(user_id: str) -> Context:
|
||||||
"""
|
"""
|
||||||
Fetch user context for LaunchDarkly from Supabase.
|
Fetch user context for LaunchDarkly from the database.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
user_id: The user ID to fetch data for
|
user_id: The user ID to fetch data for
|
||||||
@@ -94,12 +94,11 @@ async def _fetch_user_context_data(user_id: str) -> Context:
|
|||||||
builder = Context.builder(user_id).kind("user").anonymous(True)
|
builder = Context.builder(user_id).kind("user").anonymous(True)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from backend.util.clients import get_supabase
|
from backend.data.db import prisma
|
||||||
|
|
||||||
# If we have user data, update context
|
# If we have user data, update context
|
||||||
response = get_supabase().auth.admin.get_user_by_id(user_id)
|
user = await prisma.user.find_unique(where={"id": user_id})
|
||||||
if response and response.user:
|
if user:
|
||||||
user = response.user
|
|
||||||
builder.anonymous(False)
|
builder.anonymous(False)
|
||||||
if user.role:
|
if user.role:
|
||||||
builder.set("role", user.role)
|
builder.set("role", user.role)
|
||||||
|
|||||||
@@ -530,11 +530,6 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
|
|||||||
class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
|
class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
|
||||||
"""Secrets for the server."""
|
"""Secrets for the server."""
|
||||||
|
|
||||||
supabase_url: str = Field(default="", description="Supabase URL")
|
|
||||||
supabase_service_role_key: str = Field(
|
|
||||||
default="", description="Supabase service role key"
|
|
||||||
)
|
|
||||||
|
|
||||||
encryption_key: str = Field(default="", description="Encryption key")
|
encryption_key: str = Field(default="", description="Encryption key")
|
||||||
|
|
||||||
rabbitmq_default_user: str = Field(default="", description="RabbitMQ default user")
|
rabbitmq_default_user: str = Field(default="", description="RabbitMQ default user")
|
||||||
|
|||||||
@@ -222,9 +222,9 @@ class TestSafeJson:
|
|||||||
problematic_data = {
|
problematic_data = {
|
||||||
"null_byte": "data with \x00 null",
|
"null_byte": "data with \x00 null",
|
||||||
"bell_char": "data with \x07 bell",
|
"bell_char": "data with \x07 bell",
|
||||||
"form_feed": "data with \x0C feed",
|
"form_feed": "data with \x0c feed",
|
||||||
"escape_char": "data with \x1B escape",
|
"escape_char": "data with \x1b escape",
|
||||||
"delete_char": "data with \x7F delete",
|
"delete_char": "data with \x7f delete",
|
||||||
}
|
}
|
||||||
|
|
||||||
# SafeJson should successfully process data with control characters
|
# SafeJson should successfully process data with control characters
|
||||||
@@ -235,9 +235,9 @@ class TestSafeJson:
|
|||||||
result_data = result.data
|
result_data = result.data
|
||||||
assert "\x00" not in str(result_data) # null byte removed
|
assert "\x00" not in str(result_data) # null byte removed
|
||||||
assert "\x07" not in str(result_data) # bell removed
|
assert "\x07" not in str(result_data) # bell removed
|
||||||
assert "\x0C" not in str(result_data) # form feed removed
|
assert "\x0c" not in str(result_data) # form feed removed
|
||||||
assert "\x1B" not in str(result_data) # escape removed
|
assert "\x1b" not in str(result_data) # escape removed
|
||||||
assert "\x7F" not in str(result_data) # delete removed
|
assert "\x7f" not in str(result_data) # delete removed
|
||||||
|
|
||||||
# Test that safe whitespace characters are preserved
|
# Test that safe whitespace characters are preserved
|
||||||
safe_data = {
|
safe_data = {
|
||||||
@@ -263,7 +263,7 @@ class TestSafeJson:
|
|||||||
def test_web_scraping_content_sanitization(self):
|
def test_web_scraping_content_sanitization(self):
|
||||||
"""Test sanitization of typical web scraping content with null characters."""
|
"""Test sanitization of typical web scraping content with null characters."""
|
||||||
# Simulate web content that might contain null bytes from SearchTheWebBlock
|
# Simulate web content that might contain null bytes from SearchTheWebBlock
|
||||||
web_content = "Article title\x00Hidden null\x01Start of heading\x08Backspace\x0CForm feed content\x1FUnit separator\x7FDelete char"
|
web_content = "Article title\x00Hidden null\x01Start of heading\x08Backspace\x0cForm feed content\x1fUnit separator\x7fDelete char"
|
||||||
|
|
||||||
result = SafeJson(web_content)
|
result = SafeJson(web_content)
|
||||||
assert isinstance(result, Json)
|
assert isinstance(result, Json)
|
||||||
@@ -273,9 +273,9 @@ class TestSafeJson:
|
|||||||
assert "\x00" not in sanitized_content
|
assert "\x00" not in sanitized_content
|
||||||
assert "\x01" not in sanitized_content
|
assert "\x01" not in sanitized_content
|
||||||
assert "\x08" not in sanitized_content
|
assert "\x08" not in sanitized_content
|
||||||
assert "\x0C" not in sanitized_content
|
assert "\x0c" not in sanitized_content
|
||||||
assert "\x1F" not in sanitized_content
|
assert "\x1f" not in sanitized_content
|
||||||
assert "\x7F" not in sanitized_content
|
assert "\x7f" not in sanitized_content
|
||||||
|
|
||||||
# Verify the content is still readable
|
# Verify the content is still readable
|
||||||
assert "Article title" in sanitized_content
|
assert "Article title" in sanitized_content
|
||||||
@@ -391,7 +391,7 @@ class TestSafeJson:
|
|||||||
mixed_content = {
|
mixed_content = {
|
||||||
"safe_and_unsafe": "Good text\twith tab\x00NULL BYTE\nand newline\x08BACKSPACE",
|
"safe_and_unsafe": "Good text\twith tab\x00NULL BYTE\nand newline\x08BACKSPACE",
|
||||||
"file_path_with_null": "C:\\temp\\file\x00.txt",
|
"file_path_with_null": "C:\\temp\\file\x00.txt",
|
||||||
"json_with_controls": '{"text": "data\x01\x0C\x1F"}',
|
"json_with_controls": '{"text": "data\x01\x0c\x1f"}',
|
||||||
}
|
}
|
||||||
|
|
||||||
result = SafeJson(mixed_content)
|
result = SafeJson(mixed_content)
|
||||||
@@ -419,13 +419,13 @@ class TestSafeJson:
|
|||||||
|
|
||||||
# Create data with various problematic escape sequences that could cause JSON parsing errors
|
# Create data with various problematic escape sequences that could cause JSON parsing errors
|
||||||
problematic_output_data = {
|
problematic_output_data = {
|
||||||
"web_content": "Article text\x00with null\x01and control\x08chars\x0C\x1F\x7F",
|
"web_content": "Article text\x00with null\x01and control\x08chars\x0c\x1f\x7f",
|
||||||
"file_path": "C:\\Users\\test\\file\x00.txt",
|
"file_path": "C:\\Users\\test\\file\x00.txt",
|
||||||
"json_like_string": '{"text": "data\x00\x08\x1F"}',
|
"json_like_string": '{"text": "data\x00\x08\x1f"}',
|
||||||
"escaped_sequences": "Text with \\u0000 and \\u0008 sequences",
|
"escaped_sequences": "Text with \\u0000 and \\u0008 sequences",
|
||||||
"mixed_content": "Normal text\tproperly\nformatted\rwith\x00invalid\x08chars\x1Fmixed",
|
"mixed_content": "Normal text\tproperly\nformatted\rwith\x00invalid\x08chars\x1fmixed",
|
||||||
"large_text": "A" * 35000
|
"large_text": "A" * 35000
|
||||||
+ "\x00\x08\x1F"
|
+ "\x00\x08\x1f"
|
||||||
+ "B" * 5000, # Large text like in the error
|
+ "B" * 5000, # Large text like in the error
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -446,9 +446,9 @@ class TestSafeJson:
|
|||||||
assert "\x00" not in str(web_content)
|
assert "\x00" not in str(web_content)
|
||||||
assert "\x01" not in str(web_content)
|
assert "\x01" not in str(web_content)
|
||||||
assert "\x08" not in str(web_content)
|
assert "\x08" not in str(web_content)
|
||||||
assert "\x0C" not in str(web_content)
|
assert "\x0c" not in str(web_content)
|
||||||
assert "\x1F" not in str(web_content)
|
assert "\x1f" not in str(web_content)
|
||||||
assert "\x7F" not in str(web_content)
|
assert "\x7f" not in str(web_content)
|
||||||
|
|
||||||
# Check that legitimate content is preserved
|
# Check that legitimate content is preserved
|
||||||
assert "Article text" in str(web_content)
|
assert "Article text" in str(web_content)
|
||||||
@@ -467,7 +467,7 @@ class TestSafeJson:
|
|||||||
assert "B" * 1000 in str(large_text) # B's preserved
|
assert "B" * 1000 in str(large_text) # B's preserved
|
||||||
assert "\x00" not in str(large_text) # Control chars removed
|
assert "\x00" not in str(large_text) # Control chars removed
|
||||||
assert "\x08" not in str(large_text)
|
assert "\x08" not in str(large_text)
|
||||||
assert "\x1F" not in str(large_text)
|
assert "\x1f" not in str(large_text)
|
||||||
|
|
||||||
# Most importantly: ensure the result can be JSON-serialized without errors
|
# Most importantly: ensure the result can be JSON-serialized without errors
|
||||||
# This would have failed with the old approach
|
# This would have failed with the old approach
|
||||||
@@ -602,7 +602,7 @@ class TestSafeJson:
|
|||||||
model = SamplePydanticModel(
|
model = SamplePydanticModel(
|
||||||
name="Test\x00User", # Has null byte
|
name="Test\x00User", # Has null byte
|
||||||
age=30,
|
age=30,
|
||||||
metadata={"info": "data\x08with\x0Ccontrols"},
|
metadata={"info": "data\x08with\x0ccontrols"},
|
||||||
)
|
)
|
||||||
|
|
||||||
data = {"credential": model}
|
data = {"credential": model}
|
||||||
@@ -616,7 +616,7 @@ class TestSafeJson:
|
|||||||
json_string = json.dumps(result.data)
|
json_string = json.dumps(result.data)
|
||||||
assert "\x00" not in json_string
|
assert "\x00" not in json_string
|
||||||
assert "\x08" not in json_string
|
assert "\x08" not in json_string
|
||||||
assert "\x0C" not in json_string
|
assert "\x0c" not in json_string
|
||||||
assert "TestUser" in json_string # Name preserved minus null byte
|
assert "TestUser" in json_string # Name preserved minus null byte
|
||||||
|
|
||||||
def test_deeply_nested_pydantic_models_control_char_sanitization(self):
|
def test_deeply_nested_pydantic_models_control_char_sanitization(self):
|
||||||
@@ -639,16 +639,16 @@ class TestSafeJson:
|
|||||||
|
|
||||||
# Create test data with control characters at every nesting level
|
# Create test data with control characters at every nesting level
|
||||||
inner = InnerModel(
|
inner = InnerModel(
|
||||||
deep_string="Deepest\x00Level\x08Control\x0CChars", # Multiple control chars at deepest level
|
deep_string="Deepest\x00Level\x08Control\x0cChars", # Multiple control chars at deepest level
|
||||||
metadata={
|
metadata={
|
||||||
"nested_key": "Nested\x1FValue\x7FDelete"
|
"nested_key": "Nested\x1fValue\x7fDelete"
|
||||||
}, # Control chars in nested dict
|
}, # Control chars in nested dict
|
||||||
)
|
)
|
||||||
|
|
||||||
middle = MiddleModel(
|
middle = MiddleModel(
|
||||||
middle_string="Middle\x01StartOfHeading\x1FUnitSeparator",
|
middle_string="Middle\x01StartOfHeading\x1fUnitSeparator",
|
||||||
inner=inner,
|
inner=inner,
|
||||||
data="Some\x0BVerticalTab\x0EShiftOut",
|
data="Some\x0bVerticalTab\x0eShiftOut",
|
||||||
)
|
)
|
||||||
|
|
||||||
outer = OuterModel(outer_string="Outer\x00Null\x07Bell", middle=middle)
|
outer = OuterModel(outer_string="Outer\x00Null\x07Bell", middle=middle)
|
||||||
@@ -659,7 +659,7 @@ class TestSafeJson:
|
|||||||
"nested_model": outer,
|
"nested_model": outer,
|
||||||
"list_with_strings": [
|
"list_with_strings": [
|
||||||
"List\x00Item1",
|
"List\x00Item1",
|
||||||
"List\x0CItem2\x1F",
|
"List\x0cItem2\x1f",
|
||||||
{"dict_in_list": "Dict\x08Value"},
|
{"dict_in_list": "Dict\x08Value"},
|
||||||
],
|
],
|
||||||
}
|
}
|
||||||
@@ -684,10 +684,10 @@ class TestSafeJson:
|
|||||||
"\x06",
|
"\x06",
|
||||||
"\x07",
|
"\x07",
|
||||||
"\x08",
|
"\x08",
|
||||||
"\x0B",
|
"\x0b",
|
||||||
"\x0C",
|
"\x0c",
|
||||||
"\x0E",
|
"\x0e",
|
||||||
"\x0F",
|
"\x0f",
|
||||||
"\x10",
|
"\x10",
|
||||||
"\x11",
|
"\x11",
|
||||||
"\x12",
|
"\x12",
|
||||||
@@ -698,13 +698,13 @@ class TestSafeJson:
|
|||||||
"\x17",
|
"\x17",
|
||||||
"\x18",
|
"\x18",
|
||||||
"\x19",
|
"\x19",
|
||||||
"\x1A",
|
"\x1a",
|
||||||
"\x1B",
|
"\x1b",
|
||||||
"\x1C",
|
"\x1c",
|
||||||
"\x1D",
|
"\x1d",
|
||||||
"\x1E",
|
"\x1e",
|
||||||
"\x1F",
|
"\x1f",
|
||||||
"\x7F",
|
"\x7f",
|
||||||
]
|
]
|
||||||
|
|
||||||
for char in control_chars:
|
for char in control_chars:
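
The hex-case changes in these assertions are cosmetic (Python treats "\x0C" and "\x0c" identically); the behaviour under test is stripping JSON-unsafe control characters while keeping tab, newline, and carriage return. A hedged sketch of that kind of sanitisation (the regex and function are illustrative, not the project's SafeJson implementation):

```python
# Illustrative only: strips C0 control characters (except \t, \n, \r) and DEL,
# similar in spirit to the SafeJson behaviour exercised by the tests above.
import json
import re

_UNSAFE_CONTROL = re.compile(r"[\x00-\x08\x0b\x0c\x0e-\x1f\x7f]")


def sanitize(text: str) -> str:
    return _UNSAFE_CONTROL.sub("", text)


sample = "Article title\x00Hidden null\x0cForm feed\x1fUnit separator\x7fDelete"
clean = sanitize(sample)
assert all(ch not in clean for ch in ("\x00", "\x0c", "\x1f", "\x7f"))
assert sanitize("keep\ttabs\nand\nnewlines") == "keep\ttabs\nand\nnewlines"
print(json.dumps({"web_content": clean}))
```
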
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ networks:
|
|||||||
name: shared-network
|
name: shared-network
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
supabase-config:
|
clamav-data:
|
||||||
|
|
||||||
x-agpt-services:
|
x-agpt-services:
|
||||||
&agpt-services
|
&agpt-services
|
||||||
@@ -13,28 +13,18 @@ x-agpt-services:
|
|||||||
- app-network
|
- app-network
|
||||||
- shared-network
|
- shared-network
|
||||||
|
|
||||||
x-supabase-services:
|
|
||||||
&supabase-services
|
|
||||||
networks:
|
|
||||||
- app-network
|
|
||||||
- shared-network
|
|
||||||
|
|
||||||
|
|
||||||
volumes:
|
|
||||||
clamav-data:
|
|
||||||
|
|
||||||
services:
|
services:
|
||||||
|
|
||||||
db:
|
db:
|
||||||
<<: *supabase-services
|
<<: *agpt-services
|
||||||
extends:
|
extends:
|
||||||
file: ../db/docker/docker-compose.yml
|
file: ../db/docker/docker-compose.yml
|
||||||
service: db
|
service: db
|
||||||
ports:
|
ports:
|
||||||
- ${POSTGRES_PORT}:5432 # We don't use Supavisor locally, so we expose the db directly.
|
- ${POSTGRES_PORT}:5432
|
||||||
|
|
||||||
vector:
|
vector:
|
||||||
<<: *supabase-services
|
<<: *agpt-services
|
||||||
extends:
|
extends:
|
||||||
file: ../db/docker/docker-compose.yml
|
file: ../db/docker/docker-compose.yml
|
||||||
service: vector
|
service: vector
|
||||||
@@ -67,6 +57,7 @@ services:
|
|||||||
ports:
|
ports:
|
||||||
- "5672:5672"
|
- "5672:5672"
|
||||||
- "15672:15672"
|
- "15672:15672"
|
||||||
|
|
||||||
clamav:
|
clamav:
|
||||||
image: clamav/clamav-debian:latest
|
image: clamav/clamav-debian:latest
|
||||||
ports:
|
ports:
|
||||||
@@ -85,6 +76,7 @@ services:
|
|||||||
interval: 30s
|
interval: 30s
|
||||||
timeout: 10s
|
timeout: 10s
|
||||||
retries: 3
|
retries: 3
|
||||||
|
|
||||||
networks:
|
networks:
|
||||||
app-network-test:
|
app-network-test:
|
||||||
driver: bridge
|
driver: bridge
|
||||||
|
|||||||
@@ -5,8 +5,8 @@ Clean, streamlined load testing infrastructure for the AutoGPT Platform using k6
|
|||||||
## 🚀 Quick Start
|
## 🚀 Quick Start
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# 1. Set up Supabase service key (required for token generation)
|
# 1. Set up API base URL (optional, defaults to local)
|
||||||
export SUPABASE_SERVICE_KEY="your-supabase-service-key"
|
export API_BASE_URL="http://localhost:8006"
|
||||||
|
|
||||||
# 2. Generate pre-authenticated tokens (first time setup - creates 160+ tokens with 24-hour expiry)
|
# 2. Generate pre-authenticated tokens (first time setup - creates 160+ tokens with 24-hour expiry)
|
||||||
node generate-tokens.js --count=160
|
node generate-tokens.js --count=160
|
||||||
@@ -87,9 +87,9 @@ npm run cloud
|
|||||||
- **Generation**: Run `node generate-tokens.js --count=160` to create tokens
|
- **Generation**: Run `node generate-tokens.js --count=160` to create tokens
|
||||||
- **File**: `configs/pre-authenticated-tokens.js` (gitignored for security)
|
- **File**: `configs/pre-authenticated-tokens.js` (gitignored for security)
|
||||||
- **Capacity**: 160+ tokens supporting high-concurrency testing
|
- **Capacity**: 160+ tokens supporting high-concurrency testing
|
||||||
- **Expiry**: 24 hours (86400 seconds) - extended for long-duration testing
|
- **Expiry**: Based on JWT token expiry settings (default: 15 min access, 7 day refresh)
|
||||||
- **Benefit**: Eliminates Supabase auth rate limiting at scale
|
- **Benefit**: Eliminates auth rate limiting at scale
|
||||||
- **Regeneration**: Run `node generate-tokens.js --count=160` when tokens expire after 24 hours
|
- **Regeneration**: Run `node generate-tokens.js --count=160` when tokens expire
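
The bullets above describe the pre-authenticated token workflow; under the hood the generator now calls the platform's native login endpoint. A hedged sketch of fetching a single token that way (the endpoint path matches the generate-tokens.js script, but the credentials and the "access_token" field name are assumptions):

```python
# Hedged sketch: request a token from the native auth API the load tests use.
# Email/password are placeholders; adjust to your seeded load-test users.
import os

import requests

api_base = os.environ.get("API_BASE_URL", "http://localhost:8006")
resp = requests.post(
    f"{api_base}/api/auth/login",
    json={"email": "loadtest4@example.com", "password": "your-test-password"},
    timeout=10,
)
resp.raise_for_status()
token = resp.json().get("access_token")  # field name assumed
print(f"got token: {token[:16]}..." if token else "no token in response")
```
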
|
||||||
|
|
||||||
### Environment Configuration
|
### Environment Configuration
|
||||||
|
|
||||||
@@ -182,29 +182,29 @@ npm run cloud
|
|||||||
|
|
||||||
### Required Setup
|
### Required Setup
|
||||||
|
|
||||||
**1. Supabase Service Key (Required for all testing):**
|
**1. API Base URL (Optional):**
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Option 1: From your local environment (if available)
|
# For local testing (default)
|
||||||
export SUPABASE_SERVICE_KEY="your-supabase-service-key"
|
export API_BASE_URL="http://localhost:8006"
|
||||||
|
|
||||||
# Option 2: From Kubernetes secret (for platform developers)
|
# For dev environment
|
||||||
kubectl get secret supabase-service-key -o jsonpath='{.data.service-key}' | base64 -d
|
export API_BASE_URL="https://dev-server.agpt.co"
|
||||||
|
|
||||||
# Option 3: From Supabase dashboard
|
# For production (coordinate with team!)
|
||||||
# Go to Project Settings > API > service_role key (never commit this!)
|
export API_BASE_URL="https://api.agpt.co"
|
||||||
```
|
```
|
||||||
|
|
||||||
**2. Generate Pre-Authenticated Tokens (Required):**
|
**2. Generate Pre-Authenticated Tokens (Required):**
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Creates 160 tokens with 24-hour expiry - prevents auth rate limiting
|
# Creates 160 tokens - prevents auth rate limiting
|
||||||
node generate-tokens.js --count=160
|
node generate-tokens.js --count=160
|
||||||
|
|
||||||
# Generate fewer tokens for smaller tests (minimum 10)
|
# Generate fewer tokens for smaller tests (minimum 10)
|
||||||
node generate-tokens.js --count=50
|
node generate-tokens.js --count=50
|
||||||
|
|
||||||
# Regenerate when tokens expire (every 24 hours)
|
# Regenerate when tokens expire
|
||||||
node generate-tokens.js --count=160
|
node generate-tokens.js --count=160
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|||||||
@@ -4,25 +4,16 @@ export const ENV_CONFIG = {
|
|||||||
API_BASE_URL: "https://dev-server.agpt.co",
|
API_BASE_URL: "https://dev-server.agpt.co",
|
||||||
BUILDER_BASE_URL: "https://dev-builder.agpt.co",
|
BUILDER_BASE_URL: "https://dev-builder.agpt.co",
|
||||||
WS_BASE_URL: "wss://dev-ws-server.agpt.co",
|
WS_BASE_URL: "wss://dev-ws-server.agpt.co",
|
||||||
SUPABASE_URL: "https://adfjtextkuilwuhzdjpf.supabase.co",
|
|
||||||
SUPABASE_ANON_KEY:
|
|
||||||
"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6ImFkZmp0ZXh0a3VpbHd1aHpkanBmIiwicm9sZSI6ImFub24iLCJpYXQiOjE3MzAyNTE3MDIsImV4cCI6MjA0NTgyNzcwMn0.IuQNXsHEKJNxtS9nyFeqO0BGMYN8sPiObQhuJLSK9xk",
|
|
||||||
},
|
},
|
||||||
LOCAL: {
|
LOCAL: {
|
||||||
API_BASE_URL: "http://localhost:8006",
|
API_BASE_URL: "http://localhost:8006",
|
||||||
BUILDER_BASE_URL: "http://localhost:3000",
|
BUILDER_BASE_URL: "http://localhost:3000",
|
||||||
WS_BASE_URL: "ws://localhost:8001",
|
WS_BASE_URL: "ws://localhost:8001",
|
||||||
SUPABASE_URL: "http://localhost:8000",
|
|
||||||
SUPABASE_ANON_KEY:
|
|
||||||
"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE",
|
|
||||||
},
|
},
|
||||||
PROD: {
|
PROD: {
|
||||||
API_BASE_URL: "https://api.agpt.co",
|
API_BASE_URL: "https://api.agpt.co",
|
||||||
BUILDER_BASE_URL: "https://builder.agpt.co",
|
BUILDER_BASE_URL: "https://builder.agpt.co",
|
||||||
WS_BASE_URL: "wss://ws-server.agpt.co",
|
WS_BASE_URL: "wss://ws-server.agpt.co",
|
||||||
SUPABASE_URL: "https://supabase.agpt.co",
|
|
||||||
SUPABASE_ANON_KEY:
|
|
||||||
"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6ImJnd3B3ZHN4YmxyeWloaW51dGJ4Iiwicm9sZSI6ImFub24iLCJpYXQiOjE3MzAyODYzMDUsImV4cCI6MjA0NTg2MjMwNX0.ISa2IofTdQIJmmX5JwKGGNajqjsD8bjaGBzK90SubE0",
|
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@@ -4,22 +4,19 @@
  * Generate Pre-Authenticated Tokens for Load Testing
  * Creates configs/pre-authenticated-tokens.js with 350+ tokens
  *
- * This replaces the old token generation scripts with a clean, single script
+ * This uses the native auth API to generate tokens
  */

 import https from "https";
+import http from "http";
 import fs from "fs";
 import path from "path";

-// Get Supabase service key from environment (REQUIRED for token generation)
-const SUPABASE_SERVICE_KEY = process.env.SUPABASE_SERVICE_KEY;
-
-if (!SUPABASE_SERVICE_KEY) {
-  console.error("❌ SUPABASE_SERVICE_KEY environment variable is required");
-  console.error("Get service key from kubectl or environment:");
-  console.error('export SUPABASE_SERVICE_KEY="your-service-key"');
-  process.exit(1);
-}
+// Get API base URL from environment (default to local)
+const API_BASE_URL = process.env.API_BASE_URL || "http://localhost:8006";
+const parsedUrl = new URL(API_BASE_URL);
+const isHttps = parsedUrl.protocol === "https:";
+const httpModule = isHttps ? https : http;

 // Generate test users (loadtest4-50 are known to work)
 const TEST_USERS = [];
@@ -31,7 +28,7 @@ for (let i = 4; i <= 50; i++) {
 }

 console.log(
-  `🔐 Generating pre-authenticated tokens from ${TEST_USERS.length} users...`,
+  `Generating pre-authenticated tokens from ${TEST_USERS.length} users...`,
 );

 async function authenticateUser(user, attempt = 1) {
@@ -39,22 +36,20 @@ async function authenticateUser(user, attempt = 1) {
   const postData = JSON.stringify({
     email: user.email,
     password: user.password,
-    expires_in: 86400, // 24 hours in seconds (24 * 60 * 60)
   });

   const options = {
-    hostname: "adfjtextkuilwuhzdjpf.supabase.co",
-    path: "/auth/v1/token?grant_type=password",
+    hostname: parsedUrl.hostname,
+    port: parsedUrl.port || (isHttps ? 443 : 80),
+    path: "/api/auth/login",
     method: "POST",
     headers: {
-      Authorization: `Bearer ${SUPABASE_SERVICE_KEY}`,
-      apikey: SUPABASE_SERVICE_KEY,
       "Content-Type": "application/json",
       "Content-Length": postData.length,
     },
   };

-  const req = https.request(options, (res) => {
+  const req = httpModule.request(options, (res) => {
     let data = "";
     res.on("data", (chunk) => (data += chunk));
     res.on("end", () => {
@@ -65,29 +60,29 @@ async function authenticateUser(user, attempt = 1) {
       } else if (res.statusCode === 429) {
         // Rate limited - wait and retry
         console.log(
-          `⏳ Rate limited for ${user.email}, waiting 5s (attempt ${attempt}/3)...`,
+          `Rate limited for ${user.email}, waiting 5s (attempt ${attempt}/3)...`,
         );
         setTimeout(() => {
           if (attempt < 3) {
             authenticateUser(user, attempt + 1).then(resolve);
           } else {
-            console.log(`❌ Max retries exceeded for ${user.email}`);
+            console.log(`Max retries exceeded for ${user.email}`);
             resolve(null);
           }
         }, 5000);
       } else {
-        console.log(`❌ Auth failed for ${user.email}: ${res.statusCode}`);
+        console.log(`Auth failed for ${user.email}: ${res.statusCode}`);
         resolve(null);
       }
     } catch (e) {
-      console.log(`❌ Parse error for ${user.email}:`, e.message);
+      console.log(`Parse error for ${user.email}:`, e.message);
       resolve(null);
     }
   });
 });

 req.on("error", (err) => {
-    console.log(`❌ Request error for ${user.email}:`, err.message);
+    console.log(`Request error for ${user.email}:`, err.message);
   resolve(null);
 });
@@ -97,7 +92,8 @@ async function authenticateUser(user, attempt = 1) {
 }

 async function generateTokens() {
-  console.log("🚀 Starting token generation...");
+  console.log("Starting token generation...");
+  console.log(`Using API: ${API_BASE_URL}`);
   console.log("Rate limit aware - this will take ~10-15 minutes");
   console.log("===========================================\n");
@@ -113,11 +109,11 @@ async function generateTokens() {
     150;
   const tokensPerUser = Math.ceil(targetTokens / TEST_USERS.length);
   console.log(
-    `📊 Generating ${tokensPerUser} tokens per user (${TEST_USERS.length} users) - Target: ${targetTokens}\n`,
+    `Generating ${tokensPerUser} tokens per user (${TEST_USERS.length} users) - Target: ${targetTokens}\n`,
   );

   for (let round = 1; round <= tokensPerUser; round++) {
-    console.log(`🔄 Round ${round}/${tokensPerUser}:`);
+    console.log(`Round ${round}/${tokensPerUser}:`);

     for (
       let i = 0;
@@ -137,9 +133,9 @@ async function generateTokens() {
           generated: new Date().toISOString(),
           round: round,
         });
-        console.log(`✅ (${tokens.length}/${targetTokens})`);
+        console.log(`OK (${tokens.length}/${targetTokens})`);
       } else {
-        console.log(`❌`);
+        console.log(`FAILED`);
       }

       // Respect rate limits - wait 500ms between requests
@@ -152,13 +148,13 @@ async function generateTokens() {

     // Wait longer between rounds
     if (round < tokensPerUser) {
-      console.log(`  ⏸️ Waiting 3s before next round...\n`);
+      console.log(`  Waiting 3s before next round...\n`);
       await new Promise((resolve) => setTimeout(resolve, 3000));
     }
   }

   const duration = Math.round((Date.now() - startTime) / 1000);
-  console.log(`\n✅ Generated ${tokens.length} tokens in ${duration}s`);
+  console.log(`\nGenerated ${tokens.length} tokens in ${duration}s`);

   // Create configs directory if it doesn't exist
   const configsDir = path.join(process.cwd(), "configs");
@@ -172,8 +168,8 @@ async function generateTokens() {
 // Total tokens: ${tokens.length}
 // Generation time: ${duration} seconds
 //
-// ⚠️ SECURITY: This file contains real authentication tokens
-// ⚠️ DO NOT COMMIT TO GIT - File is gitignored
+// SECURITY: This file contains real authentication tokens
+// DO NOT COMMIT TO GIT - File is gitignored

 export const PRE_AUTHENTICATED_TOKENS = ${JSON.stringify(tokens, null, 2)};

@@ -213,16 +209,16 @@ export const TOKEN_STATS = {
   generated: PRE_AUTHENTICATED_TOKENS[0]?.generated || 'unknown'
 };

-console.log(\`🔐 Loaded \${TOKEN_STATS.total} pre-authenticated tokens from \${TOKEN_STATS.users} users\`);
+console.log(\`Loaded \${TOKEN_STATS.total} pre-authenticated tokens from \${TOKEN_STATS.users} users\`);
 `;

   const tokenFile = path.join(configsDir, "pre-authenticated-tokens.js");
   fs.writeFileSync(tokenFile, jsContent);

-  console.log(`💾 Saved to configs/pre-authenticated-tokens.js`);
-  console.log(`🚀 Ready for ${tokens.length} concurrent VU load testing!`);
+  console.log(`Saved to configs/pre-authenticated-tokens.js`);
+  console.log(`Ready for ${tokens.length} concurrent VU load testing!`);
   console.log(
-    `\n🔒 Security Note: Token file is gitignored and will not be committed`,
+    `\nSecurity Note: Token file is gitignored and will not be committed`,
   );

   return tokens.length;

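Note: the token generator above now authenticates against the platform's own login route rather than Supabase. As a rough illustration, the same request can be issued from Python. This is a minimal sketch, assuming the `/api/auth/login` path shown in the diff accepts an email/password JSON body and returns an `access_token` field; that field name and the example user are assumptions, not taken from this PR.

```python
# Sketch only: log a load-test user in against the native auth API.
# The /api/auth/login path comes from the diff above; the "access_token"
# response field and the test credentials are assumptions.
import json
import urllib.request

API_BASE_URL = "http://localhost:8006"  # same default as generate-tokens.js


def login(email: str, password: str) -> str | None:
    payload = json.dumps({"email": email, "password": password}).encode()
    req = urllib.request.Request(
        f"{API_BASE_URL}/api/auth/login",
        data=payload,
        headers={"Content-Type": "application/json"},
        method="POST",
    )
    try:
        with urllib.request.urlopen(req, timeout=10) as resp:
            body = json.loads(resp.read())
            return body.get("access_token")  # assumed response field
    except Exception as exc:
        print(f"Auth failed for {email}: {exc}")
        return None


if __name__ == "__main__":
    token = login("loadtest4@example.com", "password")  # hypothetical test user
    print("got token" if token else "no token")
```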
@@ -45,7 +45,7 @@ export default function () {
   // Handle authentication failure gracefully
   if (!headers || !headers.Authorization) {
     console.log(
-      `⚠️ VU ${__VU} has no valid pre-authentication token - skipping iteration`,
+      `VU ${__VU} has no valid pre-authentication token - skipping iteration`,
     );
     check(null, {
       "Authentication: Failed gracefully without crashing VU": () => true,
@@ -53,56 +53,57 @@ export default function () {
     return; // Exit iteration gracefully without crashing
   }

-  console.log(`🚀 VU ${__VU} making ${requestsPerVU} concurrent requests...`);
+  console.log(`VU ${__VU} making ${requestsPerVU} concurrent requests...`);

   // Create array of request functions to run concurrently
   const requests = [];

   for (let i = 0; i < requestsPerVU; i++) {
-    requests.push({
-      method: "GET",
-      url: `${config.SUPABASE_URL}/rest/v1/`,
-      params: { headers: { apikey: config.SUPABASE_ANON_KEY } },
-    });
-
+    // Health check endpoint
     requests.push({
       method: "GET",
       url: `${config.API_BASE_URL}/health`,
       params: { headers },
     });

+    // API endpoint check
+    requests.push({
+      method: "GET",
+      url: `${config.API_BASE_URL}/api`,
+      params: { headers },
+    });
   }

   // Execute all requests concurrently
   const responses = http.batch(requests);

   // Validate results
-  let supabaseSuccesses = 0;
-  let backendSuccesses = 0;
+  let healthSuccesses = 0;
+  let apiSuccesses = 0;

   for (let i = 0; i < responses.length; i++) {
     const response = responses[i];

     if (i % 2 === 0) {
-      // Supabase request
-      const connectivityCheck = check(response, {
-        "Supabase connectivity: Status is not 500": (r) => r.status !== 500,
-        "Supabase connectivity: Response time < 5s": (r) =>
+      // Health check request
+      const healthCheck = check(response, {
+        "Health endpoint: Status is not 500": (r) => r.status !== 500,
+        "Health endpoint: Response time < 5s": (r) =>
           r.timings.duration < 5000,
       });
-      if (connectivityCheck) supabaseSuccesses++;
+      if (healthCheck) healthSuccesses++;
     } else {
-      // Backend request
-      const backendCheck = check(response, {
-        "Backend server: Responds (any status)": (r) => r.status > 0,
-        "Backend server: Response time < 5s": (r) =>
-          r.timings.duration < 5000,
+      // API request
+      const apiCheck = check(response, {
+        "API server: Responds (any status)": (r) => r.status > 0,
+        "API server: Response time < 5s": (r) => r.timings.duration < 5000,
       });
-      if (backendCheck) backendSuccesses++;
+      if (apiCheck) apiSuccesses++;
     }
   }

   console.log(
-    `✅ VU ${__VU} completed: ${supabaseSuccesses}/${requestsPerVU} Supabase, ${backendSuccesses}/${requestsPerVU} backend requests successful`,
+    `VU ${__VU} completed: ${healthSuccesses}/${requestsPerVU} health, ${apiSuccesses}/${requestsPerVU} API requests successful`,
   );

   // Basic auth validation (once per iteration)
@@ -125,7 +126,7 @@ export default function () {
       parts[2] && parts[2].length > 10,
     });
   } catch (error) {
-    console.error(`💥 Test failed: ${error.message}`);
+    console.error(`Test failed: ${error.message}`);
     check(null, {
       "Test execution: No errors": () => false,
     });
@@ -133,5 +134,5 @@ export default function () {
 }

 export function teardown(data) {
-  console.log(`🏁 Basic connectivity test completed`);
+  console.log(`Basic connectivity test completed`);
 }

@@ -0,0 +1,65 @@
/*
  Warnings:

  - A unique constraint covering the columns `[googleId]` on the table `User` will be added. If there are existing duplicate values, this will fail.

*/
-- AlterTable
ALTER TABLE "User" ADD COLUMN "googleId" TEXT,
ADD COLUMN "passwordHash" TEXT,
ADD COLUMN "role" TEXT NOT NULL DEFAULT 'authenticated',
ALTER COLUMN "emailVerified" SET DEFAULT false;

-- CreateTable
CREATE TABLE "RefreshToken" (
    "id" TEXT NOT NULL,
    "token" TEXT NOT NULL,
    "userId" TEXT NOT NULL,
    "expiresAt" TIMESTAMP(3) NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "revokedAt" TIMESTAMP(3),

    CONSTRAINT "RefreshToken_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "PasswordResetToken" (
    "id" TEXT NOT NULL,
    "token" TEXT NOT NULL,
    "userId" TEXT NOT NULL,
    "expiresAt" TIMESTAMP(3) NOT NULL,
    "usedAt" TIMESTAMP(3),
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "PasswordResetToken_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "RefreshToken_token_key" ON "RefreshToken"("token");

-- CreateIndex
CREATE INDEX "RefreshToken_userId_idx" ON "RefreshToken"("userId");

-- CreateIndex
CREATE INDEX "RefreshToken_expiresAt_idx" ON "RefreshToken"("expiresAt");

-- CreateIndex
CREATE INDEX "RefreshToken_token_idx" ON "RefreshToken"("token");

-- CreateIndex
CREATE UNIQUE INDEX "PasswordResetToken_token_key" ON "PasswordResetToken"("token");

-- CreateIndex
CREATE INDEX "PasswordResetToken_userId_idx" ON "PasswordResetToken"("userId");

-- CreateIndex
CREATE INDEX "PasswordResetToken_token_idx" ON "PasswordResetToken"("token");

-- CreateIndex
CREATE UNIQUE INDEX "User_googleId_key" ON "User"("googleId");

-- AddForeignKey
ALTER TABLE "RefreshToken" ADD CONSTRAINT "RefreshToken_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "PasswordResetToken" ADD CONSTRAINT "PasswordResetToken_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;

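The `token` columns above store hashes rather than raw values (the Prisma schema later in this diff notes SHA-256 hashing for all three token tables). A hedged sketch of how a presented refresh token might be validated against the new `RefreshToken` table follows; the function name and lookup flow are illustrative assumptions, not code from this PR.

```python
# Sketch only: validate a refresh token presented by a client. The raw value
# is hashed with SHA-256 and compared against RefreshToken.token, then the
# expiry and revocation columns added by this migration are checked.
import hashlib
from datetime import datetime, timezone

from prisma import Prisma  # Prisma client already used by the backend


async def validate_refresh_token(db: Prisma, raw_token: str) -> str | None:
    """Return the owning userId if the token is valid, else None (assumed helper)."""
    token_hash = hashlib.sha256(raw_token.encode()).hexdigest()
    record = await db.refreshtoken.find_unique(where={"token": token_hash})
    if record is None:
        return None
    now = datetime.now(timezone.utc)
    if record.revokedAt is not None or record.expiresAt <= now:
        return None
    return record.userId
```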
@@ -0,0 +1,23 @@
-- CreateTable
CREATE TABLE "EmailVerificationToken" (
    "id" TEXT NOT NULL,
    "token" TEXT NOT NULL,
    "userId" TEXT NOT NULL,
    "expiresAt" TIMESTAMP(3) NOT NULL,
    "usedAt" TIMESTAMP(3),
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "EmailVerificationToken_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "EmailVerificationToken_token_key" ON "EmailVerificationToken"("token");

-- CreateIndex
CREATE INDEX "EmailVerificationToken_userId_idx" ON "EmailVerificationToken"("userId");

-- CreateIndex
CREATE INDEX "EmailVerificationToken_token_idx" ON "EmailVerificationToken"("token");

-- AddForeignKey
ALTER TABLE "EmailVerificationToken" ADD CONSTRAINT "EmailVerificationToken_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;

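Issuing one of these single-use tokens (email verification or password reset) follows the same hash-at-rest pattern: the raw token goes to the user, only its digest is stored, and `usedAt` marks consumption. A minimal sketch under those assumptions; the helper name, one-hour expiry, and Prisma model accessor are illustrative, not part of the diff.

```python
# Sketch only: issue a password-reset token for the new PasswordResetToken
# table. The raw token is returned to the caller (e.g. for an email link);
# only its SHA-256 digest is persisted. Expiry window is an assumption.
import hashlib
import secrets
from datetime import datetime, timedelta, timezone

from prisma import Prisma


async def issue_password_reset_token(db: Prisma, user_id: str) -> str:
    raw_token = secrets.token_urlsafe(32)                         # sent to the user
    token_hash = hashlib.sha256(raw_token.encode()).hexdigest()   # stored in DB
    await db.passwordresettoken.create(
        data={
            "token": token_hash,
            "userId": user_id,
            "expiresAt": datetime.now(timezone.utc) + timedelta(hours=1),
        }
    )
    return raw_token
```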
autogpt_platform/backend/poetry.lock (generated, 205 lines changed)
@@ -391,6 +391,21 @@ files = [
     {file = "audioop_lts-0.2.2.tar.gz", hash = "sha256:64d0c62d88e67b98a1a5e71987b7aa7b5bcffc7dcee65b635823dbdd0a8dbbd0"},
 ]

+[[package]]
+name = "authlib"
+version = "1.6.6"
+description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "authlib-1.6.6-py2.py3-none-any.whl", hash = "sha256:7d9e9bc535c13974313a87f53e8430eb6ea3d1cf6ae4f6efcd793f2e949143fd"},
+    {file = "authlib-1.6.6.tar.gz", hash = "sha256:45770e8e056d0f283451d9996fbb59b70d45722b45d854d58f32878d0a40c38e"},
+]
+
+[package.dependencies]
+cryptography = "*"
+
 [[package]]
 name = "autogpt-libs"
 version = "0.2.0"
@@ -402,6 +417,8 @@ files = []
 develop = true

 [package.dependencies]
+authlib = "^1.3.0"
+bcrypt = "^4.1.0"
 colorama = "^0.4.6"
 cryptography = "^45.0"
 expiringdict = "^1.2.2"
@@ -412,7 +429,6 @@ pydantic = "^2.11.7"
 pydantic-settings = "^2.10.1"
 pyjwt = {version = "^2.10.1", extras = ["crypto"]}
 redis = "^6.2.0"
-supabase = "^2.16.0"
 uvicorn = "^0.35.0"

 [package.source]
@@ -461,6 +477,71 @@ files = [
 docs = ["furo", "jaraco.packaging (>=9.3)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
 testing = ["jaraco.test", "pytest (!=8.0.*)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)"]

+[[package]]
+name = "bcrypt"
+version = "4.3.0"
+description = "Modern password hashing for your software and your servers"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+    {file = "bcrypt-4.3.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f01e060f14b6b57bbb72fc5b4a83ac21c443c9a2ee708e04a10e9192f90a6281"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5eeac541cefd0bb887a371ef73c62c3cd78535e4887b310626036a7c0a817bb"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59e1aa0e2cd871b08ca146ed08445038f42ff75968c7ae50d2fdd7860ade2180"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:0042b2e342e9ae3d2ed22727c1262f76cc4f345683b5c1715f0250cf4277294f"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74a8d21a09f5e025a9a23e7c0fd2c7fe8e7503e4d356c0a2c1486ba010619f09"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:0142b2cb84a009f8452c8c5a33ace5e3dfec4159e7735f5afe9a4d50a8ea722d"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:12fa6ce40cde3f0b899729dbd7d5e8811cb892d31b6f7d0334a1f37748b789fd"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:5bd3cca1f2aa5dbcf39e2aa13dd094ea181f48959e1071265de49cc2b82525af"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:335a420cfd63fc5bc27308e929bee231c15c85cc4c496610ffb17923abf7f231"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:0e30e5e67aed0187a1764911af023043b4542e70a7461ad20e837e94d23e1d6c"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b8d62290ebefd49ee0b3ce7500f5dbdcf13b81402c05f6dafab9a1e1b27212f"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2ef6630e0ec01376f59a006dc72918b1bf436c3b571b80fa1968d775fa02fe7d"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-win32.whl", hash = "sha256:7a4be4cbf241afee43f1c3969b9103a41b40bcb3a3f467ab19f891d9bc4642e4"},
+    {file = "bcrypt-4.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5c1949bf259a388863ced887c7861da1df681cb2388645766c89fdfd9004c669"},
+    {file = "bcrypt-4.3.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:f81b0ed2639568bf14749112298f9e4e2b28853dab50a8b357e31798686a036d"},
+    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:864f8f19adbe13b7de11ba15d85d4a428c7e2f344bac110f667676a0ff84924b"},
+    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e36506d001e93bffe59754397572f21bb5dc7c83f54454c990c74a468cd589e"},
+    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:842d08d75d9fe9fb94b18b071090220697f9f184d4547179b60734846461ed59"},
+    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7c03296b85cb87db865d91da79bf63d5609284fc0cab9472fdd8367bbd830753"},
+    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:62f26585e8b219cdc909b6a0069efc5e4267e25d4a3770a364ac58024f62a761"},
+    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:beeefe437218a65322fbd0069eb437e7c98137e08f22c4660ac2dc795c31f8bb"},
+    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:97eea7408db3a5bcce4a55d13245ab3fa566e23b4c67cd227062bb49e26c585d"},
+    {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:191354ebfe305e84f344c5964c7cd5f924a3bfc5d405c75ad07f232b6dffb49f"},
+    {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:41261d64150858eeb5ff43c753c4b216991e0ae16614a308a15d909503617732"},
+    {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:33752b1ba962ee793fa2b6321404bf20011fe45b9afd2a842139de3011898fef"},
+    {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:50e6e80a4bfd23a25f5c05b90167c19030cf9f87930f7cb2eacb99f45d1c3304"},
+    {file = "bcrypt-4.3.0-cp38-abi3-win32.whl", hash = "sha256:67a561c4d9fb9465ec866177e7aebcad08fe23aaf6fbd692a6fab69088abfc51"},
+    {file = "bcrypt-4.3.0-cp38-abi3-win_amd64.whl", hash = "sha256:584027857bc2843772114717a7490a37f68da563b3620f78a849bcb54dc11e62"},
+    {file = "bcrypt-4.3.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d3efb1157edebfd9128e4e46e2ac1a64e0c1fe46fb023158a407c7892b0f8c3"},
+    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08bacc884fd302b611226c01014eca277d48f0a05187666bca23aac0dad6fe24"},
+    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6746e6fec103fcd509b96bacdfdaa2fbde9a553245dbada284435173a6f1aef"},
+    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:afe327968aaf13fc143a56a3360cb27d4ad0345e34da12c7290f1b00b8fe9a8b"},
+    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d9af79d322e735b1fc33404b5765108ae0ff232d4b54666d46730f8ac1a43676"},
+    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f1e3ffa1365e8702dc48c8b360fef8d7afeca482809c5e45e653af82ccd088c1"},
+    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3004df1b323d10021fda07a813fd33e0fd57bef0e9a480bb143877f6cba996fe"},
+    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:531457e5c839d8caea9b589a1bcfe3756b0547d7814e9ce3d437f17da75c32b0"},
+    {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:17a854d9a7a476a89dcef6c8bd119ad23e0f82557afbd2c442777a16408e614f"},
+    {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6fb1fd3ab08c0cbc6826a2e0447610c6f09e983a281b919ed721ad32236b8b23"},
+    {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e965a9c1e9a393b8005031ff52583cedc15b7884fce7deb8b0346388837d6cfe"},
+    {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:79e70b8342a33b52b55d93b3a59223a844962bef479f6a0ea318ebbcadf71505"},
+    {file = "bcrypt-4.3.0-cp39-abi3-win32.whl", hash = "sha256:b4d4e57f0a63fd0b358eb765063ff661328f69a04494427265950c71b992a39a"},
+    {file = "bcrypt-4.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:e53e074b120f2877a35cc6c736b8eb161377caae8925c17688bd46ba56daaa5b"},
+    {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c950d682f0952bafcceaf709761da0a32a942272fad381081b51096ffa46cea1"},
+    {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:107d53b5c67e0bbc3f03ebf5b030e0403d24dda980f8e244795335ba7b4a027d"},
+    {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:b693dbb82b3c27a1604a3dff5bfc5418a7e6a781bb795288141e5f80cf3a3492"},
+    {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:b6354d3760fcd31994a14c89659dee887f1351a06e5dac3c1142307172a79f90"},
+    {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a839320bf27d474e52ef8cb16449bb2ce0ba03ca9f44daba6d93fa1d8828e48a"},
+    {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bdc6a24e754a555d7316fa4774e64c6c3997d27ed2d1964d55920c7c227bc4ce"},
+    {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:55a935b8e9a1d2def0626c4269db3fcd26728cbff1e84f0341465c31c4ee56d8"},
+    {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57967b7a28d855313a963aaea51bf6df89f833db4320da458e5b3c5ab6d4c938"},
+    {file = "bcrypt-4.3.0.tar.gz", hash = "sha256:3a3fd2204178b6d2adcf09cb4f6426ffef54762577a7c9b54c159008cb288c18"},
+]
+
+[package.extras]
+tests = ["pytest (>=3.2.1,!=3.3.0)"]
+typecheck = ["mypy"]
+
 [[package]]
 name = "black"
 version = "24.10.0"
@@ -981,21 +1062,6 @@ files = [
     {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"},
 ]

-[[package]]
-name = "deprecation"
-version = "2.1.0"
-description = "A library to handle automated deprecations"
-optional = false
-python-versions = "*"
-groups = ["main"]
-files = [
-    {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"},
-    {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"},
-]
-
-[package.dependencies]
-packaging = "*"
-
 [[package]]
 name = "discord-py"
 version = "2.5.2"
@@ -1889,23 +1955,6 @@ files = [
 [package.dependencies]
 requests = ">=2.20.0,<3.0"

-[[package]]
-name = "gotrue"
-version = "2.12.3"
-description = "Python Client Library for Supabase Auth"
-optional = false
-python-versions = "<4.0,>=3.9"
-groups = ["main"]
-files = [
-    {file = "gotrue-2.12.3-py3-none-any.whl", hash = "sha256:b1a3c6a5fe3f92e854a026c4c19de58706a96fd5fbdcc3d620b2802f6a46a26b"},
-    {file = "gotrue-2.12.3.tar.gz", hash = "sha256:f874cf9d0b2f0335bfbd0d6e29e3f7aff79998cd1c14d2ad814db8c06cee3852"},
-]
-
-[package.dependencies]
-httpx = {version = ">=0.26,<0.29", extras = ["http2"]}
-pydantic = ">=1.10,<3"
-pyjwt = ">=2.10.1,<3.0.0"
-
 [[package]]
 name = "gravitasml"
 version = "0.1.3"
@@ -4060,24 +4109,6 @@ docs = ["sphinx (>=1.7.1)"]
 redis = ["redis"]
 tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"]

-[[package]]
-name = "postgrest"
-version = "1.1.1"
-description = "PostgREST client for Python. This library provides an ORM interface to PostgREST."
-optional = false
-python-versions = "<4.0,>=3.9"
-groups = ["main"]
-files = [
-    {file = "postgrest-1.1.1-py3-none-any.whl", hash = "sha256:98a6035ee1d14288484bfe36235942c5fb2d26af6d8120dfe3efbe007859251a"},
-    {file = "postgrest-1.1.1.tar.gz", hash = "sha256:f3bb3e8c4602775c75c844a31f565f5f3dd584df4d36d683f0b67d01a86be322"},
-]
-
-[package.dependencies]
-deprecation = ">=2.1.0,<3.0.0"
-httpx = {version = ">=0.26,<0.29", extras = ["http2"]}
-pydantic = ">=1.9,<3.0"
-strenum = {version = ">=0.4.9,<0.5.0", markers = "python_version < \"3.11\""}
-
 [[package]]
 name = "posthog"
 version = "6.1.1"
@@ -5322,23 +5353,6 @@ files = [
 [package.extras]
 all = ["numpy"]

-[[package]]
-name = "realtime"
-version = "2.6.0"
-description = ""
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
-    {file = "realtime-2.6.0-py3-none-any.whl", hash = "sha256:a0512d71044c2621455bc87d1c171739967edc161381994de54e0989ca6c348e"},
-    {file = "realtime-2.6.0.tar.gz", hash = "sha256:f68743cff85d3113659fa19835a868674e720465649bf833e1cd47d7da0f7bbd"},
-]
-
-[package.dependencies]
-pydantic = ">=2.11.7,<3.0.0"
-typing-extensions = ">=4.14.0"
-websockets = ">=11,<16"
-
 [[package]]
 name = "redis"
 version = "6.2.0"
@@ -6100,23 +6114,6 @@ typing-extensions = {version = ">=4.10.0", markers = "python_version < \"3.13\""
 [package.extras]
 full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"]

-[[package]]
-name = "storage3"
-version = "0.12.0"
-description = "Supabase Storage client for Python."
-optional = false
-python-versions = "<4.0,>=3.9"
-groups = ["main"]
-files = [
-    {file = "storage3-0.12.0-py3-none-any.whl", hash = "sha256:1c4585693ca42243ded1512b58e54c697111e91a20916cd14783eebc37e7c87d"},
-    {file = "storage3-0.12.0.tar.gz", hash = "sha256:94243f20922d57738bf42e96b9f5582b4d166e8bf209eccf20b146909f3f71b0"},
-]
-
-[package.dependencies]
-deprecation = ">=2.1.0,<3.0.0"
-httpx = {version = ">=0.26,<0.29", extras = ["http2"]}
-python-dateutil = ">=2.8.2,<3.0.0"
-
 [[package]]
 name = "strenum"
 version = "0.4.15"
@@ -6150,42 +6147,6 @@ files = [
 requests = {version = ">=2.20", markers = "python_version >= \"3.0\""}
 typing-extensions = {version = ">=4.5.0", markers = "python_version >= \"3.7\""}

-[[package]]
-name = "supabase"
-version = "2.17.0"
-description = "Supabase client for Python."
-optional = false
-python-versions = "<4.0,>=3.9"
-groups = ["main"]
-files = [
-    {file = "supabase-2.17.0-py3-none-any.whl", hash = "sha256:2dd804fae8850cebccc9ab8711c2ee9e2f009e847f4c95c092a4423778e3c3f6"},
-    {file = "supabase-2.17.0.tar.gz", hash = "sha256:3207314b540db7e3339fa2500bd977541517afb4d20b7ff93a89b97a05f9df38"},
-]
-
-[package.dependencies]
-gotrue = "2.12.3"
-httpx = ">=0.26,<0.29"
-postgrest = "1.1.1"
-realtime = "2.6.0"
-storage3 = "0.12.0"
-supafunc = "0.10.1"
-
-[[package]]
-name = "supafunc"
-version = "0.10.1"
-description = "Library for Supabase Functions"
-optional = false
-python-versions = "<4.0,>=3.9"
-groups = ["main"]
-files = [
-    {file = "supafunc-0.10.1-py3-none-any.whl", hash = "sha256:26df9bd25ff2ef56cb5bfb8962de98f43331f7f8ff69572bac3ed9c3a9672040"},
-    {file = "supafunc-0.10.1.tar.gz", hash = "sha256:a5b33c8baecb6b5297d25da29a2503e2ec67ee6986f3d44c137e651b8a59a17d"},
-]
-
-[package.dependencies]
-httpx = {version = ">=0.26,<0.29", extras = ["http2"]}
-strenum = ">=0.4.15,<0.5.0"
-
 [[package]]
 name = "tenacity"
 version = "9.1.2"
@@ -7279,4 +7240,4 @@ cffi = ["cffi (>=1.11)"]
 [metadata]
 lock-version = "2.1"
 python-versions = ">=3.10,<3.14"
-content-hash = "13b191b2a1989d3321ff713c66ff6f5f4f3b82d15df4d407e0e5dbf87d7522c4"
+content-hash = "d0beae09baf94b9a5e7ec787f7da14c9268da37b1dcde7f582b948f2ff121843"

@@ -62,7 +62,6 @@ sentry-sdk = {extras = ["anthropic", "fastapi", "launchdarkly", "openai", "sqlal
 sqlalchemy = "^2.0.40"
 strenum = "^0.4.9"
 stripe = "^11.5.0"
-supabase = "2.17.0"
 tenacity = "^9.1.2"
 todoist-api-python = "^2.1.7"
 tweepy = "^4.16.0"
@@ -82,6 +81,7 @@ firecrawl-py = "^4.3.6"
 exa-py = "^1.14.20"
 croniter = "^6.0.0"
 stagehand = "^0.5.1"
+bcrypt = ">=4.1.0,<5.0.0"

 [tool.poetry.group.dev.dependencies]
 aiohappyeyeballs = "^2.6.1"

@@ -12,11 +12,11 @@ generator client {
   partial_type_generator = "backend/data/partial_types.py"
 }

-// User model to mirror Auth provider users
+// User model for authentication and platform data
 model User {
-  id            String   @id // This should match the Supabase user ID
+  id            String   @id @default(uuid())
   email         String   @unique
-  emailVerified Boolean  @default(true)
+  emailVerified Boolean  @default(false)
   name          String?
   createdAt     DateTime @default(now())
   updatedAt     DateTime @updatedAt
@@ -25,6 +25,11 @@ model User {
   stripeCustomerId String?
   topUpConfig      Json?

+  // Authentication fields
+  passwordHash String? // bcrypt hash (nullable for OAuth-only users)
+  googleId     String? @unique // Google OAuth user ID
+  role         String  @default("authenticated") // user role
+
   maxEmailsPerDay     Int     @default(3)
   notifyOnAgentRun    Boolean @default(true)
   notifyOnZeroBalance Boolean @default(true)
@@ -39,6 +44,11 @@ model User {

   timezone String @default("not-set")

+  // Auth token relations
+  RefreshTokens           RefreshToken[]
+  PasswordResetTokens     PasswordResetToken[]
+  EmailVerificationTokens EmailVerificationToken[]
+
   // Relations

   AgentGraphs AgentGraph[]
@@ -69,6 +79,49 @@ model User {
   OAuthRefreshTokens OAuthRefreshToken[]
 }

+// Refresh tokens for JWT authentication
+model RefreshToken {
+  id        String    @id @default(uuid())
+  token     String    @unique // SHA-256 hashed refresh token
+  userId    String
+  User      User      @relation(fields: [userId], references: [id], onDelete: Cascade)
+  expiresAt DateTime
+  createdAt DateTime  @default(now())
+  revokedAt DateTime?
+
+  @@index([userId])
+  @@index([expiresAt])
+  @@index([token])
+}
+
+// Password reset tokens
+model PasswordResetToken {
+  id        String    @id @default(uuid())
+  token     String    @unique // SHA-256 hashed token
+  userId    String
+  User      User      @relation(fields: [userId], references: [id], onDelete: Cascade)
+  expiresAt DateTime
+  usedAt    DateTime?
+  createdAt DateTime  @default(now())
+
+  @@index([userId])
+  @@index([token])
+}
+
+// Email verification tokens
+model EmailVerificationToken {
+  id        String    @id @default(uuid())
+  token     String    @unique // SHA-256 hashed token
+  userId    String
+  User      User      @relation(fields: [userId], references: [id], onDelete: Cascade)
+  expiresAt DateTime
+  usedAt    DateTime?
+  createdAt DateTime  @default(now())
+
+  @@index([userId])
+  @@index([token])
+}
+
 enum OnboardingStep {
   // Introductory onboarding (Library)
   WELCOME

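With `User.passwordHash` holding a bcrypt hash and `bcrypt` added to the backend dependencies above, password checks reduce to standard `bcrypt.hashpw`/`bcrypt.checkpw` calls. A minimal sketch follows; the function names are illustrative and the PR's actual auth module is not shown in this section.

```python
# Sketch only: hash and verify passwords against User.passwordHash using the
# bcrypt package added in pyproject.toml. Function names are assumptions.
import bcrypt


def hash_password(plain: str) -> str:
    # bcrypt embeds the salt in the returned hash string
    return bcrypt.hashpw(plain.encode(), bcrypt.gensalt()).decode()


def verify_password(plain: str, password_hash: str | None) -> bool:
    if not password_hash:  # OAuth-only users have no passwordHash
        return False
    return bcrypt.checkpw(plain.encode(), password_hash.encode())
```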
autogpt_platform/backend/scripts/migrate_big_tables.sh (new executable file, 254 lines)
@@ -0,0 +1,254 @@
#!/bin/bash
#
# Migrate Large Tables: Stream execution history from source to destination
#
# This script streams the large execution tables that were excluded from
# the initial migration. Run this AFTER migrate_to_gcp.sh completes.
#
# Tables migrated (in order of size):
#   - NotificationEvent (94 MB)
#   - AgentNodeExecutionKeyValueData (792 KB)
#   - AgentGraphExecution (1.3 GB)
#   - AgentNodeExecution (6 GB)
#   - AgentNodeExecutionInputOutput (30 GB)
#
# Usage:
#   ./scripts/migrate_big_tables.sh \
#     --source 'postgresql://user:pass@host:5432/db?schema=platform' \
#     --dest 'postgresql://user:pass@host:5432/db?schema=platform'
#
# Options:
#   --table <name>   Migrate only a specific table
#   --dry-run        Show what would be done without migrating
#

set -euo pipefail

# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'

log_info() { echo -e "${BLUE}[INFO]${NC} $1"; }
log_success() { echo -e "${GREEN}[SUCCESS]${NC} $1"; }
log_warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
log_error() { echo -e "${RED}[ERROR]${NC} $1"; }

# Arguments
SOURCE_URL=""
DEST_URL=""
DRY_RUN=false
SINGLE_TABLE=""

# Tables to migrate (ordered smallest to largest)
TABLES=(
  "NotificationEvent"
  "AgentNodeExecutionKeyValueData"
  "AgentGraphExecution"
  "AgentNodeExecution"
  "AgentNodeExecutionInputOutput"
)

usage() {
  cat << EOF
Usage: $(basename "$0") --source <url> --dest <url> [options]

Required:
  --source <url>   Source database URL with ?schema=platform
  --dest <url>     Destination database URL with ?schema=platform

Options:
  --table <name>   Migrate only a specific table (e.g., AgentGraphExecution)
  --dry-run        Show what would be done without migrating
  --help           Show this help

Tables migrated (in order):
  1. NotificationEvent (94 MB)
  2. AgentNodeExecutionKeyValueData (792 KB)
  3. AgentGraphExecution (1.3 GB)
  4. AgentNodeExecution (6 GB)
  5. AgentNodeExecutionInputOutput (30 GB)

EOF
  exit 1
}

parse_args() {
  while [[ $# -gt 0 ]]; do
    case $1 in
      --source) SOURCE_URL="$2"; shift 2 ;;
      --dest) DEST_URL="$2"; shift 2 ;;
      --table) SINGLE_TABLE="$2"; shift 2 ;;
      --dry-run) DRY_RUN=true; shift ;;
      --help|-h) usage ;;
      *) log_error "Unknown option: $1"; usage ;;
    esac
  done

  if [[ -z "$SOURCE_URL" ]]; then
    log_error "Missing --source"
    usage
  fi

  if [[ -z "$DEST_URL" ]]; then
    log_error "Missing --dest"
    usage
  fi
}

get_schema_from_url() {
  local url="$1"
  local schema=$(echo "$url" | sed -n 's/.*schema=\([^&]*\).*/\1/p')
  echo "${schema:-platform}"
}

get_base_url() {
  local url="$1"
  echo "${url%%\?*}"
}

get_table_size() {
  local base_url="$1"
  local schema="$2"
  local table="$3"

  psql "${base_url}" -t -c "
    SELECT pg_size_pretty(pg_total_relation_size('${schema}.\"${table}\"'))
  " 2>/dev/null | tr -d ' ' || echo "unknown"
}

get_table_count() {
  local base_url="$1"
  local schema="$2"
  local table="$3"

  psql "${base_url}" -t -c "
    SELECT COUNT(*) FROM ${schema}.\"${table}\"
  " 2>/dev/null | tr -d ' ' || echo "0"
}

migrate_table() {
  local table="$1"
  local source_base=$(get_base_url "$SOURCE_URL")
  local dest_base=$(get_base_url "$DEST_URL")
  local schema=$(get_schema_from_url "$SOURCE_URL")

  log_info "=== Migrating ${table} ==="

  # Get source stats
  local size=$(get_table_size "$source_base" "$schema" "$table")
  local count=$(get_table_count "$source_base" "$schema" "$table")
  log_info "Source: ${count} rows (${size})"

  if [[ "$DRY_RUN" == true ]]; then
    log_info "DRY RUN: Would stream ${table} from source to destination"
    return
  fi

  # Check if destination already has data
  local dest_count=$(get_table_count "$dest_base" "$schema" "$table")
  if [[ "$dest_count" != "0" ]]; then
    log_warn "Destination already has ${dest_count} rows in ${table}"
    read -p "Continue and add more rows? (y/N) " -n 1 -r
    echo ""
    if [[ ! $REPLY =~ ^[Yy]$ ]]; then
      log_info "Skipping ${table}"
      return
    fi
  fi

  log_info "Streaming ${table} (this may take a while for large tables)..."
  local start_time=$(date +%s)

  # Stream directly from source to destination
  pg_dump "${source_base}" \
    --table="${schema}.\"${table}\"" \
    --data-only \
    --no-owner \
    --no-privileges \
    2>/dev/null \
    | grep -v '\\restrict' \
    | psql "${dest_base}" -q

  local end_time=$(date +%s)
  local duration=$((end_time - start_time))

  # Verify
  local new_dest_count=$(get_table_count "$dest_base" "$schema" "$table")
  log_success "${table}: ${new_dest_count} rows migrated in ${duration}s"
}

main() {
  echo ""
  echo "========================================"
  echo "  Migrate Large Tables"
  echo "========================================"
  echo ""

  parse_args "$@"

  local source_base=$(get_base_url "$SOURCE_URL")
  local dest_base=$(get_base_url "$DEST_URL")

  log_info "Source: ${source_base}"
  log_info "Destination: ${dest_base}"
  [[ "$DRY_RUN" == true ]] && log_warn "DRY RUN MODE"
  echo ""

  # Test connections
  log_info "Testing connections..."
  if ! psql "${source_base}" -c "SELECT 1" > /dev/null 2>&1; then
    log_error "Cannot connect to source"
    exit 1
  fi
  if ! psql "${dest_base}" -c "SELECT 1" > /dev/null 2>&1; then
    log_error "Cannot connect to destination"
    exit 1
  fi
  log_success "Connections OK"
  echo ""

  # Determine which tables to migrate
  local tables_to_migrate=()
  if [[ -n "$SINGLE_TABLE" ]]; then
    tables_to_migrate=("$SINGLE_TABLE")
  else
    tables_to_migrate=("${TABLES[@]}")
  fi

  # Show plan
  log_info "Tables to migrate:"
  local schema=$(get_schema_from_url "$SOURCE_URL")
  for table in "${tables_to_migrate[@]}"; do
    local size=$(get_table_size "$source_base" "$schema" "$table")
    echo "  - ${table} (${size})"
  done
  echo ""

  if [[ "$DRY_RUN" != true ]]; then
    log_warn "This will stream large amounts of data to the destination."
    read -p "Continue? (y/N) " -n 1 -r
    echo ""
    if [[ ! $REPLY =~ ^[Yy]$ ]]; then
      log_info "Cancelled"
      exit 0
    fi
  fi

  echo ""
  log_info "Starting migration at $(date)"
  echo ""

  # Migrate each table
  for table in "${tables_to_migrate[@]}"; do
    migrate_table "$table"
    echo ""
  done

  log_success "Migration completed at $(date)"
  echo ""
}

main "$@"

autogpt_platform/backend/scripts/migrate_supabase_users.py (new executable file, 271 lines)
@@ -0,0 +1,271 @@
"""
Migration script to copy password hashes from Supabase auth.users to platform.User.

This script should be run BEFORE removing Supabase services to preserve user credentials.
It copies bcrypt password hashes from Supabase's auth.users table to the platform.User table,
allowing users to continue using their existing passwords after the migration.

Usage:
    cd backend
    poetry run python scripts/migrate_supabase_users.py [options]

Options:
    --dry-run             Preview what would be migrated without making changes
    --database-url <url>  Database URL (overrides DATABASE_URL env var)

Examples:
    # Using environment variable
    poetry run python scripts/migrate_supabase_users.py --dry-run

    # Using explicit database URL
    poetry run python scripts/migrate_supabase_users.py \
        --database-url "postgresql://user:pass@host:5432/db?schema=platform"

Prerequisites:
    - Supabase services must be running (auth.users table must exist)
    - Database migration 'add_native_auth' must be applied first
    - Either DATABASE_URL env var or --database-url must be provided
"""

import argparse
import asyncio
import logging
import sys
from datetime import datetime

from prisma import Prisma

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s - %(levelname)s - %(message)s",
)
logger = logging.getLogger(__name__)


async def migrate_credentials(db: Prisma) -> int:
    """
    Copy bcrypt password hashes from auth.users to platform.User.

    Returns the number of users updated.
    """
    logger.info("Migrating user credentials from auth.users to platform.User...")

    result = await db.execute_raw(
        """
        UPDATE platform."User" u
        SET
            "passwordHash" = a.encrypted_password,
            "emailVerified" = (a.email_confirmed_at IS NOT NULL)
        FROM auth.users a
        WHERE u.id::text = a.id::text
          AND a.encrypted_password IS NOT NULL
          AND u."passwordHash" IS NULL
        """
    )

    logger.info(f"Updated {result} users with credentials")
    return result


async def migrate_google_oauth_users(db: Prisma) -> int:
    """
    Copy Google OAuth user IDs from auth.users to platform.User.

    Returns the number of users updated.
    """
    logger.info("Migrating Google OAuth users from auth.users to platform.User...")

    result = await db.execute_raw(
        """
        UPDATE platform."User" u
        SET "googleId" = (a.raw_app_meta_data->>'provider_id')::text
        FROM auth.users a
        WHERE u.id::text = a.id::text
          AND a.raw_app_meta_data->>'provider' = 'google'
          AND a.raw_app_meta_data->>'provider_id' IS NOT NULL
          AND u."googleId" IS NULL
        """
    )

    logger.info(f"Updated {result} users with Google OAuth IDs")
    return result


async def get_migration_stats(db: Prisma) -> dict:
    """Get statistics about the migration."""
    # Count users in platform.User
    platform_users = await db.user.count()

    # Count users with credentials (not null)
    users_with_credentials = await db.user.count(
        where={"passwordHash": {"not": None}}  # type: ignore
    )

    # Count users with Google OAuth (not null)
    users_with_google = await db.user.count(
        where={"googleId": {"not": None}}  # type: ignore
    )

    # Count users without any auth method
    users_without_auth = await db.user.count(
        where={"passwordHash": None, "googleId": None}
    )

    return {
        "total_platform_users": platform_users,
        "users_with_credentials": users_with_credentials,
        "users_with_google_oauth": users_with_google,
        "users_without_auth": users_without_auth,
    }


async def verify_auth_users_exist(db: Prisma) -> bool:
    """Check if auth.users table exists and has data."""
    try:
        result = await db.query_raw("SELECT COUNT(*) as count FROM auth.users")
        count = result[0]["count"] if result else 0
        logger.info(f"Found {count} users in auth.users table")
        return count > 0
    except Exception as e:
        logger.error(f"Cannot access auth.users table: {e}")
        return False


async def preview_migration(db: Prisma) -> dict:
    """Preview what would be migrated without making changes."""
    logger.info("Previewing migration (dry-run mode)...")

    # Count users that would have credentials migrated
    credentials_preview = await db.query_raw(
        """
        SELECT COUNT(*) as count
        FROM platform."User" u
        JOIN auth.users a ON u.id::text = a.id::text
        WHERE a.encrypted_password IS NOT NULL
          AND u."passwordHash" IS NULL
        """
    )
    credentials_to_migrate = (
        credentials_preview[0]["count"] if credentials_preview else 0
    )

    # Count users that would have Google OAuth migrated
    google_preview = await db.query_raw(
        """
        SELECT COUNT(*) as count
        FROM platform."User" u
        JOIN auth.users a ON u.id::text = a.id::text
        WHERE a.raw_app_meta_data->>'provider' = 'google'
          AND a.raw_app_meta_data->>'provider_id' IS NOT NULL
          AND u."googleId" IS NULL
        """
    )
    google_to_migrate = google_preview[0]["count"] if google_preview else 0

    return {
        "credentials_to_migrate": credentials_to_migrate,
        "google_oauth_to_migrate": google_to_migrate,
    }


async def main(dry_run: bool = False):
    """Run the migration."""
    logger.info("=" * 60)
    logger.info("Supabase User Migration Script")
    if dry_run:
        logger.info(">>> DRY RUN MODE - No changes will be made <<<")
    logger.info("=" * 60)
    logger.info(f"Started at: {datetime.now().isoformat()}")

    db = Prisma()
    await db.connect()

    try:
        # Check if auth.users exists
        if not await verify_auth_users_exist(db):
            logger.error(
                "Cannot find auth.users table or it's empty. "
                "Make sure Supabase is running and has users."
            )
            sys.exit(1)

        # Get stats before migration
        logger.info("\n--- Current State ---")
        stats_before = await get_migration_stats(db)
        for key, value in stats_before.items():
            logger.info(f"  {key}: {value}")

        if dry_run:
            # Preview mode - show what would be migrated
            logger.info("\n--- Preview (would be migrated) ---")
            preview = await preview_migration(db)
            logger.info(
                f"  Credentials to migrate: {preview['credentials_to_migrate']}"
            )
            logger.info(
                f"  Google OAuth IDs to migrate: {preview['google_oauth_to_migrate']}"
            )
            logger.info("\n" + "=" * 60)
            logger.info("Dry run complete. Run without --dry-run to perform migration.")
            logger.info("=" * 60)
        else:
            # Run actual migrations
            logger.info("\n--- Running Migration ---")
            credentials_migrated = await migrate_credentials(db)
            google_migrated = await migrate_google_oauth_users(db)

            # Get stats after migration
            logger.info("\n--- After Migration ---")
            stats_after = await get_migration_stats(db)
            for key, value in stats_after.items():
                logger.info(f"  {key}: {value}")

            # Summary
            logger.info("\n--- Summary ---")
            logger.info(f"Credentials migrated: {credentials_migrated}")
            logger.info(f"Google OAuth IDs migrated: {google_migrated}")
            logger.info(
                f"Users still without auth: {stats_after['users_without_auth']} "
                "(these may be OAuth users from other providers)"
            )

            logger.info("\n" + "=" * 60)
            logger.info("Migration completed successfully!")
            logger.info("=" * 60)

    except Exception as e:
        logger.error(f"Migration failed: {e}")
        raise
    finally:
        await db.disconnect()


def parse_args():
    """Parse command line arguments."""
    parser = argparse.ArgumentParser(
        description="Migrate user auth data from Supabase to native auth"
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
|
help="Preview what would be migrated without making changes",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--database-url",
|
||||||
|
type=str,
|
||||||
|
help="Database URL (overrides DATABASE_URL env var)",
|
||||||
|
)
|
||||||
|
return parser.parse_args()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
import os
|
||||||
|
|
||||||
|
args = parse_args()
|
||||||
|
|
||||||
|
# Override DATABASE_URL if provided via command line
|
||||||
|
if args.database_url:
|
||||||
|
os.environ["DATABASE_URL"] = args.database_url
|
||||||
|
os.environ["DIRECT_URL"] = args.database_url
|
||||||
|
|
||||||
|
asyncio.run(main(dry_run=args.dry_run))
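
For quick reference, a minimal invocation sketch for this migration script; the filename below is a placeholder (use the actual script name from the file header above) and the poetry-based call mirrors the sibling scripts in backend/scripts/:

# Preview what would be migrated, without writing anything
poetry run python scripts/migrate_supabase_users.py --dry-run

# Run the real migration against an explicit database
poetry run python scripts/migrate_supabase_users.py \
  --database-url 'postgresql://user:pass@host:5432/db?schema=platform'
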
482  autogpt_platform/backend/scripts/migrate_to_gcp.sh  (Executable file)
@@ -0,0 +1,482 @@
#!/bin/bash
#
# Database Migration Script: Supabase to GCP Cloud SQL
#
# This script migrates the AutoGPT Platform database from Supabase to a new PostgreSQL instance.
#
# Migration Steps:
#   0. Nuke destination database (drop schema, recreate, apply migrations)
#   1. Export platform schema data from source
#   2. Export auth.users data from source (for password hashes, OAuth IDs)
#   3. Import platform schema data to destination
#   4. Update User table in destination with auth data
#   5. Refresh materialized views
#
# Prerequisites:
#   - pg_dump and psql (PostgreSQL 15+)
#   - poetry installed (for Prisma migrations)
#   - Source and destination databases accessible
#
# Usage:
#   ./scripts/migrate_to_gcp.sh \
#     --source 'postgresql://user:pass@host:5432/db?schema=platform' \
#     --dest 'postgresql://user:pass@host:5432/db?schema=platform'
#

set -euo pipefail
|
||||||
|
|
||||||
|
# Colors for output
|
||||||
|
RED='\033[0;31m'
|
||||||
|
GREEN='\033[0;32m'
|
||||||
|
YELLOW='\033[1;33m'
|
||||||
|
BLUE='\033[0;34m'
|
||||||
|
NC='\033[0m'
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
|
BACKEND_DIR="$(dirname "$SCRIPT_DIR")"
|
||||||
|
BACKUP_DIR="${BACKEND_DIR}/migration_backups"
|
||||||
|
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
|
||||||
|
|
||||||
|
# Command line arguments
|
||||||
|
SOURCE_URL=""
|
||||||
|
DEST_URL=""
|
||||||
|
DRY_RUN=false
|
||||||
|
|
||||||
|
log_info() { echo -e "${BLUE}[INFO]${NC} $1"; }
|
||||||
|
log_success() { echo -e "${GREEN}[SUCCESS]${NC} $1"; }
|
||||||
|
log_warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
|
||||||
|
log_error() { echo -e "${RED}[ERROR]${NC} $1"; }
|
||||||
|
|
||||||
|
usage() {
|
||||||
|
cat << EOF
|
||||||
|
Usage: $(basename "$0") --source <url> --dest <url> [options]
|
||||||
|
|
||||||
|
Required:
|
||||||
|
--source <url> Source database URL with ?schema=platform
|
||||||
|
--dest <url> Destination database URL with ?schema=platform
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--dry-run Preview without making changes
|
||||||
|
--help Show this help
|
||||||
|
|
||||||
|
Migration Steps:
|
||||||
|
0. Nuke destination database (DROP SCHEMA, recreate, apply Prisma migrations)
|
||||||
|
1. Export platform schema data from source (READ-ONLY)
|
||||||
|
2. Export auth.users data from source (READ-ONLY)
|
||||||
|
3. Import platform data to destination
|
||||||
|
4. Update User table with auth data (passwords, OAuth IDs)
|
||||||
|
5. Refresh materialized views
|
||||||
|
|
||||||
|
EOF
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
|
parse_args() {
|
||||||
|
while [[ $# -gt 0 ]]; do
|
||||||
|
case $1 in
|
||||||
|
--source) SOURCE_URL="$2"; shift 2 ;;
|
||||||
|
--dest) DEST_URL="$2"; shift 2 ;;
|
||||||
|
--dry-run) DRY_RUN=true; shift ;;
|
||||||
|
--help|-h) usage ;;
|
||||||
|
*) log_error "Unknown option: $1"; usage ;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
if [[ -z "$SOURCE_URL" ]]; then
|
||||||
|
log_error "Missing --source"
|
||||||
|
usage
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ -z "$DEST_URL" ]]; then
|
||||||
|
log_error "Missing --dest"
|
||||||
|
usage
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
get_schema_from_url() {
|
||||||
|
local url="$1"
|
||||||
|
local schema=$(echo "$url" | sed -n 's/.*schema=\([^&]*\).*/\1/p')
|
||||||
|
echo "${schema:-platform}"
|
||||||
|
}
|
||||||
|
|
||||||
|
get_base_url() {
|
||||||
|
local url="$1"
|
||||||
|
echo "${url%%\?*}"
|
||||||
|
}
|
||||||
|
|
||||||
|
test_connections() {
|
||||||
|
local source_base=$(get_base_url "$SOURCE_URL")
|
||||||
|
local dest_base=$(get_base_url "$DEST_URL")
|
||||||
|
|
||||||
|
log_info "Testing source connection..."
|
||||||
|
if ! psql "${source_base}" -c "SELECT 1" > /dev/null 2>&1; then
|
||||||
|
log_error "Cannot connect to source database"
|
||||||
|
psql "${source_base}" -c "SELECT 1" 2>&1 || true
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
log_success "Source connection OK"
|
||||||
|
|
||||||
|
log_info "Testing destination connection..."
|
||||||
|
if ! psql "${dest_base}" -c "SELECT 1" > /dev/null 2>&1; then
|
||||||
|
log_error "Cannot connect to destination database"
|
||||||
|
psql "${dest_base}" -c "SELECT 1" 2>&1 || true
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
log_success "Destination connection OK"
|
||||||
|
}
|
||||||
|
|
||||||
|
# ============================================
|
||||||
|
# STEP 0: Nuke destination database
|
||||||
|
# ============================================
|
||||||
|
nuke_destination() {
|
||||||
|
local schema=$(get_schema_from_url "$DEST_URL")
|
||||||
|
local dest_base=$(get_base_url "$DEST_URL")
|
||||||
|
|
||||||
|
log_info "=== STEP 0: Nuking destination database ==="
|
||||||
|
|
||||||
|
if [[ "$DRY_RUN" == true ]]; then
|
||||||
|
log_info "DRY RUN: Would drop and recreate schema '${schema}' in destination"
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Show what exists in destination
|
||||||
|
log_info "Current destination state:"
|
||||||
|
local user_count=$(psql "${dest_base}" -t -c "SELECT COUNT(*) FROM ${schema}.\"User\"" 2>/dev/null | tr -d ' ' || echo "0")
|
||||||
|
local graph_count=$(psql "${dest_base}" -t -c "SELECT COUNT(*) FROM ${schema}.\"AgentGraph\"" 2>/dev/null | tr -d ' ' || echo "0")
|
||||||
|
echo " - Users: ${user_count}"
|
||||||
|
echo " - AgentGraphs: ${graph_count}"
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
log_warn "⚠️ WARNING: This will PERMANENTLY DELETE all data in the destination database!"
|
||||||
|
log_warn "Schema '${schema}' will be dropped and recreated."
|
||||||
|
echo ""
|
||||||
|
read -p "Type 'NUKE' to confirm deletion: " -r
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
if [[ "$REPLY" != "NUKE" ]]; then
|
||||||
|
log_info "Cancelled - destination not modified"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
log_info "Dropping schema '${schema}'..."
|
||||||
|
psql "${dest_base}" -c "DROP SCHEMA IF EXISTS ${schema} CASCADE;"
|
||||||
|
|
||||||
|
log_info "Recreating schema '${schema}'..."
|
||||||
|
psql "${dest_base}" -c "CREATE SCHEMA ${schema};"
|
||||||
|
|
||||||
|
log_info "Applying Prisma migrations..."
|
||||||
|
cd "${BACKEND_DIR}"
|
||||||
|
DATABASE_URL="${DEST_URL}" DIRECT_URL="${DEST_URL}" poetry run prisma migrate deploy
|
||||||
|
|
||||||
|
log_success "Destination database reset complete"
|
||||||
|
}
|
||||||
|
|
||||||
|
# ============================================
|
||||||
|
# STEP 1: Export platform schema data
|
||||||
|
# ============================================
|
||||||
|
export_platform_data() {
|
||||||
|
local schema=$(get_schema_from_url "$SOURCE_URL")
|
||||||
|
local base_url=$(get_base_url "$SOURCE_URL")
|
||||||
|
local output_file="${BACKUP_DIR}/platform_data_${TIMESTAMP}.sql"
|
||||||
|
|
||||||
|
log_info "=== STEP 1: Exporting platform schema data ==="
|
||||||
|
mkdir -p "${BACKUP_DIR}"
|
||||||
|
|
||||||
|
if [[ "$DRY_RUN" == true ]]; then
|
||||||
|
log_info "DRY RUN: Would export schema '${schema}' to ${output_file}"
|
||||||
|
log_info "DRY RUN: Excluding large execution tables"
|
||||||
|
touch "$output_file"
|
||||||
|
echo "$output_file"
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
|
||||||
|
log_info "Exporting from schema: ${schema}"
|
||||||
|
log_info "EXCLUDING: AgentGraphExecution, AgentNodeExecution, AgentNodeExecutionInputOutput, AgentNodeExecutionKeyValueData, NotificationEvent"
|
||||||
|
|
||||||
|
pg_dump "${base_url}" \
|
||||||
|
--schema="${schema}" \
|
||||||
|
--format=plain \
|
||||||
|
--no-owner \
|
||||||
|
--no-privileges \
|
||||||
|
--data-only \
|
||||||
|
--exclude-table="${schema}.AgentGraphExecution" \
|
||||||
|
--exclude-table="${schema}.AgentNodeExecution" \
|
||||||
|
--exclude-table="${schema}.AgentNodeExecutionInputOutput" \
|
||||||
|
--exclude-table="${schema}.AgentNodeExecutionKeyValueData" \
|
||||||
|
--exclude-table="${schema}.NotificationEvent" \
|
||||||
|
--file="${output_file}" 2>&1
|
||||||
|
|
||||||
|
# Remove Supabase-specific commands that break import
|
||||||
|
sed -i.bak '/\\restrict/d' "${output_file}"
|
||||||
|
rm -f "${output_file}.bak"
|
||||||
|
|
||||||
|
local size=$(du -h "${output_file}" | cut -f1)
|
||||||
|
log_success "Platform data exported: ${output_file} (${size})"
|
||||||
|
echo "$output_file"
|
||||||
|
}
|
||||||
|
|
||||||
|
# ============================================
|
||||||
|
# STEP 2: Export auth.users data
|
||||||
|
# ============================================
|
||||||
|
export_auth_data() {
|
||||||
|
local base_url=$(get_base_url "$SOURCE_URL")
|
||||||
|
local output_file="${BACKUP_DIR}/auth_users_${TIMESTAMP}.csv"
|
||||||
|
|
||||||
|
log_info "=== STEP 2: Exporting auth.users data ==="
|
||||||
|
|
||||||
|
# Check if auth.users exists
|
||||||
|
local auth_exists=$(psql "${base_url}" -t -c "SELECT EXISTS (SELECT FROM information_schema.tables WHERE table_schema = 'auth' AND table_name = 'users')" 2>/dev/null | tr -d ' ')
|
||||||
|
|
||||||
|
if [[ "$auth_exists" != "t" ]]; then
|
||||||
|
log_warn "No auth.users table found - skipping auth export"
|
||||||
|
echo ""
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "$DRY_RUN" == true ]]; then
|
||||||
|
log_info "DRY RUN: Would export auth.users to ${output_file}"
|
||||||
|
touch "$output_file"
|
||||||
|
echo "$output_file"
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
|
||||||
|
log_info "Extracting auth data (passwords, OAuth IDs, email verification)..."
|
||||||
|
|
||||||
|
psql "${base_url}" -c "\COPY (
|
||||||
|
SELECT
|
||||||
|
id,
|
||||||
|
encrypted_password,
|
||||||
|
(email_confirmed_at IS NOT NULL) as email_verified,
|
||||||
|
CASE
|
||||||
|
WHEN raw_app_meta_data->>'provider' = 'google'
|
||||||
|
THEN raw_app_meta_data->>'provider_id'
|
||||||
|
ELSE NULL
|
||||||
|
END as google_id
|
||||||
|
FROM auth.users
|
||||||
|
WHERE encrypted_password IS NOT NULL
|
||||||
|
OR raw_app_meta_data->>'provider' = 'google'
|
||||||
|
) TO '${output_file}' WITH CSV HEADER"
|
||||||
|
|
||||||
|
local count=$(wc -l < "${output_file}" | tr -d ' ')
|
||||||
|
log_success "Auth data exported: ${output_file} (${count} rows including header)"
|
||||||
|
echo "$output_file"
|
||||||
|
}
|
||||||
|
|
||||||
|
# ============================================
|
||||||
|
# STEP 3: Import platform data to destination
|
||||||
|
# ============================================
|
||||||
|
import_platform_data() {
|
||||||
|
local platform_file="$1"
|
||||||
|
local dest_base=$(get_base_url "$DEST_URL")
|
||||||
|
|
||||||
|
log_info "=== STEP 3: Importing platform data to destination ==="
|
||||||
|
|
||||||
|
if [[ "$DRY_RUN" == true ]]; then
|
||||||
|
log_info "DRY RUN: Would import ${platform_file} to destination"
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ ! -f "$platform_file" ]]; then
|
||||||
|
log_error "Platform data file not found: ${platform_file}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
log_info "Importing platform data (this may take a while)..."
|
||||||
|
|
||||||
|
# Import with error logging
|
||||||
|
psql "${dest_base}" -f "${platform_file}" 2>&1 | tee "${BACKUP_DIR}/import_log_${TIMESTAMP}.txt" | head -100
|
||||||
|
|
||||||
|
log_success "Platform data import completed"
|
||||||
|
}
|
||||||
|
|
||||||
|
# ============================================
|
||||||
|
# STEP 4: Update User table with auth data
|
||||||
|
# ============================================
|
||||||
|
update_user_auth_data() {
|
||||||
|
local auth_file="$1"
|
||||||
|
local schema=$(get_schema_from_url "$DEST_URL")
|
||||||
|
local dest_base=$(get_base_url "$DEST_URL")
|
||||||
|
|
||||||
|
log_info "=== STEP 4: Updating User table with auth data ==="
|
||||||
|
|
||||||
|
if [[ -z "$auth_file" || ! -f "$auth_file" ]]; then
|
||||||
|
log_warn "No auth data file - skipping User auth update"
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "$DRY_RUN" == true ]]; then
|
||||||
|
log_info "DRY RUN: Would update User table with auth data"
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
|
||||||
|
log_info "Creating temporary table for auth data..."
|
||||||
|
|
||||||
|
psql "${dest_base}" << EOF
|
||||||
|
-- Create temp table for auth data
|
||||||
|
CREATE TEMP TABLE temp_auth_users (
|
||||||
|
id UUID,
|
||||||
|
encrypted_password TEXT,
|
||||||
|
email_verified BOOLEAN,
|
||||||
|
google_id TEXT
|
||||||
|
);
|
||||||
|
|
||||||
|
-- Import CSV
|
||||||
|
\COPY temp_auth_users FROM '${auth_file}' WITH CSV HEADER;
|
||||||
|
|
||||||
|
-- Update User table with password hashes
|
||||||
|
UPDATE ${schema}."User" u
|
||||||
|
SET "passwordHash" = t.encrypted_password
|
||||||
|
FROM temp_auth_users t
|
||||||
|
WHERE u.id = t.id
|
||||||
|
AND t.encrypted_password IS NOT NULL
|
||||||
|
AND u."passwordHash" IS NULL;
|
||||||
|
|
||||||
|
-- Update User table with email verification
|
||||||
|
UPDATE ${schema}."User" u
|
||||||
|
SET "emailVerified" = t.email_verified
|
||||||
|
FROM temp_auth_users t
|
||||||
|
WHERE u.id = t.id
|
||||||
|
AND t.email_verified = true;
|
||||||
|
|
||||||
|
-- Update User table with Google OAuth IDs
|
||||||
|
UPDATE ${schema}."User" u
|
||||||
|
SET "googleId" = t.google_id
|
||||||
|
FROM temp_auth_users t
|
||||||
|
WHERE u.id = t.id
|
||||||
|
AND t.google_id IS NOT NULL
|
||||||
|
AND u."googleId" IS NULL;
|
||||||
|
|
||||||
|
-- Show results
|
||||||
|
SELECT
|
||||||
|
'Total Users' as metric, COUNT(*)::text as value FROM ${schema}."User"
|
||||||
|
UNION ALL
|
||||||
|
SELECT 'With Password', COUNT(*)::text FROM ${schema}."User" WHERE "passwordHash" IS NOT NULL
|
||||||
|
UNION ALL
|
||||||
|
SELECT 'With Google OAuth', COUNT(*)::text FROM ${schema}."User" WHERE "googleId" IS NOT NULL
|
||||||
|
UNION ALL
|
||||||
|
SELECT 'Email Verified', COUNT(*)::text FROM ${schema}."User" WHERE "emailVerified" = true;
|
||||||
|
|
||||||
|
DROP TABLE temp_auth_users;
|
||||||
|
EOF
|
||||||
|
|
||||||
|
log_success "User auth data updated"
|
||||||
|
}
|
||||||
|
|
||||||
|
# ============================================
|
||||||
|
# STEP 5: Refresh materialized views
|
||||||
|
# ============================================
|
||||||
|
refresh_views() {
|
||||||
|
local schema=$(get_schema_from_url "$DEST_URL")
|
||||||
|
local dest_base=$(get_base_url "$DEST_URL")
|
||||||
|
|
||||||
|
log_info "=== STEP 5: Refreshing materialized views ==="
|
||||||
|
|
||||||
|
if [[ "$DRY_RUN" == true ]]; then
|
||||||
|
log_info "DRY RUN: Would refresh materialized views"
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
|
||||||
|
psql "${dest_base}" << EOF
|
||||||
|
SET search_path TO ${schema};
|
||||||
|
REFRESH MATERIALIZED VIEW "mv_agent_run_counts";
|
||||||
|
REFRESH MATERIALIZED VIEW "mv_review_stats";
|
||||||
|
|
||||||
|
-- Reset sequences
|
||||||
|
SELECT setval(
|
||||||
|
pg_get_serial_sequence('${schema}."SearchTerms"', 'id'),
|
||||||
|
COALESCE((SELECT MAX(id) FROM ${schema}."SearchTerms"), 0) + 1,
|
||||||
|
false
|
||||||
|
);
|
||||||
|
EOF
|
||||||
|
|
||||||
|
log_success "Materialized views refreshed"
|
||||||
|
}
|
||||||
|
|
||||||
|
# ============================================
|
||||||
|
# Verification
|
||||||
|
# ============================================
|
||||||
|
verify_migration() {
|
||||||
|
local source_base=$(get_base_url "$SOURCE_URL")
|
||||||
|
local dest_base=$(get_base_url "$DEST_URL")
|
||||||
|
local schema=$(get_schema_from_url "$SOURCE_URL")
|
||||||
|
|
||||||
|
log_info "=== VERIFICATION ==="
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "Source counts:"
|
||||||
|
psql "${source_base}" -c "SELECT 'User' as table_name, COUNT(*) FROM ${schema}.\"User\" UNION ALL SELECT 'AgentGraph', COUNT(*) FROM ${schema}.\"AgentGraph\" UNION ALL SELECT 'Profile', COUNT(*) FROM ${schema}.\"Profile\""
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "Destination counts:"
|
||||||
|
psql "${dest_base}" -c "SELECT 'User' as table_name, COUNT(*) FROM ${schema}.\"User\" UNION ALL SELECT 'AgentGraph', COUNT(*) FROM ${schema}.\"AgentGraph\" UNION ALL SELECT 'Profile', COUNT(*) FROM ${schema}.\"Profile\""
|
||||||
|
}
|
||||||
|
|
||||||
|
# ============================================
|
||||||
|
# Main
|
||||||
|
# ============================================
|
||||||
|
main() {
|
||||||
|
echo ""
|
||||||
|
echo "========================================"
|
||||||
|
echo " Database Migration Script"
|
||||||
|
echo "========================================"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
parse_args "$@"
|
||||||
|
|
||||||
|
log_info "Source: $(get_base_url "$SOURCE_URL")"
|
||||||
|
log_info "Destination: $(get_base_url "$DEST_URL")"
|
||||||
|
[[ "$DRY_RUN" == true ]] && log_warn "DRY RUN MODE"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
test_connections
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Step 0: Nuke destination database (with confirmation)
|
||||||
|
nuke_destination
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
if [[ "$DRY_RUN" != true ]]; then
|
||||||
|
log_warn "This will migrate data to the destination database."
|
||||||
|
read -p "Continue with migration? (y/N) " -n 1 -r
|
||||||
|
echo ""
|
||||||
|
[[ ! $REPLY =~ ^[Yy]$ ]] && { log_info "Cancelled"; exit 0; }
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
log_info "Starting migration at $(date)"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Step 1: Export platform data (READ-ONLY on source)
|
||||||
|
platform_file=$(export_platform_data)
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Step 2: Export auth data (READ-ONLY on source)
|
||||||
|
auth_file=$(export_auth_data)
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Step 3: Import platform data to destination
|
||||||
|
import_platform_data "$platform_file"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Step 4: Update User table with auth data
|
||||||
|
update_user_auth_data "$auth_file"
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Step 5: Refresh materialized views
|
||||||
|
refresh_views
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
# Verification
|
||||||
|
verify_migration
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
log_success "Migration completed at $(date)"
|
||||||
|
echo ""
|
||||||
|
echo "Files created:"
|
||||||
|
echo " - Platform data: ${platform_file}"
|
||||||
|
[[ -n "$auth_file" ]] && echo " - Auth data: ${auth_file}"
|
||||||
|
echo ""
|
||||||
|
}
|
||||||
|
|
||||||
|
main "$@"
141  autogpt_platform/backend/scripts/verify_migration.sql  (Normal file)
@@ -0,0 +1,141 @@
-- Database Migration Verification Script
-- Run this on both source (Supabase) and target (GCP) databases to compare

SET search_path TO platform;

-- ============================================
-- TABLE ROW COUNTS
-- ============================================

SELECT '=== TABLE ROW COUNTS ===' as section;

SELECT 'User' as table_name, COUNT(*) as row_count FROM "User"
UNION ALL SELECT 'Profile', COUNT(*) FROM "Profile"
UNION ALL SELECT 'UserOnboarding', COUNT(*) FROM "UserOnboarding"
UNION ALL SELECT 'UserBalance', COUNT(*) FROM "UserBalance"
UNION ALL SELECT 'AgentGraph', COUNT(*) FROM "AgentGraph"
UNION ALL SELECT 'AgentNode', COUNT(*) FROM "AgentNode"
UNION ALL SELECT 'AgentBlock', COUNT(*) FROM "AgentBlock"
UNION ALL SELECT 'AgentNodeLink', COUNT(*) FROM "AgentNodeLink"
UNION ALL SELECT 'AgentGraphExecution', COUNT(*) FROM "AgentGraphExecution"
UNION ALL SELECT 'AgentNodeExecution', COUNT(*) FROM "AgentNodeExecution"
UNION ALL SELECT 'AgentNodeExecutionInputOutput', COUNT(*) FROM "AgentNodeExecutionInputOutput"
UNION ALL SELECT 'AgentNodeExecutionKeyValueData', COUNT(*) FROM "AgentNodeExecutionKeyValueData"
UNION ALL SELECT 'AgentPreset', COUNT(*) FROM "AgentPreset"
UNION ALL SELECT 'LibraryAgent', COUNT(*) FROM "LibraryAgent"
UNION ALL SELECT 'StoreListing', COUNT(*) FROM "StoreListing"
UNION ALL SELECT 'StoreListingVersion', COUNT(*) FROM "StoreListingVersion"
UNION ALL SELECT 'StoreListingReview', COUNT(*) FROM "StoreListingReview"
UNION ALL SELECT 'IntegrationWebhook', COUNT(*) FROM "IntegrationWebhook"
UNION ALL SELECT 'APIKey', COUNT(*) FROM "APIKey"
UNION ALL SELECT 'CreditTransaction', COUNT(*) FROM "CreditTransaction"
UNION ALL SELECT 'CreditRefundRequest', COUNT(*) FROM "CreditRefundRequest"
UNION ALL SELECT 'AnalyticsDetails', COUNT(*) FROM "AnalyticsDetails"
UNION ALL SELECT 'AnalyticsMetrics', COUNT(*) FROM "AnalyticsMetrics"
UNION ALL SELECT 'SearchTerms', COUNT(*) FROM "SearchTerms"
UNION ALL SELECT 'NotificationEvent', COUNT(*) FROM "NotificationEvent"
UNION ALL SELECT 'UserNotificationBatch', COUNT(*) FROM "UserNotificationBatch"
UNION ALL SELECT 'BuilderSearchHistory', COUNT(*) FROM "BuilderSearchHistory"
UNION ALL SELECT 'PendingHumanReview', COUNT(*) FROM "PendingHumanReview"
UNION ALL SELECT 'RefreshToken', COUNT(*) FROM "RefreshToken"
UNION ALL SELECT 'PasswordResetToken', COUNT(*) FROM "PasswordResetToken"
ORDER BY table_name;

-- ============================================
-- AUTH DATA VERIFICATION
-- ============================================

SELECT '=== AUTH DATA VERIFICATION ===' as section;

SELECT
    COUNT(*) as total_users,
    COUNT("passwordHash") as users_with_password,
    COUNT("googleId") as users_with_google,
    COUNT(CASE WHEN "emailVerified" = true THEN 1 END) as verified_emails,
    COUNT(CASE WHEN "passwordHash" IS NULL AND "googleId" IS NULL THEN 1 END) as users_without_auth
FROM "User";

-- ============================================
-- VIEW VERIFICATION
-- ============================================

SELECT '=== VIEW VERIFICATION ===' as section;

SELECT 'StoreAgent' as view_name, COUNT(*) as row_count FROM "StoreAgent"
UNION ALL SELECT 'Creator', COUNT(*) FROM "Creator"
UNION ALL SELECT 'StoreSubmission', COUNT(*) FROM "StoreSubmission";

-- ============================================
-- MATERIALIZED VIEW VERIFICATION
-- ============================================

SELECT '=== MATERIALIZED VIEW VERIFICATION ===' as section;

SELECT 'mv_agent_run_counts' as view_name, COUNT(*) as row_count FROM "mv_agent_run_counts"
UNION ALL SELECT 'mv_review_stats', COUNT(*) FROM "mv_review_stats";

-- ============================================
-- FOREIGN KEY INTEGRITY CHECKS
-- ============================================

SELECT '=== FOREIGN KEY INTEGRITY (should all be 0) ===' as section;

SELECT 'Orphaned Profiles' as check_name,
       COUNT(*) as count
FROM "Profile" p
WHERE p."userId" IS NOT NULL
  AND NOT EXISTS (SELECT 1 FROM "User" u WHERE u.id = p."userId");

SELECT 'Orphaned AgentGraphs' as check_name,
       COUNT(*) as count
FROM "AgentGraph" g
WHERE NOT EXISTS (SELECT 1 FROM "User" u WHERE u.id = g."userId");

SELECT 'Orphaned AgentNodes' as check_name,
       COUNT(*) as count
FROM "AgentNode" n
WHERE NOT EXISTS (
    SELECT 1 FROM "AgentGraph" g
    WHERE g.id = n."agentGraphId" AND g.version = n."agentGraphVersion"
);

SELECT 'Orphaned Executions' as check_name,
       COUNT(*) as count
FROM "AgentGraphExecution" e
WHERE NOT EXISTS (SELECT 1 FROM "User" u WHERE u.id = e."userId");

SELECT 'Orphaned LibraryAgents' as check_name,
       COUNT(*) as count
FROM "LibraryAgent" l
WHERE NOT EXISTS (SELECT 1 FROM "User" u WHERE u.id = l."userId");

-- ============================================
-- SAMPLE DATA VERIFICATION
-- ============================================

SELECT '=== SAMPLE USERS (first 5) ===' as section;

SELECT
    id,
    email,
    "emailVerified",
    CASE WHEN "passwordHash" IS NOT NULL THEN 'YES' ELSE 'NO' END as has_password,
    CASE WHEN "googleId" IS NOT NULL THEN 'YES' ELSE 'NO' END as has_google,
    "createdAt"
FROM "User"
ORDER BY "createdAt" DESC
LIMIT 5;

-- ============================================
-- STORE LISTINGS SAMPLE
-- ============================================

SELECT '=== SAMPLE STORE LISTINGS (first 5) ===' as section;

SELECT
    id,
    slug,
    "isDeleted",
    "hasApprovedVersion"
FROM "StoreListing"
LIMIT 5;
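
Since this file is meant to be run against both databases and compared, one hedged way to do that with psql (base connection URLs without the ?schema suffix, shown here as placeholders):

# Capture verification output from source and destination, then compare
psql "$SOURCE_BASE_URL" -f scripts/verify_migration.sql > /tmp/verify_source.txt
psql "$DEST_BASE_URL" -f scripts/verify_migration.sql > /tmp/verify_dest.txt
diff /tmp/verify_source.txt /tmp/verify_dest.txt

Differences in the execution-table counts are expected, since migrate_to_gcp.sh deliberately excludes those tables from its export.
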
286  autogpt_platform/backend/scripts/verify_scheduler_data.py  (Normal file)
@@ -0,0 +1,286 @@
"""
Verification script to check scheduler data integrity after native auth migration.

This script verifies that all scheduled jobs reference valid users in the platform.User table.
It can also clean up orphaned schedules (schedules for users that no longer exist).

Usage:
    cd backend
    poetry run python scripts/verify_scheduler_data.py [options]

Options:
    --dry-run             Preview what would be cleaned up without making changes
    --cleanup             Actually remove orphaned schedules
    --database-url <url>  Database URL (overrides DATABASE_URL env var)

Examples:
    # Check for orphaned schedules (read-only)
    poetry run python scripts/verify_scheduler_data.py

    # Preview cleanup
    poetry run python scripts/verify_scheduler_data.py --dry-run

    # Actually clean up orphaned schedules
    poetry run python scripts/verify_scheduler_data.py --cleanup

Prerequisites:
    - Database must be accessible
    - Scheduler service must be running (for cleanup operations)
"""
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import pickle
|
||||||
|
import sys
|
||||||
|
from datetime import datetime
|
||||||
|
from urllib.parse import parse_qs, urlparse, urlunparse, urlencode
|
||||||
|
|
||||||
|
from prisma import Prisma
|
||||||
|
from sqlalchemy import create_engine, text
|
||||||
|
|
||||||
|
logging.basicConfig(
|
||||||
|
level=logging.INFO,
|
||||||
|
format="%(asctime)s - %(levelname)s - %(message)s",
|
||||||
|
)
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def _extract_schema_from_url(database_url: str) -> tuple[str, str]:
|
||||||
|
"""Extract schema from DATABASE_URL and return (schema, clean_url)."""
|
||||||
|
parsed_url = urlparse(database_url)
|
||||||
|
query_params = parse_qs(parsed_url.query)
|
||||||
|
schema_list = query_params.pop("schema", None)
|
||||||
|
schema = schema_list[0] if schema_list else "public"
|
||||||
|
new_query = urlencode(query_params, doseq=True)
|
||||||
|
new_parsed_url = parsed_url._replace(query=new_query)
|
||||||
|
database_url_clean = str(urlunparse(new_parsed_url))
|
||||||
|
return schema, database_url_clean
|
||||||
|
|
||||||
|
|
||||||
|
async def get_all_user_ids(db: Prisma) -> set[str]:
|
||||||
|
"""Get all user IDs from the platform.User table."""
|
||||||
|
users = await db.user.find_many(select={"id": True})
|
||||||
|
return {user.id for user in users}
|
||||||
|
|
||||||
|
|
||||||
|
def get_scheduler_jobs(db_url: str, schema: str) -> list[dict]:
|
||||||
|
"""Get all jobs from the apscheduler_jobs table."""
|
||||||
|
engine = create_engine(db_url)
|
||||||
|
jobs = []
|
||||||
|
|
||||||
|
with engine.connect() as conn:
|
||||||
|
# Check if table exists
|
||||||
|
result = conn.execute(
|
||||||
|
text(
|
||||||
|
f"""
|
||||||
|
SELECT EXISTS (
|
||||||
|
SELECT FROM information_schema.tables
|
||||||
|
WHERE table_schema = :schema
|
||||||
|
AND table_name = 'apscheduler_jobs'
|
||||||
|
)
|
||||||
|
"""
|
||||||
|
),
|
||||||
|
{"schema": schema},
|
||||||
|
)
|
||||||
|
if not result.scalar():
|
||||||
|
logger.warning(
|
||||||
|
f"Table {schema}.apscheduler_jobs does not exist. "
|
||||||
|
"Scheduler may not have been initialized yet."
|
||||||
|
)
|
||||||
|
return []
|
||||||
|
|
||||||
|
# Get all jobs
|
||||||
|
result = conn.execute(
|
||||||
|
text(f'SELECT id, job_state FROM {schema}."apscheduler_jobs"')
|
||||||
|
)
|
||||||
|
|
||||||
|
for row in result:
|
||||||
|
job_id = row[0]
|
||||||
|
job_state = row[1]
|
||||||
|
|
||||||
|
try:
|
||||||
|
# APScheduler stores job state as pickled data
|
||||||
|
job_data = pickle.loads(job_state)
|
||||||
|
kwargs = job_data.get("kwargs", {})
|
||||||
|
|
||||||
|
# Only process graph execution jobs (have user_id)
|
||||||
|
if "user_id" in kwargs:
|
||||||
|
jobs.append(
|
||||||
|
{
|
||||||
|
"id": job_id,
|
||||||
|
"user_id": kwargs.get("user_id"),
|
||||||
|
"graph_id": kwargs.get("graph_id"),
|
||||||
|
"graph_version": kwargs.get("graph_version"),
|
||||||
|
"cron": kwargs.get("cron"),
|
||||||
|
"agent_name": kwargs.get("agent_name"),
|
||||||
|
}
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Failed to parse job {job_id}: {e}")
|
||||||
|
|
||||||
|
return jobs
|
||||||
|
|
||||||
|
|
||||||
|
async def verify_scheduler_data(
|
||||||
|
db: Prisma, db_url: str, schema: str
|
||||||
|
) -> tuple[list[dict], list[dict]]:
|
||||||
|
"""
|
||||||
|
Verify scheduler data integrity.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Tuple of (valid_jobs, orphaned_jobs)
|
||||||
|
"""
|
||||||
|
logger.info("Fetching all users from platform.User...")
|
||||||
|
user_ids = await get_all_user_ids(db)
|
||||||
|
logger.info(f"Found {len(user_ids)} users in platform.User")
|
||||||
|
|
||||||
|
logger.info("Fetching scheduled jobs from apscheduler_jobs...")
|
||||||
|
jobs = get_scheduler_jobs(db_url, schema)
|
||||||
|
logger.info(f"Found {len(jobs)} scheduled graph execution jobs")
|
||||||
|
|
||||||
|
valid_jobs = []
|
||||||
|
orphaned_jobs = []
|
||||||
|
|
||||||
|
for job in jobs:
|
||||||
|
if job["user_id"] in user_ids:
|
||||||
|
valid_jobs.append(job)
|
||||||
|
else:
|
||||||
|
orphaned_jobs.append(job)
|
||||||
|
|
||||||
|
return valid_jobs, orphaned_jobs
|
||||||
|
|
||||||
|
|
||||||
|
async def cleanup_orphaned_schedules(orphaned_jobs: list[dict], db_url: str, schema: str):
|
||||||
|
"""Remove orphaned schedules from the database."""
|
||||||
|
if not orphaned_jobs:
|
||||||
|
logger.info("No orphaned schedules to clean up")
|
||||||
|
return
|
||||||
|
|
||||||
|
engine = create_engine(db_url)
|
||||||
|
|
||||||
|
with engine.connect() as conn:
|
||||||
|
for job in orphaned_jobs:
|
||||||
|
try:
|
||||||
|
conn.execute(
|
||||||
|
text(f'DELETE FROM {schema}."apscheduler_jobs" WHERE id = :job_id'),
|
||||||
|
{"job_id": job["id"]},
|
||||||
|
)
|
||||||
|
logger.info(
|
||||||
|
f"Deleted orphaned schedule {job['id']} "
|
||||||
|
f"(user: {job['user_id']}, graph: {job['graph_id']})"
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to delete schedule {job['id']}: {e}")
|
||||||
|
|
||||||
|
conn.commit()
|
||||||
|
|
||||||
|
logger.info(f"Cleaned up {len(orphaned_jobs)} orphaned schedules")
|
||||||
|
|
||||||
|
|
||||||
|
async def main(dry_run: bool = False, cleanup: bool = False):
|
||||||
|
"""Run the verification."""
|
||||||
|
logger.info("=" * 60)
|
||||||
|
logger.info("Scheduler Data Verification Script")
|
||||||
|
if dry_run:
|
||||||
|
logger.info(">>> DRY RUN MODE - No changes will be made <<<")
|
||||||
|
elif cleanup:
|
||||||
|
logger.info(">>> CLEANUP MODE - Orphaned schedules will be removed <<<")
|
||||||
|
else:
|
||||||
|
logger.info(">>> VERIFY MODE - Read-only check <<<")
|
||||||
|
logger.info("=" * 60)
|
||||||
|
logger.info(f"Started at: {datetime.now().isoformat()}")
|
||||||
|
|
||||||
|
# Get database URL
|
||||||
|
db_url = os.getenv("DIRECT_URL") or os.getenv("DATABASE_URL")
|
||||||
|
if not db_url:
|
||||||
|
logger.error("DATABASE_URL or DIRECT_URL environment variable not set")
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
schema, clean_db_url = _extract_schema_from_url(db_url)
|
||||||
|
logger.info(f"Using schema: {schema}")
|
||||||
|
|
||||||
|
db = Prisma()
|
||||||
|
await db.connect()
|
||||||
|
|
||||||
|
try:
|
||||||
|
valid_jobs, orphaned_jobs = await verify_scheduler_data(db, clean_db_url, schema)
|
||||||
|
|
||||||
|
# Report results
|
||||||
|
logger.info("\n--- Verification Results ---")
|
||||||
|
logger.info(f"Valid scheduled jobs: {len(valid_jobs)}")
|
||||||
|
logger.info(f"Orphaned scheduled jobs: {len(orphaned_jobs)}")
|
||||||
|
|
||||||
|
if orphaned_jobs:
|
||||||
|
logger.warning("\n--- Orphaned Schedules (users not in platform.User) ---")
|
||||||
|
for job in orphaned_jobs:
|
||||||
|
logger.warning(
|
||||||
|
f" Schedule ID: {job['id']}\n"
|
||||||
|
f" User ID: {job['user_id']}\n"
|
||||||
|
f" Graph ID: {job['graph_id']}\n"
|
||||||
|
f" Cron: {job['cron']}\n"
|
||||||
|
f" Agent: {job['agent_name'] or 'N/A'}"
|
||||||
|
)
|
||||||
|
|
||||||
|
if cleanup and not dry_run:
|
||||||
|
logger.info("\n--- Cleaning up orphaned schedules ---")
|
||||||
|
await cleanup_orphaned_schedules(orphaned_jobs, clean_db_url, schema)
|
||||||
|
elif dry_run:
|
||||||
|
logger.info(
|
||||||
|
f"\n[DRY RUN] Would delete {len(orphaned_jobs)} orphaned schedules"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger.info(
|
||||||
|
"\nTo clean up orphaned schedules, run with --cleanup flag"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger.info("\n✅ All scheduled jobs reference valid users!")
|
||||||
|
|
||||||
|
# Summary
|
||||||
|
logger.info("\n" + "=" * 60)
|
||||||
|
if orphaned_jobs and cleanup and not dry_run:
|
||||||
|
logger.info("Cleanup completed successfully!")
|
||||||
|
else:
|
||||||
|
logger.info("Verification completed!")
|
||||||
|
logger.info("=" * 60)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Verification failed: {e}")
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
await db.disconnect()
|
||||||
|
|
||||||
|
|
||||||
|
def parse_args():
|
||||||
|
"""Parse command line arguments."""
|
||||||
|
parser = argparse.ArgumentParser(
|
||||||
|
description="Verify scheduler data integrity after native auth migration"
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--dry-run",
|
||||||
|
action="store_true",
|
||||||
|
help="Preview what would be cleaned up without making changes",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--cleanup",
|
||||||
|
action="store_true",
|
||||||
|
help="Actually remove orphaned schedules",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--database-url",
|
||||||
|
type=str,
|
||||||
|
help="Database URL (overrides DATABASE_URL env var)",
|
||||||
|
)
|
||||||
|
return parser.parse_args()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
args = parse_args()
|
||||||
|
|
||||||
|
# Override DATABASE_URL if provided via command line
|
||||||
|
if args.database_url:
|
||||||
|
os.environ["DATABASE_URL"] = args.database_url
|
||||||
|
os.environ["DIRECT_URL"] = args.database_url
|
||||||
|
|
||||||
|
asyncio.run(main(dry_run=args.dry_run, cleanup=args.cleanup))
|
||||||
@@ -19,21 +19,21 @@ images: {
|
|||||||
|
|
||||||
import asyncio
|
import asyncio
|
||||||
import random
|
import random
|
||||||
from typing import Any, Dict, List
|
from typing import Any, Dict, List, cast
|
||||||
|
|
||||||
from faker import Faker
|
from faker import Faker
|
||||||
|
from prisma.types import AgentBlockCreateInput
|
||||||
|
|
||||||
from backend.data.auth.api_key import create_api_key
|
from backend.data.auth.api_key import create_api_key
|
||||||
from backend.data.credit import get_user_credit_model
|
from backend.data.credit import get_user_credit_model
|
||||||
from backend.data.db import prisma
|
from backend.data.db import prisma
|
||||||
from backend.data.graph import Graph, Link, Node, create_graph
|
from backend.data.graph import Graph, Link, Node, create_graph
|
||||||
from backend.data.user import get_or_create_user
|
|
||||||
|
|
||||||
# Import API functions from the backend
|
# Import API functions from the backend
|
||||||
|
from backend.server.auth.service import AuthService
|
||||||
from backend.server.v2.library.db import create_library_agent, create_preset
|
from backend.server.v2.library.db import create_library_agent, create_preset
|
||||||
from backend.server.v2.library.model import LibraryAgentPresetCreatable
|
from backend.server.v2.library.model import LibraryAgentPresetCreatable
|
||||||
from backend.server.v2.store.db import create_store_submission, review_store_submission
|
from backend.server.v2.store.db import create_store_submission, review_store_submission
|
||||||
from backend.util.clients import get_supabase
|
|
||||||
|
|
||||||
faker = Faker()
|
faker = Faker()
|
||||||
|
|
||||||
@@ -107,10 +107,10 @@ class TestDataCreator:
|
|||||||
self.profiles: List[Dict[str, Any]] = []
|
self.profiles: List[Dict[str, Any]] = []
|
||||||
|
|
||||||
async def create_test_users(self) -> List[Dict[str, Any]]:
|
async def create_test_users(self) -> List[Dict[str, Any]]:
|
||||||
"""Create test users using Supabase client."""
|
"""Create test users using native auth service."""
|
||||||
print(f"Creating {NUM_USERS} test users...")
|
print(f"Creating {NUM_USERS} test users...")
|
||||||
|
|
||||||
supabase = get_supabase()
|
auth_service = AuthService()
|
||||||
users = []
|
users = []
|
||||||
|
|
||||||
for i in range(NUM_USERS):
|
for i in range(NUM_USERS):
|
||||||
@@ -122,30 +122,35 @@ class TestDataCreator:
|
|||||||
else:
|
else:
|
||||||
email = faker.unique.email()
|
email = faker.unique.email()
|
||||||
password = "testpassword123" # Standard test password
|
password = "testpassword123" # Standard test password
|
||||||
user_id = f"test-user-{i}-{faker.uuid4()}"
|
|
||||||
|
|
||||||
# Create user in Supabase Auth (if needed)
|
# Try to create user with password using AuthService
|
||||||
try:
|
try:
|
||||||
auth_response = supabase.auth.admin.create_user(
|
user = await auth_service.register_user(
|
||||||
{"email": email, "password": password, "email_confirm": True}
|
email=email,
|
||||||
|
password=password,
|
||||||
|
name=faker.name(),
|
||||||
)
|
)
|
||||||
if auth_response.user:
|
users.append(
|
||||||
user_id = auth_response.user.id
|
{
|
||||||
except Exception as supabase_error:
|
"id": user.id,
|
||||||
print(
|
"email": user.email,
|
||||||
f"Supabase user creation failed for {email}, using fallback: {supabase_error}"
|
"name": user.name,
|
||||||
|
"role": user.role,
|
||||||
|
}
|
||||||
)
|
)
|
||||||
# Fall back to direct database creation
|
except ValueError as e:
|
||||||
|
# User already exists, get them instead
|
||||||
# Create mock user data similar to what auth middleware would provide
|
print(f"User {email} already exists, fetching: {e}")
|
||||||
user_data = {
|
existing_user = await auth_service.get_user_by_email(email)
|
||||||
"sub": user_id,
|
if existing_user:
|
||||||
"email": email,
|
users.append(
|
||||||
}
|
{
|
||||||
|
"id": existing_user.id,
|
||||||
# Use the API function to create user in local database
|
"email": existing_user.email,
|
||||||
user = await get_or_create_user(user_data)
|
"name": existing_user.name,
|
||||||
users.append(user.model_dump())
|
"role": existing_user.role,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"Error creating user {i}: {e}")
|
print(f"Error creating user {i}: {e}")
|
||||||
@@ -177,12 +182,15 @@ class TestDataCreator:
|
|||||||
for block in blocks_to_create:
|
for block in blocks_to_create:
|
||||||
try:
|
try:
|
||||||
await prisma.agentblock.create(
|
await prisma.agentblock.create(
|
||||||
data={
|
data=cast(
|
||||||
"id": block.id,
|
AgentBlockCreateInput,
|
||||||
"name": block.name,
|
{
|
||||||
"inputSchema": "{}",
|
"id": block.id,
|
||||||
"outputSchema": "{}",
|
"name": block.name,
|
||||||
}
|
"inputSchema": "{}",
|
||||||
|
"outputSchema": "{}",
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"Error creating block {block.name}: {e}")
|
print(f"Error creating block {block.name}: {e}")
|
||||||
|
|||||||
@@ -19,6 +19,7 @@ images: {
|
|||||||
import asyncio
|
import asyncio
|
||||||
import random
|
import random
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
from typing import cast
|
||||||
|
|
||||||
import prisma.enums
|
import prisma.enums
|
||||||
import pytest
|
import pytest
|
||||||
@@ -30,13 +31,19 @@ from prisma.types import (
|
|||||||
AgentGraphCreateInput,
|
AgentGraphCreateInput,
|
||||||
AgentNodeCreateInput,
|
AgentNodeCreateInput,
|
||||||
AgentNodeLinkCreateInput,
|
AgentNodeLinkCreateInput,
|
||||||
|
AgentPresetCreateInput,
|
||||||
AnalyticsDetailsCreateInput,
|
AnalyticsDetailsCreateInput,
|
||||||
AnalyticsMetricsCreateInput,
|
AnalyticsMetricsCreateInput,
|
||||||
|
APIKeyCreateInput,
|
||||||
CreditTransactionCreateInput,
|
CreditTransactionCreateInput,
|
||||||
IntegrationWebhookCreateInput,
|
IntegrationWebhookCreateInput,
|
||||||
|
LibraryAgentCreateInput,
|
||||||
ProfileCreateInput,
|
ProfileCreateInput,
|
||||||
|
StoreListingCreateInput,
|
||||||
StoreListingReviewCreateInput,
|
StoreListingReviewCreateInput,
|
||||||
|
StoreListingVersionCreateInput,
|
||||||
UserCreateInput,
|
UserCreateInput,
|
||||||
|
UserOnboardingCreateInput,
|
||||||
)
|
)
|
||||||
|
|
||||||
faker = Faker()
|
faker = Faker()
|
||||||
@@ -172,14 +179,17 @@ async def main():
|
|||||||
for _ in range(num_presets): # Create 1 AgentPreset per user
|
for _ in range(num_presets): # Create 1 AgentPreset per user
|
||||||
graph = random.choice(agent_graphs)
|
graph = random.choice(agent_graphs)
|
||||||
preset = await db.agentpreset.create(
|
preset = await db.agentpreset.create(
|
||||||
data={
|
data=cast(
|
||||||
"name": faker.sentence(nb_words=3),
|
AgentPresetCreateInput,
|
||||||
"description": faker.text(max_nb_chars=200),
|
{
|
||||||
"userId": user.id,
|
"name": faker.sentence(nb_words=3),
|
||||||
"agentGraphId": graph.id,
|
"description": faker.text(max_nb_chars=200),
|
||||||
"agentGraphVersion": graph.version,
|
"userId": user.id,
|
||||||
"isActive": True,
|
"agentGraphId": graph.id,
|
||||||
}
|
"agentGraphVersion": graph.version,
|
||||||
|
"isActive": True,
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
agent_presets.append(preset)
|
agent_presets.append(preset)
|
||||||
|
|
||||||
@@ -220,18 +230,21 @@ async def main():
|
|||||||
)
|
)
|
||||||
|
|
||||||
library_agent = await db.libraryagent.create(
|
library_agent = await db.libraryagent.create(
|
||||||
data={
|
data=cast(
|
||||||
"userId": user.id,
|
LibraryAgentCreateInput,
|
||||||
"agentGraphId": graph.id,
|
{
|
||||||
"agentGraphVersion": graph.version,
|
"userId": user.id,
|
||||||
"creatorId": creator_profile.id if creator_profile else None,
|
"agentGraphId": graph.id,
|
||||||
"imageUrl": get_image() if random.random() < 0.5 else None,
|
"agentGraphVersion": graph.version,
|
||||||
"useGraphIsActiveVersion": random.choice([True, False]),
|
"creatorId": creator_profile.id if creator_profile else None,
|
||||||
"isFavorite": random.choice([True, False]),
|
"imageUrl": get_image() if random.random() < 0.5 else None,
|
||||||
"isCreatedByUser": random.choice([True, False]),
|
"useGraphIsActiveVersion": random.choice([True, False]),
|
||||||
"isArchived": random.choice([True, False]),
|
"isFavorite": random.choice([True, False]),
|
||||||
"isDeleted": random.choice([True, False]),
|
"isCreatedByUser": random.choice([True, False]),
|
||||||
}
|
"isArchived": random.choice([True, False]),
|
||||||
|
"isDeleted": random.choice([True, False]),
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
library_agents.append(library_agent)
|
library_agents.append(library_agent)
|
||||||
|
|
||||||
@@ -392,13 +405,16 @@ async def main():
|
|||||||
user = random.choice(users)
|
user = random.choice(users)
|
||||||
slug = faker.slug()
|
slug = faker.slug()
|
||||||
listing = await db.storelisting.create(
|
listing = await db.storelisting.create(
|
||||||
data={
|
data=cast(
|
||||||
"agentGraphId": graph.id,
|
StoreListingCreateInput,
|
||||||
"agentGraphVersion": graph.version,
|
{
|
||||||
"owningUserId": user.id,
|
"agentGraphId": graph.id,
|
||||||
"hasApprovedVersion": random.choice([True, False]),
|
"agentGraphVersion": graph.version,
|
||||||
"slug": slug,
|
"owningUserId": user.id,
|
||||||
}
|
"hasApprovedVersion": random.choice([True, False]),
|
||||||
|
"slug": slug,
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
store_listings.append(listing)
|
store_listings.append(listing)
|
||||||
|
|
||||||
@@ -408,26 +424,29 @@ async def main():
|
|||||||
for listing in store_listings:
|
for listing in store_listings:
|
||||||
graph = [g for g in agent_graphs if g.id == listing.agentGraphId][0]
|
graph = [g for g in agent_graphs if g.id == listing.agentGraphId][0]
|
||||||
version = await db.storelistingversion.create(
|
version = await db.storelistingversion.create(
|
||||||
data={
|
data=cast(
|
||||||
"agentGraphId": graph.id,
|
StoreListingVersionCreateInput,
|
||||||
"agentGraphVersion": graph.version,
|
{
|
||||||
"name": graph.name or faker.sentence(nb_words=3),
|
"agentGraphId": graph.id,
|
||||||
"subHeading": faker.sentence(),
|
"agentGraphVersion": graph.version,
|
||||||
"videoUrl": get_video_url() if random.random() < 0.3 else None,
|
"name": graph.name or faker.sentence(nb_words=3),
|
||||||
"imageUrls": [get_image() for _ in range(3)],
|
"subHeading": faker.sentence(),
|
||||||
"description": faker.text(),
|
"videoUrl": get_video_url() if random.random() < 0.3 else None,
|
||||||
"categories": [faker.word() for _ in range(3)],
|
"imageUrls": [get_image() for _ in range(3)],
|
||||||
"isFeatured": random.choice([True, False]),
|
"description": faker.text(),
|
||||||
"isAvailable": True,
|
"categories": [faker.word() for _ in range(3)],
|
||||||
"storeListingId": listing.id,
|
"isFeatured": random.choice([True, False]),
|
||||||
"submissionStatus": random.choice(
|
"isAvailable": True,
|
||||||
[
|
"storeListingId": listing.id,
|
||||||
prisma.enums.SubmissionStatus.PENDING,
|
"submissionStatus": random.choice(
|
||||||
prisma.enums.SubmissionStatus.APPROVED,
|
[
|
||||||
prisma.enums.SubmissionStatus.REJECTED,
|
prisma.enums.SubmissionStatus.PENDING,
|
||||||
]
|
prisma.enums.SubmissionStatus.APPROVED,
|
||||||
),
|
prisma.enums.SubmissionStatus.REJECTED,
|
||||||
}
|
]
|
||||||
|
),
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
store_listing_versions.append(version)
|
store_listing_versions.append(version)
|
||||||
|
|
||||||
@@ -469,51 +488,64 @@ async def main():
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
await db.useronboarding.create(
|
await db.useronboarding.create(
|
||||||
data={
|
data=cast(
|
||||||
"userId": user.id,
|
UserOnboardingCreateInput,
|
||||||
"completedSteps": completed_steps,
|
{
|
||||||
"walletShown": random.choice([True, False]),
|
"userId": user.id,
|
||||||
"notified": (
|
"completedSteps": completed_steps,
|
||||||
random.sample(completed_steps, k=min(3, len(completed_steps)))
|
"walletShown": random.choice([True, False]),
|
||||||
if completed_steps
|
"notified": (
|
||||||
else []
|
random.sample(
|
||||||
),
|
completed_steps, k=min(3, len(completed_steps))
|
||||||
"rewardedFor": (
|
)
|
||||||
random.sample(completed_steps, k=min(2, len(completed_steps)))
|
if completed_steps
|
||||||
if completed_steps
|
else []
|
||||||
else []
|
),
|
||||||
),
|
"rewardedFor": (
|
||||||
"usageReason": (
|
random.sample(
|
||||||
random.choice(["personal", "business", "research", "learning"])
|
completed_steps, k=min(2, len(completed_steps))
|
||||||
if random.random() < 0.7
|
)
|
||||||
else None
|
if completed_steps
|
||||||
),
|
else []
|
||||||
"integrations": random.sample(
|
),
|
||||||
["github", "google", "discord", "slack"], k=random.randint(0, 2)
|
"usageReason": (
|
||||||
),
|
random.choice(
|
||||||
"otherIntegrations": (
|
["personal", "business", "research", "learning"]
|
||||||
faker.word() if random.random() < 0.2 else None
|
)
|
||||||
),
|
if random.random() < 0.7
|
||||||
"selectedStoreListingVersionId": (
|
else None
|
||||||
random.choice(store_listing_versions).id
|
),
|
||||||
if store_listing_versions and random.random() < 0.5
|
"integrations": random.sample(
|
||||||
else None
|
["github", "google", "discord", "slack"],
|
||||||
),
|
k=random.randint(0, 2),
|
||||||
"onboardingAgentExecutionId": (
|
),
|
||||||
random.choice(agent_graph_executions).id
|
"otherIntegrations": (
|
||||||
if agent_graph_executions and random.random() < 0.3
|
faker.word() if random.random() < 0.2 else None
|
||||||
else None
|
),
|
||||||
),
|
"selectedStoreListingVersionId": (
|
||||||
"agentRuns": random.randint(0, 10),
|
random.choice(store_listing_versions).id
|
||||||
}
|
if store_listing_versions and random.random() < 0.5
|
||||||
|
else None
|
||||||
|
),
|
||||||
|
"onboardingAgentExecutionId": (
|
||||||
|
random.choice(agent_graph_executions).id
|
||||||
|
if agent_graph_executions and random.random() < 0.3
|
||||||
|
else None
|
||||||
|
),
|
||||||
|
"agentRuns": random.randint(0, 10),
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"Error creating onboarding for user {user.id}: {e}")
|
print(f"Error creating onboarding for user {user.id}: {e}")
|
||||||
# Try simpler version
|
# Try simpler version
|
||||||
await db.useronboarding.create(
|
await db.useronboarding.create(
|
||||||
data={
|
data=cast(
|
||||||
"userId": user.id,
|
UserOnboardingCreateInput,
|
||||||
}
|
{
|
||||||
|
"userId": user.id,
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
# Insert IntegrationWebhooks for some users
|
# Insert IntegrationWebhooks for some users
|
||||||
@@ -544,20 +576,23 @@ async def main():
|
|||||||
for user in users:
|
for user in users:
|
||||||
api_key = APIKeySmith().generate_key()
|
api_key = APIKeySmith().generate_key()
|
||||||
await db.apikey.create(
|
await db.apikey.create(
|
||||||
data={
|
data=cast(
|
||||||
"name": faker.word(),
|
APIKeyCreateInput,
|
||||||
"head": api_key.head,
|
{
|
||||||
"tail": api_key.tail,
|
"name": faker.word(),
|
||||||
"hash": api_key.hash,
|
"head": api_key.head,
|
||||||
"salt": api_key.salt,
|
"tail": api_key.tail,
|
||||||
"status": prisma.enums.APIKeyStatus.ACTIVE,
|
"hash": api_key.hash,
|
||||||
"permissions": [
|
"salt": api_key.salt,
|
||||||
prisma.enums.APIKeyPermission.EXECUTE_GRAPH,
|
"status": prisma.enums.APIKeyStatus.ACTIVE,
|
||||||
prisma.enums.APIKeyPermission.READ_GRAPH,
|
"permissions": [
|
||||||
],
|
prisma.enums.APIKeyPermission.EXECUTE_GRAPH,
|
||||||
"description": faker.text(),
|
prisma.enums.APIKeyPermission.READ_GRAPH,
|
||||||
"userId": user.id,
|
],
|
||||||
}
|
"description": faker.text(),
|
||||||
|
"userId": user.id,
|
||||||
|
},
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
# Refresh materialized views
|
# Refresh materialized views
|
||||||
|
|||||||
@@ -12,10 +12,12 @@ Run this after test_data_creator.py to test that materialized views update corre
 import asyncio
 import random
 from datetime import datetime, timedelta
+from typing import cast

 import prisma.enums
 from faker import Faker
 from prisma import Json, Prisma
+from prisma.types import CreditTransactionCreateInput, StoreListingReviewCreateInput

 faker = Faker()

@@ -166,16 +168,19 @@ async def main():
 score = random.choices([1, 2, 3, 4, 5], weights=[5, 10, 20, 40, 25])[0]

 await db.storelistingreview.create(
-    data={
-        "storeListingVersionId": version.id,
-        "reviewByUserId": reviewer.id,
-        "score": score,
-        "comments": (
-            faker.text(max_nb_chars=200)
-            if random.random() < 0.7
-            else None
-        ),
-    }
+    data=cast(
+        StoreListingReviewCreateInput,
+        {
+            "storeListingVersionId": version.id,
+            "reviewByUserId": reviewer.id,
+            "score": score,
+            "comments": (
+                faker.text(max_nb_chars=200)
+                if random.random() < 0.7
+                else None
+            ),
+        },
+    )
 )
 new_reviews_count += 1

@@ -244,17 +249,20 @@ async def main():
 )

 await db.credittransaction.create(
-    data={
-        "userId": user.id,
-        "amount": amount,
-        "type": transaction_type,
-        "metadata": Json(
-            {
-                "source": "test_updater",
-                "timestamp": datetime.now().isoformat(),
-            }
-        ),
-    }
+    data=cast(
+        CreditTransactionCreateInput,
+        {
+            "userId": user.id,
+            "amount": amount,
+            "type": transaction_type,
+            "metadata": Json(
+                {
+                    "source": "test_updater",
+                    "timestamp": datetime.now().isoformat(),
+                }
+            ),
+        },
+    )
 )
 transaction_count += 1

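The change in all of these hunks is purely a typing aid: the payload dicts are unchanged at runtime, they are only wrapped in `typing.cast` so the generated Prisma `*CreateInput` TypedDicts are named explicitly. A minimal sketch of the same pattern, assuming a connected `Prisma` client and a valid transaction type enum value as in the scripts above (the helper name is hypothetical):

```python
from typing import cast

from prisma import Prisma
from prisma.types import CreditTransactionCreateInput


async def record_test_transaction(db: Prisma, user_id: str, amount: int, transaction_type) -> None:
    # cast() is a no-op at runtime; it only tells the type checker which
    # generated TypedDict this dict literal is meant to satisfy.
    await db.credittransaction.create(
        data=cast(
            CreditTransactionCreateInput,
            {
                "userId": user_id,
                "amount": amount,
                "type": transaction_type,
            },
        )
    )
```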
21
autogpt_platform/backend/tools/db-migrate/.env.example
Normal file
@@ -0,0 +1,21 @@
# Database Migration Tool Environment Variables
#
# Copy this file to .env and fill in the actual values:
# cp .env.example .env
#
# NEVER commit the .env file - it contains sensitive credentials

# Source database (Supabase)
SOURCE_URL=postgresql://user:password@host:5432/database?schema=platform

# Destination database (GCP Cloud SQL or local)
DEST_URL=postgresql://user:password@host:5432/database?schema=platform

# Optional: GCP project for deploy.sh (default: agpt-dev)
# PROJECT_ID=agpt-dev

# Optional: GCP region (default: us-central1)
# REGION=us-central1

# Optional: K8s namespace for deploy.sh (default: dev-agpt)
# NAMESPACE=dev-agpt
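A small stdlib-only sketch, not part of the tool, showing one way to load these two values the same way the shell scripts further down do, and to sanity-check that each URL carries the `?schema=platform` parameter the migration expects:

```python
# Sketch: read SOURCE_URL/DEST_URL from a .env like the one above (or the
# process environment) and verify the schema query parameter.
import os
from pathlib import Path
from urllib.parse import parse_qs, urlparse


def load_env(path: str = ".env") -> dict[str, str]:
    env: dict[str, str] = {}
    if Path(path).exists():
        for line in Path(path).read_text().splitlines():
            line = line.strip()
            if not line or line.startswith("#") or "=" not in line:
                continue
            key, _, value = line.partition("=")
            env[key.strip()] = value.strip()
    return env


def check_url(name: str, url: str) -> None:
    parsed = urlparse(url)
    schema = parse_qs(parsed.query).get("schema", ["public"])[0]
    print(f"{name}: host={parsed.hostname} schema={schema}")
    if schema != "platform":
        raise SystemExit(f"{name} must include ?schema=platform")


if __name__ == "__main__":
    env = {**load_env(), **os.environ}
    for name in ("SOURCE_URL", "DEST_URL"):
        check_url(name, env[name])
```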
14
autogpt_platform/backend/tools/db-migrate/.gitignore
vendored
Normal file
@@ -0,0 +1,14 @@
# Build artifacts
/target/

# Environment files with credentials
.env

# Editor files
.idea/
*.swp
*.swo
*~

# OS files
.DS_Store
1907
autogpt_platform/backend/tools/db-migrate/Cargo.lock
generated
Normal file
File diff suppressed because it is too large
36
autogpt_platform/backend/tools/db-migrate/Cargo.toml
Normal file
@@ -0,0 +1,36 @@
[package]
name = "db-migrate"
version = "0.1.0"
edition = "2024"
description = "Database migration tool for AutoGPT Platform - Supabase to GCP Cloud SQL"

[dependencies]
# Async runtime
tokio = { version = "1", features = ["full"] }

# PostgreSQL
tokio-postgres = { version = "0.7", features = ["with-uuid-0_8", "with-chrono-0_4", "with-serde_json-1"] }
postgres-types = { version = "0.2", features = ["derive"] }

# CLI
clap = { version = "4", features = ["derive", "env"] }

# Serialization
serde = { version = "1", features = ["derive"] }
serde_json = "1"

# Utilities
uuid = { version = "0.8", features = ["serde"] }
chrono = { version = "0.4", features = ["serde"] }
anyhow = "1"
thiserror = "1"
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
indicatif = "0.17" # Progress bars
comfy-table = "7" # Pretty tables
url = "2"
futures = "0.3"

[profile.release]
opt-level = 3
lto = true
34
autogpt_platform/backend/tools/db-migrate/Dockerfile
Normal file
@@ -0,0 +1,34 @@
# Build stage
FROM rust:1.92-slim-bookworm AS builder

RUN apt-get update && \
    apt-get install -y pkg-config libssl-dev && \
    rm -rf /var/lib/apt/lists/*

WORKDIR /app
COPY Cargo.toml Cargo.lock* ./
COPY src ./src

# Build release binary with size optimizations
RUN cargo build --release && \
    strip /app/target/release/db-migrate && \
    rm -rf /app/target/release/deps /app/target/release/build /app/target/release/.fingerprint /app/target/release/incremental

# Runtime stage - minimal image (~50MB)
FROM debian:bookworm-slim

RUN apt-get update && \
    apt-get install -y --no-install-recommends \
    ca-certificates \
    libssl3 \
    libpq5 && \
    rm -rf /var/lib/apt/lists/* /var/cache/apt/*

# Copy only the binary
COPY --from=builder /app/target/release/db-migrate /usr/local/bin/db-migrate

# Run as non-root
RUN useradd -r -s /bin/false migrate
USER migrate

ENTRYPOINT ["db-migrate"]
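A minimal sketch of building and exercising this image locally, assuming SOURCE_URL and DEST_URL are already exported; the image tag is arbitrary and any of the subcommands documented in the README below would work, since the ENTRYPOINT is the binary itself:

```python
# Sketch: local docker build followed by a read-only one-off run.
import os
import subprocess

IMAGE = "db-migrate:local"  # hypothetical local tag

subprocess.run(["docker", "build", "-t", IMAGE, "."], check=True)
subprocess.run(
    [
        "docker", "run", "--rm",
        "-e", f"SOURCE_URL={os.environ['SOURCE_URL']}",
        "-e", f"DEST_URL={os.environ['DEST_URL']}",
        IMAGE,
        "table-sizes",  # any subcommand; the ENTRYPOINT is db-migrate
    ],
    check=True,
)
```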
129
autogpt_platform/backend/tools/db-migrate/README.md
Normal file
@@ -0,0 +1,129 @@
# db-migrate

Rust-based database migration tool for AutoGPT Platform - migrates from Supabase to GCP Cloud SQL.

## Features

- Stream data efficiently using PostgreSQL COPY protocol
- Verify both databases match with row counts and checksums
- Migrate auth data (passwords, OAuth IDs) from Supabase auth.users
- Check all triggers and functions are in place
- Progress bars and detailed logging

## Build

```bash
cd backend/tools/db-migrate
cargo build --release
```

The binary will be at `target/release/db-migrate`.

## Usage

```bash
# Set environment variables
export SOURCE_URL="postgresql://postgres:password@db.xxx.supabase.co:5432/postgres?schema=platform"
export DEST_URL="postgresql://postgres:password@ipaddress:5432/postgres?schema=platform"

# Or pass as arguments
db-migrate --source "..." --dest "..." <command>
```

## Commands

### Quick Migration (Users + Auth only)

For testing login/signup ASAP:

```bash
db-migrate quick
```

Migrates: User, Profile, UserOnboarding, UserBalance + auth data

### Full Migration

```bash
db-migrate full
```

Migrates all tables (excluding large execution history by default).

### Schema Only

```bash
db-migrate schema
```

### Data Only

```bash
# All tables (excluding large)
db-migrate data

# Specific table
db-migrate data --table User
```

### Auth Only

```bash
db-migrate auth
```

### Verify

```bash
# Row counts
db-migrate verify

# Include functions and triggers
db-migrate verify --check-functions
```

### Table Sizes

```bash
db-migrate table-sizes
```

### Stream Large Tables

After the initial migration, stream execution history:

```bash
# All large tables
db-migrate stream-large

# Specific table
db-migrate stream-large --table AgentGraphExecution
```

## Docker / Kubernetes

Build and run in a container:

```dockerfile
FROM rust:1.75-slim as builder
WORKDIR /app
COPY . .
RUN cargo build --release

FROM debian:bookworm-slim
RUN apt-get update && apt-get install -y libpq5 ca-certificates && rm -rf /var/lib/apt/lists/*
COPY --from=builder /app/target/release/db-migrate /usr/local/bin/
ENTRYPOINT ["db-migrate"]
```

## Large Tables (Excluded by Default)

These tables contain execution history (~37GB) and are excluded from the initial migration:

- AgentGraphExecution (1.3 GB)
- AgentNodeExecution (6 GB)
- AgentNodeExecutionInputOutput (30 GB)
- AgentNodeExecutionKeyValueData
- NotificationEvent (94 MB)

Use the `stream-large` command to migrate these after the initial migration.
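A sketch of the end-to-end sequence the commands above describe, driven from a small wrapper; it assumes the release binary has been built and SOURCE_URL/DEST_URL are set in the environment:

```python
# Sketch: quick migration first (so login can be tested), then the rest,
# verifying along the way. Commands are the ones documented in the README.
import subprocess

BIN = "./target/release/db-migrate"


def run(*args: str) -> None:
    print("+", BIN, *args)
    subprocess.run([BIN, *args], check=True)


run("quick")                        # User/Profile/UserOnboarding/UserBalance + auth
run("verify")                       # row counts
run("data")                         # remaining tables, large ones still excluded
run("stream-large")                 # execution history last
run("verify", "--check-functions")  # final check incl. functions and triggers
```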
122
autogpt_platform/backend/tools/db-migrate/deploy.sh
Executable file
@@ -0,0 +1,122 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
#
|
||||||
|
# Build and deploy db-migrate tool to GKE
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# ./deploy.sh [command] [args...]
|
||||||
|
#
|
||||||
|
# Examples:
|
||||||
|
# ./deploy.sh solo --user-id abc-123
|
||||||
|
# ./deploy.sh quick
|
||||||
|
# ./deploy.sh full
|
||||||
|
#
|
||||||
|
# Environment variables required:
|
||||||
|
# SOURCE_URL - Source database URL (Supabase)
|
||||||
|
# DEST_URL - Destination database URL (GCP Cloud SQL)
|
||||||
|
#
|
||||||
|
# Optional:
|
||||||
|
# PROJECT_ID - GCP project (default: agpt-dev)
|
||||||
|
# NAMESPACE - K8s namespace (default: dev-agpt)
|
||||||
|
#
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
|
|
||||||
|
# Load .env file if it exists
|
||||||
|
if [[ -f "$SCRIPT_DIR/.env" ]]; then
|
||||||
|
set -a
|
||||||
|
source "$SCRIPT_DIR/.env"
|
||||||
|
set +a
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check required env vars
|
||||||
|
if [[ -z "${SOURCE_URL:-}" ]]; then
|
||||||
|
echo "ERROR: SOURCE_URL environment variable is required"
|
||||||
|
echo "Set it or create a .env file in this directory"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ -z "${DEST_URL:-}" ]]; then
|
||||||
|
echo "ERROR: DEST_URL environment variable is required"
|
||||||
|
echo "Set it or create a .env file in this directory"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
PROJECT_ID="${PROJECT_ID:-agpt-dev}"
|
||||||
|
REGION="${REGION:-us-central1}"
|
||||||
|
IMAGE="gcr.io/${PROJECT_ID}/db-migrate:latest"
|
||||||
|
NAMESPACE="${NAMESPACE:-dev-agpt}"
|
||||||
|
|
||||||
|
echo "=== Building db-migrate ==="
|
||||||
|
cd "$SCRIPT_DIR"
|
||||||
|
docker build --platform linux/amd64 -t "$IMAGE" .
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== Pushing to GCR ==="
|
||||||
|
docker push "$IMAGE"
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== Deploying to GKE ==="
|
||||||
|
|
||||||
|
# Get the command and args
|
||||||
|
CMD="${1:-quick}"
|
||||||
|
shift || true
|
||||||
|
ARGS="${*:-}"
|
||||||
|
|
||||||
|
# Create a unique job name
|
||||||
|
JOB_NAME="db-migrate-$(date +%s)"
|
||||||
|
SECRET_NAME="db-migrate-creds-$(date +%s)"
|
||||||
|
|
||||||
|
# Create k8s secret for database credentials (won't be visible in job spec)
|
||||||
|
echo "Creating secret: ${SECRET_NAME}"
|
||||||
|
kubectl -n "${NAMESPACE}" create secret generic "${SECRET_NAME}" \
|
||||||
|
--from-literal=SOURCE_URL="${SOURCE_URL}" \
|
||||||
|
--from-literal=DEST_URL="${DEST_URL}" \
|
||||||
|
--dry-run=client -o yaml | kubectl apply -f -
|
||||||
|
|
||||||
|
cat <<EOF | kubectl apply -f -
|
||||||
|
apiVersion: batch/v1
|
||||||
|
kind: Job
|
||||||
|
metadata:
|
||||||
|
name: ${JOB_NAME}
|
||||||
|
namespace: ${NAMESPACE}
|
||||||
|
spec:
|
||||||
|
ttlSecondsAfterFinished: 3600
|
||||||
|
backoffLimit: 0
|
||||||
|
template:
|
||||||
|
spec:
|
||||||
|
serviceAccountName: dev-agpt-server-sa
|
||||||
|
restartPolicy: Never
|
||||||
|
containers:
|
||||||
|
- name: migrate
|
||||||
|
image: ${IMAGE}
|
||||||
|
args: ["${CMD}"${ARGS:+, $(echo "$ARGS" | sed 's/[^ ]*/"\0"/g' | tr ' ' ',')}]
|
||||||
|
env:
|
||||||
|
- name: SOURCE_URL
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: ${SECRET_NAME}
|
||||||
|
key: SOURCE_URL
|
||||||
|
- name: DEST_URL
|
||||||
|
valueFrom:
|
||||||
|
secretKeyRef:
|
||||||
|
name: ${SECRET_NAME}
|
||||||
|
key: DEST_URL
|
||||||
|
resources:
|
||||||
|
requests:
|
||||||
|
cpu: "500m"
|
||||||
|
memory: "1Gi"
|
||||||
|
limits:
|
||||||
|
cpu: "2"
|
||||||
|
memory: "4Gi"
|
||||||
|
EOF
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== Job created: ${JOB_NAME} ==="
|
||||||
|
echo ""
|
||||||
|
echo "View logs:"
|
||||||
|
echo " kubectl -n ${NAMESPACE} logs -f job/${JOB_NAME}"
|
||||||
|
echo ""
|
||||||
|
echo "Delete job:"
|
||||||
|
echo " kubectl -n ${NAMESPACE} delete job ${JOB_NAME}"
|
||||||
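A minimal sketch, not part of the tool, of following and cleaning up the Job that deploy.sh creates, using the exact kubectl commands the script prints at the end; the job name is whatever deploy.sh generated (db-migrate-<timestamp>) and is passed in explicitly:

```python
# Sketch: tail the migration job's logs, then delete the finished job.
import subprocess
import sys

NAMESPACE = "dev-agpt"  # default namespace used by deploy.sh


def follow_job(job_name: str) -> None:
    subprocess.run(
        ["kubectl", "-n", NAMESPACE, "logs", "-f", f"job/{job_name}"],
        check=True,
    )
    subprocess.run(
        ["kubectl", "-n", NAMESPACE, "delete", "job", job_name],
        check=True,
    )


if __name__ == "__main__":
    follow_job(sys.argv[1])
```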
57
autogpt_platform/backend/tools/db-migrate/run-local.sh
Executable file
@@ -0,0 +1,57 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
#
|
||||||
|
# Build and run db-migrate locally against the databases
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# ./run-local.sh [command] [args...]
|
||||||
|
#
|
||||||
|
# Examples:
|
||||||
|
# ./run-local.sh table-sizes
|
||||||
|
# ./run-local.sh solo --user-id abc-123
|
||||||
|
# ./run-local.sh quick --dry-run
|
||||||
|
# ./run-local.sh verify
|
||||||
|
#
|
||||||
|
# Environment variables required:
|
||||||
|
# SOURCE_URL - Source database URL (Supabase)
|
||||||
|
# DEST_URL - Destination database URL (GCP Cloud SQL)
|
||||||
|
#
|
||||||
|
# You can create a .env file in this directory with:
|
||||||
|
# SOURCE_URL=postgresql://user:pass@host:5432/db?schema=platform
|
||||||
|
# DEST_URL=postgresql://user:pass@host:5432/db?schema=platform
|
||||||
|
#
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
|
|
||||||
|
# Load .env file if it exists
|
||||||
|
if [[ -f "$SCRIPT_DIR/.env" ]]; then
|
||||||
|
set -a
|
||||||
|
source "$SCRIPT_DIR/.env"
|
||||||
|
set +a
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check required env vars
|
||||||
|
if [[ -z "${SOURCE_URL:-}" ]]; then
|
||||||
|
echo "ERROR: SOURCE_URL environment variable is required"
|
||||||
|
echo "Set it or create a .env file in this directory"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ -z "${DEST_URL:-}" ]]; then
|
||||||
|
echo "ERROR: DEST_URL environment variable is required"
|
||||||
|
echo "Set it or create a .env file in this directory"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "=== Building db-migrate ==="
|
||||||
|
cd "$SCRIPT_DIR"
|
||||||
|
cargo build --release
|
||||||
|
|
||||||
|
echo ""
|
||||||
|
echo "=== Running ==="
|
||||||
|
echo "Source: ${SOURCE_URL%%@*}@..."
|
||||||
|
echo "Dest: ${DEST_URL%%@*}@..."
|
||||||
|
echo ""
|
||||||
|
|
||||||
|
./target/release/db-migrate "$@"
|
||||||
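A short sketch of exercising run-local.sh the way its header comments describe, starting with read-only and dry-run invocations before doing anything destructive:

```python
# Sketch: drive run-local.sh from Python; the script itself loads .env and
# checks SOURCE_URL/DEST_URL before building and running the binary.
import subprocess


def run_local(*args: str) -> None:
    subprocess.run(["./run-local.sh", *args], check=True)


run_local("table-sizes")          # read-only look at the source database
run_local("quick", "--dry-run")   # verify connectivity without making changes
run_local("quick")                # then the real quick migration
```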
418
autogpt_platform/backend/tools/db-migrate/src/auth.rs
Normal file
@@ -0,0 +1,418 @@
|
|||||||
|
use crate::db::Database;
|
||||||
|
use anyhow::{Context, Result};
|
||||||
|
use comfy_table::{presets::UTF8_FULL, Cell, Color, Table};
|
||||||
|
use tracing::{info, warn};
|
||||||
|
|
||||||
|
/// Migrate auth data from Supabase auth.users to platform.User
|
||||||
|
pub async fn migrate_auth(source: &Database, dest: &Database) -> Result<()> {
|
||||||
|
info!("Migrating auth data from Supabase...");
|
||||||
|
|
||||||
|
// Check if auth.users exists in source
|
||||||
|
let auth_exists = source
|
||||||
|
.query(
|
||||||
|
"SELECT EXISTS (SELECT FROM information_schema.tables WHERE table_schema = 'auth' AND table_name = 'users')",
|
||||||
|
&[],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let exists: bool = auth_exists.first().map(|r| r.get(0)).unwrap_or(false);
|
||||||
|
|
||||||
|
if !exists {
|
||||||
|
warn!("No auth.users table found in source - skipping auth migration");
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get count of users to migrate
|
||||||
|
let count_rows = source
|
||||||
|
.query(
|
||||||
|
r#"
|
||||||
|
SELECT COUNT(*)
|
||||||
|
FROM auth.users
|
||||||
|
WHERE encrypted_password IS NOT NULL
|
||||||
|
OR raw_app_meta_data->>'provider' = 'google'
|
||||||
|
"#,
|
||||||
|
&[],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let auth_user_count: i64 = count_rows.first().map(|r| r.get(0)).unwrap_or(0);
|
||||||
|
info!("Found {} users with auth data to migrate", auth_user_count);
|
||||||
|
|
||||||
|
// Create temp table in destination
|
||||||
|
// Note: platform.User.id is TEXT, not UUID, so we use TEXT here for compatibility
|
||||||
|
info!("Creating temp table for auth data...");
|
||||||
|
dest.batch_execute(
|
||||||
|
r#"
|
||||||
|
CREATE TEMP TABLE IF NOT EXISTS temp_auth_users (
|
||||||
|
id TEXT,
|
||||||
|
encrypted_password TEXT,
|
||||||
|
email_verified BOOLEAN,
|
||||||
|
google_id TEXT
|
||||||
|
)
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// Extract and insert auth data in batches
|
||||||
|
info!("Extracting auth data from source...");
|
||||||
|
|
||||||
|
let batch_size = 1000i64;
|
||||||
|
let mut offset = 0i64;
|
||||||
|
let mut total_migrated = 0i64;
|
||||||
|
|
||||||
|
while offset < auth_user_count {
|
||||||
|
let rows = source
|
||||||
|
.query(
|
||||||
|
r#"
|
||||||
|
SELECT
|
||||||
|
id,
|
||||||
|
encrypted_password,
|
||||||
|
(email_confirmed_at IS NOT NULL) as email_verified,
|
||||||
|
CASE
|
||||||
|
WHEN raw_app_meta_data->>'provider' = 'google'
|
||||||
|
THEN raw_app_meta_data->>'provider_id'
|
||||||
|
ELSE NULL
|
||||||
|
END as google_id
|
||||||
|
FROM auth.users
|
||||||
|
WHERE encrypted_password IS NOT NULL
|
||||||
|
OR raw_app_meta_data->>'provider' = 'google'
|
||||||
|
ORDER BY created_at
|
||||||
|
LIMIT $1 OFFSET $2
|
||||||
|
"#,
|
||||||
|
&[&batch_size, &offset],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if rows.is_empty() {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Insert into temp table
|
||||||
|
for row in &rows {
|
||||||
|
let id: uuid::Uuid = row.get(0);
|
||||||
|
let password: Option<String> = row.get(1);
|
||||||
|
let email_verified: bool = row.get(2);
|
||||||
|
let google_id: Option<String> = row.get(3);
|
||||||
|
|
||||||
|
let insert_sql = format!(
|
||||||
|
"INSERT INTO temp_auth_users (id, encrypted_password, email_verified, google_id) VALUES ('{}', {}, {}, {})",
|
||||||
|
id,
|
||||||
|
password.as_ref().map(|p| format!("'{}'", p.replace('\'', "''"))).unwrap_or_else(|| "NULL".to_string()),
|
||||||
|
email_verified,
|
||||||
|
google_id.as_ref().map(|g| format!("'{}'", g.replace('\'', "''"))).unwrap_or_else(|| "NULL".to_string()),
|
||||||
|
);
|
||||||
|
|
||||||
|
dest.batch_execute(&insert_sql).await?;
|
||||||
|
total_migrated += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
offset += rows.len() as i64;
|
||||||
|
info!(" Processed {}/{} auth records", offset, auth_user_count);
|
||||||
|
}
|
||||||
|
|
||||||
|
info!("Migrated {} auth records to temp table", total_migrated);
|
||||||
|
|
||||||
|
// Update User table with password hashes
|
||||||
|
info!("Updating User table with password hashes...");
|
||||||
|
let password_result = dest
|
||||||
|
.execute(
|
||||||
|
&format!(
|
||||||
|
r#"
|
||||||
|
UPDATE {schema}."User" u
|
||||||
|
SET "passwordHash" = t.encrypted_password
|
||||||
|
FROM temp_auth_users t
|
||||||
|
WHERE u.id = t.id
|
||||||
|
AND t.encrypted_password IS NOT NULL
|
||||||
|
AND u."passwordHash" IS NULL
|
||||||
|
"#,
|
||||||
|
schema = dest.schema()
|
||||||
|
),
|
||||||
|
&[],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
info!(" Updated {} users with password hashes", password_result);
|
||||||
|
|
||||||
|
// Update email verification status
|
||||||
|
info!("Updating email verification status...");
|
||||||
|
let email_result = dest
|
||||||
|
.execute(
|
||||||
|
&format!(
|
||||||
|
r#"
|
||||||
|
UPDATE {schema}."User" u
|
||||||
|
SET "emailVerified" = t.email_verified
|
||||||
|
FROM temp_auth_users t
|
||||||
|
WHERE u.id = t.id
|
||||||
|
AND t.email_verified = true
|
||||||
|
"#,
|
||||||
|
schema = dest.schema()
|
||||||
|
),
|
||||||
|
&[],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
info!(" Updated {} users with email verification", email_result);
|
||||||
|
|
||||||
|
// Update Google OAuth IDs
|
||||||
|
info!("Updating Google OAuth IDs...");
|
||||||
|
let google_result = dest
|
||||||
|
.execute(
|
||||||
|
&format!(
|
||||||
|
r#"
|
||||||
|
UPDATE {schema}."User" u
|
||||||
|
SET "googleId" = t.google_id
|
||||||
|
FROM temp_auth_users t
|
||||||
|
WHERE u.id = t.id
|
||||||
|
AND t.google_id IS NOT NULL
|
||||||
|
AND u."googleId" IS NULL
|
||||||
|
"#,
|
||||||
|
schema = dest.schema()
|
||||||
|
),
|
||||||
|
&[],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
info!(" Updated {} users with Google OAuth IDs", google_result);
|
||||||
|
|
||||||
|
// Clean up temp table
|
||||||
|
dest.batch_execute("DROP TABLE IF EXISTS temp_auth_users")
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
info!("Auth migration complete!");
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Verify auth migration
|
||||||
|
pub async fn verify_auth(source: &Database, dest: &Database) -> Result<()> {
|
||||||
|
info!("Verifying auth migration...");
|
||||||
|
|
||||||
|
// Get source stats from auth.users
|
||||||
|
let source_stats = source
|
||||||
|
.query(
|
||||||
|
r#"
|
||||||
|
SELECT
|
||||||
|
COUNT(*) as total,
|
||||||
|
COUNT(encrypted_password) as with_password,
|
||||||
|
COUNT(CASE WHEN raw_app_meta_data->>'provider' = 'google' THEN 1 END) as with_google,
|
||||||
|
COUNT(CASE WHEN email_confirmed_at IS NOT NULL THEN 1 END) as email_verified
|
||||||
|
FROM auth.users
|
||||||
|
"#,
|
||||||
|
&[],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// Get destination stats from User table
|
||||||
|
let dest_stats = dest
|
||||||
|
.query(
|
||||||
|
&format!(
|
||||||
|
r#"
|
||||||
|
SELECT
|
||||||
|
COUNT(*) as total,
|
||||||
|
COUNT("passwordHash") as with_password,
|
||||||
|
COUNT("googleId") as with_google,
|
||||||
|
COUNT(CASE WHEN "emailVerified" = true THEN 1 END) as email_verified
|
||||||
|
FROM {schema}."User"
|
||||||
|
"#,
|
||||||
|
schema = dest.schema()
|
||||||
|
),
|
||||||
|
&[],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let mut table = Table::new();
|
||||||
|
table.load_preset(UTF8_FULL);
|
||||||
|
table.set_header(vec!["Metric", "Source (auth.users)", "Dest (User)", "Status"]);
|
||||||
|
|
||||||
|
let metrics = ["Total Users", "With Password", "With Google OAuth", "Email Verified"];
|
||||||
|
|
||||||
|
let source_row = source_stats.first().context("No source stats")?;
|
||||||
|
let dest_row = dest_stats.first().context("No dest stats")?;
|
||||||
|
|
||||||
|
let mut all_match = true;
|
||||||
|
|
||||||
|
for (i, metric) in metrics.iter().enumerate() {
|
||||||
|
let source_val: i64 = source_row.get(i);
|
||||||
|
let dest_val: i64 = dest_row.get(i);
|
||||||
|
|
||||||
|
// For total users, dest may have fewer (users without auth)
|
||||||
|
// For auth fields, they should match or dest should be >= source
|
||||||
|
let status = if i == 0 {
|
||||||
|
// Total users - dest should be >= source users with auth
|
||||||
|
Cell::new("✓").fg(Color::Green)
|
||||||
|
} else if dest_val >= source_val * 95 / 100 {
|
||||||
|
// Allow 5% tolerance for auth fields
|
||||||
|
Cell::new("✓").fg(Color::Green)
|
||||||
|
} else {
|
||||||
|
all_match = false;
|
||||||
|
Cell::new("LOW").fg(Color::Yellow)
|
||||||
|
};
|
||||||
|
|
||||||
|
table.add_row(vec![
|
||||||
|
Cell::new(*metric),
|
||||||
|
Cell::new(source_val),
|
||||||
|
Cell::new(dest_val),
|
||||||
|
status,
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
|
||||||
|
println!("\nAuth Migration Verification:\n{}", table);
|
||||||
|
|
||||||
|
// Check for users without any auth method
|
||||||
|
let orphan_check = dest
|
||||||
|
.query(
|
||||||
|
&format!(
|
||||||
|
r#"
|
||||||
|
SELECT COUNT(*)
|
||||||
|
FROM {schema}."User"
|
||||||
|
WHERE "passwordHash" IS NULL
|
||||||
|
AND "googleId" IS NULL
|
||||||
|
"#,
|
||||||
|
schema = dest.schema()
|
||||||
|
),
|
||||||
|
&[],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let orphans: i64 = orphan_check.first().map(|r| r.get(0)).unwrap_or(0);
|
||||||
|
|
||||||
|
if orphans > 0 {
|
||||||
|
warn!(
|
||||||
|
"{} users have no auth method (may be other OAuth providers)",
|
||||||
|
orphans
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if all_match {
|
||||||
|
info!("Auth verification passed!");
|
||||||
|
} else {
|
||||||
|
warn!("Some auth metrics don't match - review above table");
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Migrate auth data for a single user
|
||||||
|
pub async fn migrate_single_user_auth(source: &Database, dest: &Database, user_id: &str) -> Result<()> {
|
||||||
|
// Parse as UUID for auth.users query (Supabase uses native UUID)
|
||||||
|
let uid = uuid::Uuid::parse_str(user_id).context("Invalid user ID format")?;
|
||||||
|
|
||||||
|
info!("Migrating auth for user: {}", user_id);
|
||||||
|
|
||||||
|
// Check if auth.users exists
|
||||||
|
let auth_exists = source
|
||||||
|
.query(
|
||||||
|
"SELECT EXISTS (SELECT FROM information_schema.tables WHERE table_schema = 'auth' AND table_name = 'users')",
|
||||||
|
&[],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let exists: bool = auth_exists.first().map(|r| r.get(0)).unwrap_or(false);
|
||||||
|
|
||||||
|
if !exists {
|
||||||
|
warn!("No auth.users table found - skipping");
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get auth data for this user (auth.users uses native UUID type)
|
||||||
|
let rows = source
|
||||||
|
.query(
|
||||||
|
r#"
|
||||||
|
SELECT
|
||||||
|
encrypted_password,
|
||||||
|
(email_confirmed_at IS NOT NULL) as email_verified,
|
||||||
|
CASE
|
||||||
|
WHEN raw_app_meta_data->>'provider' = 'google'
|
||||||
|
THEN raw_app_meta_data->>'provider_id'
|
||||||
|
ELSE NULL
|
||||||
|
END as google_id
|
||||||
|
FROM auth.users
|
||||||
|
WHERE id = $1
|
||||||
|
"#,
|
||||||
|
&[&uid],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if let Some(row) = rows.first() {
|
||||||
|
let password: Option<String> = row.get(0);
|
||||||
|
let email_verified: bool = row.get(1);
|
||||||
|
let google_id: Option<String> = row.get(2);
|
||||||
|
|
||||||
|
info!(" Found auth data:");
|
||||||
|
info!(" Has password: {}", password.is_some());
|
||||||
|
info!(" Email verified: {}", email_verified);
|
||||||
|
info!(" Has Google ID: {}", google_id.is_some());
|
||||||
|
|
||||||
|
// Update password hash (platform.User.id is String, not UUID)
|
||||||
|
if let Some(ref pw) = password {
|
||||||
|
dest.execute(
|
||||||
|
&format!(
|
||||||
|
"UPDATE {}.\"User\" SET \"passwordHash\" = $1 WHERE id = $2 AND \"passwordHash\" IS NULL",
|
||||||
|
dest.schema()
|
||||||
|
),
|
||||||
|
&[pw, &user_id],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
info!(" Updated password hash");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update email verified
|
||||||
|
if email_verified {
|
||||||
|
dest.execute(
|
||||||
|
&format!(
|
||||||
|
"UPDATE {}.\"User\" SET \"emailVerified\" = true WHERE id = $1",
|
||||||
|
dest.schema()
|
||||||
|
),
|
||||||
|
&[&user_id],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
info!(" Updated email verification");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update Google ID
|
||||||
|
if let Some(ref gid) = google_id {
|
||||||
|
dest.execute(
|
||||||
|
&format!(
|
||||||
|
"UPDATE {}.\"User\" SET \"googleId\" = $1 WHERE id = $2 AND \"googleId\" IS NULL",
|
||||||
|
dest.schema()
|
||||||
|
),
|
||||||
|
&[gid, &user_id],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
info!(" Updated Google OAuth ID");
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
warn!(" No auth data found for user");
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Show detailed auth comparison
|
||||||
|
pub async fn compare_auth_details(source: &Database, dest: &Database) -> Result<()> {
|
||||||
|
info!("Comparing auth details...");
|
||||||
|
|
||||||
|
// Find users in source auth.users but missing auth in dest
|
||||||
|
let missing = dest
|
||||||
|
.query(
|
||||||
|
&format!(
|
||||||
|
r#"
|
||||||
|
SELECT u.id, u.email
|
||||||
|
FROM {schema}."User" u
|
||||||
|
WHERE u."passwordHash" IS NULL
|
||||||
|
AND u."googleId" IS NULL
|
||||||
|
LIMIT 10
|
||||||
|
"#,
|
||||||
|
schema = dest.schema()
|
||||||
|
),
|
||||||
|
&[],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if !missing.is_empty() {
|
||||||
|
println!("\nSample users without auth method:");
|
||||||
|
for row in missing {
|
||||||
|
let id: String = row.get(0); // platform.User.id is String
|
||||||
|
let email: String = row.get(1);
|
||||||
|
println!(" {} - {}", id, email);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
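An independent spot-check of the auth migration, mirroring the counts that verify_auth() above compares; this is a sketch using psycopg2 directly, with the same SOURCE_URL/DEST_URL connection strings (the `?schema=...` suffix is stripped because the schema is written into the queries explicitly):

```python
# Sketch: compare auth-related counts between auth.users and platform."User".
import os

import psycopg2

SOURCE_SQL = """
    SELECT COUNT(encrypted_password),
           COUNT(CASE WHEN raw_app_meta_data->>'provider' = 'google' THEN 1 END),
           COUNT(CASE WHEN email_confirmed_at IS NOT NULL THEN 1 END)
    FROM auth.users
"""
DEST_SQL = """
    SELECT COUNT("passwordHash"),
           COUNT("googleId"),
           COUNT(CASE WHEN "emailVerified" = true THEN 1 END)
    FROM platform."User"
"""


def counts(url: str, sql: str):
    with psycopg2.connect(url.split("?")[0]) as conn, conn.cursor() as cur:
        cur.execute(sql)
        return cur.fetchone()


src = counts(os.environ["SOURCE_URL"], SOURCE_SQL)
dst = counts(os.environ["DEST_URL"], DEST_SQL)
for label, s, d in zip(("password", "google", "email_verified"), src, dst):
    print(f"{label}: source={s} dest={d} {'OK' if d >= s else 'LOW'}")
```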
213
autogpt_platform/backend/tools/db-migrate/src/db.rs
Normal file
@@ -0,0 +1,213 @@
|
|||||||
|
use anyhow::{Context, Result};
|
||||||
|
use tokio_postgres::{Client, NoTls, Row};
|
||||||
|
use url::Url;
|
||||||
|
|
||||||
|
pub struct Database {
|
||||||
|
client: Client,
|
||||||
|
schema: String,
|
||||||
|
host: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Database {
|
||||||
|
pub async fn connect(url: &str, schema: &str) -> Result<Self> {
|
||||||
|
// Parse URL to extract host for display
|
||||||
|
let parsed = Url::parse(url).context("Invalid database URL")?;
|
||||||
|
let host = parsed.host_str().unwrap_or("unknown").to_string();
|
||||||
|
|
||||||
|
// Remove schema parameter from URL for tokio-postgres
|
||||||
|
let base_url = url.split('?').next().unwrap_or(url);
|
||||||
|
|
||||||
|
let (client, connection) = tokio_postgres::connect(base_url, NoTls)
|
||||||
|
.await
|
||||||
|
.context("Failed to connect to database")?;
|
||||||
|
|
||||||
|
// Spawn connection handler
|
||||||
|
tokio::spawn(async move {
|
||||||
|
if let Err(e) = connection.await {
|
||||||
|
eprintln!("Database connection error: {}", e);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Set search path to schema
|
||||||
|
client
|
||||||
|
.execute(&format!("SET search_path TO {}", schema), &[])
|
||||||
|
.await
|
||||||
|
.context("Failed to set search_path")?;
|
||||||
|
|
||||||
|
Ok(Self {
|
||||||
|
client,
|
||||||
|
schema: schema.to_string(),
|
||||||
|
host,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn host(&self) -> &str {
|
||||||
|
&self.host
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn schema(&self) -> &str {
|
||||||
|
&self.schema
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn client(&self) -> &Client {
|
||||||
|
&self.client
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn query(&self, sql: &str, params: &[&(dyn tokio_postgres::types::ToSql + Sync)]) -> Result<Vec<Row>> {
|
||||||
|
self.client
|
||||||
|
.query(sql, params)
|
||||||
|
.await
|
||||||
|
.context("Query failed")
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn execute(&self, sql: &str, params: &[&(dyn tokio_postgres::types::ToSql + Sync)]) -> Result<u64> {
|
||||||
|
self.client
|
||||||
|
.execute(sql, params)
|
||||||
|
.await
|
||||||
|
.context("Execute failed")
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn batch_execute(&self, sql: &str) -> Result<()> {
|
||||||
|
self.client
|
||||||
|
.batch_execute(sql)
|
||||||
|
.await
|
||||||
|
.with_context(|| format!("Batch execute failed:\n{}", sql.chars().take(500).collect::<String>()))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get all table names in the schema
|
||||||
|
pub async fn get_tables(&self) -> Result<Vec<String>> {
|
||||||
|
let rows = self
|
||||||
|
.query(
|
||||||
|
"SELECT tablename FROM pg_tables WHERE schemaname = $1 ORDER BY tablename",
|
||||||
|
&[&self.schema],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(rows.iter().map(|r| r.get::<_, String>(0)).collect())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get row count for a table
|
||||||
|
pub async fn get_row_count(&self, table: &str) -> Result<i64> {
|
||||||
|
let sql = format!("SELECT COUNT(*) FROM {}.\"{}\"", self.schema, table);
|
||||||
|
let rows = self.query(&sql, &[]).await?;
|
||||||
|
Ok(rows[0].get(0))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get table size
|
||||||
|
pub async fn get_table_size(&self, table: &str) -> Result<(i64, String)> {
|
||||||
|
let sql = format!(
|
||||||
|
"SELECT pg_total_relation_size('{}.\"{}\"'), pg_size_pretty(pg_total_relation_size('{}.\"{}\"'))",
|
||||||
|
self.schema, table, self.schema, table
|
||||||
|
);
|
||||||
|
let rows = self.query(&sql, &[]).await?;
|
||||||
|
Ok((rows[0].get(0), rows[0].get(1)))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Check if table exists
|
||||||
|
pub async fn table_exists(&self, table: &str) -> Result<bool> {
|
||||||
|
let rows = self
|
||||||
|
.query(
|
||||||
|
"SELECT EXISTS (SELECT FROM pg_tables WHERE schemaname = $1 AND tablename = $2)",
|
||||||
|
&[&self.schema, &table],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
Ok(rows[0].get(0))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get functions in schema
|
||||||
|
pub async fn get_functions(&self) -> Result<Vec<(String, String)>> {
|
||||||
|
let rows = self
|
||||||
|
.query(
|
||||||
|
r#"
|
||||||
|
SELECT p.proname, pg_get_functiondef(p.oid)
|
||||||
|
FROM pg_proc p
|
||||||
|
JOIN pg_namespace n ON p.pronamespace = n.oid
|
||||||
|
WHERE n.nspname = $1
|
||||||
|
ORDER BY p.proname
|
||||||
|
"#,
|
||||||
|
&[&self.schema],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(rows
|
||||||
|
.iter()
|
||||||
|
.map(|r| (r.get::<_, String>(0), r.get::<_, String>(1)))
|
||||||
|
.collect())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get triggers in schema
|
||||||
|
pub async fn get_triggers(&self) -> Result<Vec<(String, String, String)>> {
|
||||||
|
let rows = self
|
||||||
|
.query(
|
||||||
|
r#"
|
||||||
|
SELECT
|
||||||
|
t.tgname,
|
||||||
|
c.relname as table_name,
|
||||||
|
pg_get_triggerdef(t.oid)
|
||||||
|
FROM pg_trigger t
|
||||||
|
JOIN pg_class c ON t.tgrelid = c.oid
|
||||||
|
JOIN pg_namespace n ON c.relnamespace = n.oid
|
||||||
|
WHERE n.nspname = $1
|
||||||
|
AND NOT t.tgisinternal
|
||||||
|
ORDER BY c.relname, t.tgname
|
||||||
|
"#,
|
||||||
|
&[&self.schema],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(rows
|
||||||
|
.iter()
|
||||||
|
.map(|r| {
|
||||||
|
(
|
||||||
|
r.get::<_, String>(0),
|
||||||
|
r.get::<_, String>(1),
|
||||||
|
r.get::<_, String>(2),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
.collect())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get materialized views
|
||||||
|
pub async fn get_materialized_views(&self) -> Result<Vec<String>> {
|
||||||
|
let rows = self
|
||||||
|
.query(
|
||||||
|
r#"
|
||||||
|
SELECT matviewname
|
||||||
|
FROM pg_matviews
|
||||||
|
WHERE schemaname = $1
|
||||||
|
ORDER BY matviewname
|
||||||
|
"#,
|
||||||
|
&[&self.schema],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(rows.iter().map(|r| r.get::<_, String>(0)).collect())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Copy data from table using COPY protocol (for streaming)
|
||||||
|
pub async fn copy_out(&self, table: &str) -> Result<Vec<u8>> {
|
||||||
|
let sql = format!(
|
||||||
|
"COPY {}.\"{}\" TO STDOUT WITH (FORMAT binary)",
|
||||||
|
self.schema, table
|
||||||
|
);
|
||||||
|
|
||||||
|
let stream = self
|
||||||
|
.client
|
||||||
|
.copy_out(&sql)
|
||||||
|
.await
|
||||||
|
.context("COPY OUT failed")?;
|
||||||
|
|
||||||
|
use futures::StreamExt;
|
||||||
|
use tokio_postgres::binary_copy::BinaryCopyOutStream;
|
||||||
|
|
||||||
|
let mut data = Vec::new();
|
||||||
|
let mut stream = std::pin::pin!(stream);
|
||||||
|
|
||||||
|
while let Some(chunk) = stream.next().await {
|
||||||
|
let chunk = chunk.context("Error reading COPY stream")?;
|
||||||
|
data.extend_from_slice(&chunk);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(data)
|
||||||
|
}
|
||||||
|
}
|
||||||
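A read-only sketch that mirrors get_tables()/get_row_count() above from Python, useful as a cross-check of the tool's own verification; it assumes the default `platform` schema and psycopg2:

```python
# Sketch: per-table row-count comparison between source and destination.
import os

import psycopg2

SCHEMA = "platform"


def row_counts(url: str) -> dict[str, int]:
    with psycopg2.connect(url.split("?")[0]) as conn, conn.cursor() as cur:
        cur.execute(
            "SELECT tablename FROM pg_tables WHERE schemaname = %s ORDER BY tablename",
            (SCHEMA,),
        )
        tables = [r[0] for r in cur.fetchall()]
        counts = {}
        for table in tables:
            cur.execute(f'SELECT COUNT(*) FROM {SCHEMA}."{table}"')
            counts[table] = cur.fetchone()[0]
        return counts


source = row_counts(os.environ["SOURCE_URL"])
dest = row_counts(os.environ["DEST_URL"])
for table, n in source.items():
    m = dest.get(table, 0)
    print(f"{table}: {n} -> {m} {'OK' if n == m else 'MISMATCH'}")
```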
276
autogpt_platform/backend/tools/db-migrate/src/main.rs
Normal file
@@ -0,0 +1,276 @@
|
|||||||
|
use anyhow::{Context, Result};
|
||||||
|
use clap::{Parser, Subcommand};
|
||||||
|
use tracing::{info, warn};
|
||||||
|
use tracing_subscriber::{fmt, EnvFilter};
|
||||||
|
|
||||||
|
mod db;
|
||||||
|
mod migrate;
|
||||||
|
mod verify;
|
||||||
|
mod auth;
|
||||||
|
|
||||||
|
#[derive(Parser)]
|
||||||
|
#[command(name = "db-migrate")]
|
||||||
|
#[command(about = "Database migration tool for AutoGPT Platform")]
|
||||||
|
struct Cli {
|
||||||
|
/// Source database URL (Supabase)
|
||||||
|
#[arg(long, env = "SOURCE_URL")]
|
||||||
|
source: String,
|
||||||
|
|
||||||
|
/// Destination database URL (GCP Cloud SQL)
|
||||||
|
#[arg(long, env = "DEST_URL")]
|
||||||
|
dest: String,
|
||||||
|
|
||||||
|
/// Schema name (default: platform)
|
||||||
|
#[arg(long, default_value = "platform")]
|
||||||
|
schema: String,
|
||||||
|
|
||||||
|
/// Dry run mode - verify everything works without making changes
|
||||||
|
#[arg(long, global = true)]
|
||||||
|
dry_run: bool,
|
||||||
|
|
||||||
|
#[command(subcommand)]
|
||||||
|
command: Commands,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Subcommand)]
|
||||||
|
enum Commands {
|
||||||
|
/// Run full migration (schema + data + auth + verify)
|
||||||
|
Full {
|
||||||
|
/// Skip large execution tables
|
||||||
|
#[arg(long, default_value = "true")]
|
||||||
|
skip_large_tables: bool,
|
||||||
|
},
|
||||||
|
|
||||||
|
/// Quick migration: User, Profile, UserOnboarding, UserBalance + auth (for testing)
|
||||||
|
Quick,
|
||||||
|
|
||||||
|
/// Solo run: migrate a single user's data for testing
|
||||||
|
Solo {
|
||||||
|
/// User ID to migrate (uses first user if not specified)
|
||||||
|
#[arg(long)]
|
||||||
|
user_id: Option<String>,
|
||||||
|
},
|
||||||
|
|
||||||
|
/// Migrate schema only
|
||||||
|
Schema,
|
||||||
|
|
||||||
|
/// Migrate data only (assumes schema exists)
|
||||||
|
Data {
|
||||||
|
/// Skip large execution tables
|
||||||
|
#[arg(long, default_value = "true")]
|
||||||
|
skip_large_tables: bool,
|
||||||
|
|
||||||
|
/// Specific table to migrate
|
||||||
|
#[arg(long)]
|
||||||
|
table: Option<String>,
|
||||||
|
},
|
||||||
|
|
||||||
|
/// Migrate auth data (passwords, OAuth IDs)
|
||||||
|
Auth,
|
||||||
|
|
||||||
|
/// Verify both databases match
|
||||||
|
Verify {
|
||||||
|
/// Check triggers and functions
|
||||||
|
#[arg(long)]
|
||||||
|
check_functions: bool,
|
||||||
|
},
|
||||||
|
|
||||||
|
/// Show table sizes in source
|
||||||
|
TableSizes,
|
||||||
|
|
||||||
|
/// Stream large tables (execution history)
|
||||||
|
StreamLarge {
|
||||||
|
/// Specific table to stream
|
||||||
|
#[arg(long)]
|
||||||
|
table: Option<String>,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
#[tokio::main]
|
||||||
|
async fn main() -> Result<()> {
|
||||||
|
// Initialize logging
|
||||||
|
tracing_subscriber::fmt()
|
||||||
|
.with_env_filter(EnvFilter::from_default_env().add_directive("db_migrate=info".parse()?))
|
||||||
|
.init();
|
||||||
|
|
||||||
|
let cli = Cli::parse();
|
||||||
|
|
||||||
|
info!("Connecting to databases...");
|
||||||
|
let source = db::Database::connect(&cli.source, &cli.schema).await?;
|
||||||
|
let dest = db::Database::connect(&cli.dest, &cli.schema).await?;
|
||||||
|
|
||||||
|
info!("Source: {}", source.host());
|
||||||
|
info!("Destination: {}", dest.host());
|
||||||
|
|
||||||
|
if cli.dry_run {
|
||||||
|
warn!("DRY RUN MODE - No changes will be made");
|
||||||
|
}
|
||||||
|
|
||||||
|
match cli.command {
|
||||||
|
Commands::Full { skip_large_tables } => {
|
||||||
|
info!("=== Running Full Migration ===");
|
||||||
|
|
||||||
|
// Step 1: Migrate schema
|
||||||
|
info!("\n=== Step 1: Migrating Schema ===");
|
||||||
|
migrate::migrate_schema(&source, &dest).await?;
|
||||||
|
|
||||||
|
// Step 2: Migrate data
|
||||||
|
info!("\n=== Step 2: Migrating Data ===");
|
||||||
|
migrate::migrate_data(&source, &dest, skip_large_tables).await?;
|
||||||
|
|
||||||
|
// Step 3: Verify data
|
||||||
|
info!("\n=== Step 3: Verifying Data ===");
|
||||||
|
verify::verify_row_counts(&source, &dest).await?;
|
||||||
|
|
||||||
|
// Step 4: Migrate auth
|
||||||
|
info!("\n=== Step 4: Migrating Auth Data ===");
|
||||||
|
auth::migrate_auth(&source, &dest).await?;
|
||||||
|
|
||||||
|
// Step 5: Verify auth
|
||||||
|
info!("\n=== Step 5: Verifying Auth Migration ===");
|
||||||
|
auth::verify_auth(&source, &dest).await?;
|
||||||
|
|
||||||
|
// Step 6: Check functions/triggers
|
||||||
|
info!("\n=== Step 6: Checking Functions & Triggers ===");
|
||||||
|
verify::verify_functions(&source, &dest).await?;
|
||||||
|
|
||||||
|
info!("\n=== Migration Complete! ===");
|
||||||
|
}
|
||||||
|
|
||||||
|
Commands::Quick => {
|
||||||
|
info!("=== Quick Migration: Users, Profiles, Auth ===");
|
||||||
|
|
||||||
|
let quick_tables = vec![
|
||||||
|
"User",
|
||||||
|
"Profile",
|
||||||
|
"UserOnboarding",
|
||||||
|
"UserBalance",
|
||||||
|
];
|
||||||
|
|
||||||
|
// Step 1: Migrate schema for quick tables
|
||||||
|
info!("\n=== Step 1: Creating Schema ===");
|
||||||
|
migrate::migrate_schema(&source, &dest).await?;
|
||||||
|
|
||||||
|
// Step 1.5: Verify all quick tables exist in destination
|
||||||
|
info!("\n=== Step 1.5: Verifying Tables Exist ===");
|
||||||
|
for table in &quick_tables {
|
||||||
|
let exists = dest.table_exists(table).await?;
|
||||||
|
if !exists {
|
||||||
|
anyhow::bail!("Table {} was not created in destination! Check schema migration errors.", table);
|
||||||
|
}
|
||||||
|
info!(" ✓ {} exists", table);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 2: Migrate user-related tables
|
||||||
|
info!("\n=== Step 2: Migrating User Tables ===");
|
||||||
|
for table in &quick_tables {
|
||||||
|
info!("Migrating {}...", table);
|
||||||
|
migrate::migrate_table(&source, &dest, table).await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Step 3: Migrate auth
|
||||||
|
info!("\n=== Step 3: Migrating Auth Data ===");
|
||||||
|
auth::migrate_auth(&source, &dest).await?;
|
||||||
|
|
||||||
|
// Step 4: Verify
|
||||||
|
info!("\n=== Step 4: Verification ===");
|
||||||
|
for table in &quick_tables {
|
||||||
|
let source_count = source.get_row_count(table).await?;
|
||||||
|
let dest_count = dest.get_row_count(table).await?;
|
||||||
|
let status = if source_count == dest_count { "✓" } else { "✗" };
|
||||||
|
info!(" {}: {} -> {} {}", table, source_count, dest_count, status);
|
||||||
|
}
|
||||||
|
auth::verify_auth(&source, &dest).await?;
|
||||||
|
|
||||||
|
info!("\n=== Quick Migration Complete! ===");
|
||||||
|
info!("You can now test user login/signup");
|
||||||
|
}
|
||||||
|
|
||||||
|
Commands::Solo { user_id } => {
|
||||||
|
info!("=== Solo Run: Single User Migration ===");
|
||||||
|
|
||||||
|
// Get a user ID to migrate
|
||||||
|
let uid = if let Some(id) = user_id {
|
||||||
|
id
|
||||||
|
} else {
|
||||||
|
// Get first user from source (id is stored as String in Prisma)
|
||||||
|
let rows = source
|
||||||
|
.query(
|
||||||
|
&format!("SELECT id FROM {}.\"User\" LIMIT 1", source.schema()),
|
||||||
|
&[],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
let id: String = rows.first().context("No users found")?.get(0);
|
||||||
|
id
|
||||||
|
};
|
||||||
|
|
||||||
|
info!("Migrating user: {}", uid);
|
||||||
|
|
||||||
|
// Create schema
|
||||||
|
info!("\n=== Step 1: Creating Schema ===");
|
||||||
|
migrate::migrate_schema(&source, &dest).await?;
|
||||||
|
|
||||||
|
// Migrate single user
|
||||||
|
info!("\n=== Step 2: Migrating Single User ===");
|
||||||
|
migrate::migrate_single_user(&source, &dest, &uid).await?;
|
||||||
|
|
||||||
|
// Migrate auth for this user
|
||||||
|
info!("\n=== Step 3: Migrating Auth ===");
|
||||||
|
auth::migrate_single_user_auth(&source, &dest, &uid).await?;
|
||||||
|
|
||||||
|
// Verify
|
||||||
|
info!("\n=== Step 4: Verification ===");
|
||||||
|
let dest_user = dest
|
||||||
|
.query(
|
||||||
|
&format!("SELECT id, email, \"passwordHash\" IS NOT NULL as has_pw, \"googleId\" IS NOT NULL as has_google FROM {}.\"User\" WHERE id = $1", dest.schema()),
|
||||||
|
&[&uid],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if let Some(row) = dest_user.first() {
|
||||||
|
let email: String = row.get(1);
|
||||||
|
let has_pw: bool = row.get(2);
|
||||||
|
let has_google: bool = row.get(3);
|
||||||
|
info!(" Email: {}", email);
|
||||||
|
info!(" Has password: {}", has_pw);
|
||||||
|
info!(" Has Google OAuth: {}", has_google);
|
||||||
|
}
|
||||||
|
|
||||||
|
info!("\n=== Solo Run Complete! ===");
|
||||||
|
}
|
||||||
|
|
||||||
|
Commands::Schema => {
|
||||||
|
migrate::migrate_schema(&source, &dest).await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Commands::Data { skip_large_tables, table } => {
|
||||||
|
if let Some(table_name) = table {
|
||||||
|
migrate::migrate_table(&source, &dest, &table_name).await?;
|
||||||
|
} else {
|
||||||
|
migrate::migrate_data(&source, &dest, skip_large_tables).await?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Commands::Auth => {
|
||||||
|
auth::migrate_auth(&source, &dest).await?;
|
||||||
|
auth::verify_auth(&source, &dest).await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Commands::Verify { check_functions } => {
|
||||||
|
verify::verify_row_counts(&source, &dest).await?;
|
||||||
|
if check_functions {
|
||||||
|
verify::verify_functions(&source, &dest).await?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Commands::TableSizes => {
|
||||||
|
verify::show_table_sizes(&source).await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Commands::StreamLarge { table } => {
|
||||||
|
migrate::stream_large_tables(&source, &dest, table).await?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
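A sketch of the "solo" flow handled above: migrate a single user end to end, then spot-check that the row arrived in the destination. The user id is a placeholder taken from an environment variable, and the follow-up query mirrors the verification query main.rs runs after a solo migration:

```python
# Sketch: run `db-migrate solo --user-id <id>` and confirm the user exists.
import os
import subprocess

import psycopg2

USER_ID = os.environ["TEST_USER_ID"]  # placeholder: id of the user to migrate

subprocess.run(
    ["./target/release/db-migrate", "solo", "--user-id", USER_ID],
    check=True,
)

with psycopg2.connect(os.environ["DEST_URL"].split("?")[0]) as conn, conn.cursor() as cur:
    cur.execute(
        'SELECT email, "passwordHash" IS NOT NULL FROM platform."User" WHERE id = %s',
        (USER_ID,),
    )
    print(cur.fetchone())
```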
603
autogpt_platform/backend/tools/db-migrate/src/migrate.rs
Normal file
@@ -0,0 +1,603 @@
|
|||||||
|
use crate::db::Database;
|
||||||
|
use anyhow::{Context, Result};
|
||||||
|
use indicatif::{ProgressBar, ProgressStyle};
|
||||||
|
use tracing::{info, warn};
|
||||||
|
|
||||||
|
/// Get default value for NULL columns that have NOT NULL constraints in dest
|
||||||
|
/// Returns Some(default_sql) if a default should be used, None otherwise
|
||||||
|
fn get_null_default(table: &str, column: &str) -> Option<&'static str> {
|
||||||
|
match (table, column) {
|
||||||
|
// User table - all Prisma @default values
|
||||||
|
("User", "createdAt") => Some("NOW()"),
|
||||||
|
("User", "updatedAt") => Some("NOW()"),
|
||||||
|
("User", "metadata") => Some("'{}'::jsonb"),
|
||||||
|
("User", "integrations") => Some("''"),
|
||||||
|
("User", "emailVerified") => Some("false"),
|
||||||
|
("User", "role") => Some("'authenticated'"),
|
||||||
|
("User", "maxEmailsPerDay") => Some("3"),
|
||||||
|
("User", "notifyOnAgentRun") => Some("true"),
|
||||||
|
("User", "notifyOnZeroBalance") => Some("true"),
|
||||||
|
("User", "notifyOnLowBalance") => Some("true"),
|
||||||
|
("User", "notifyOnBlockExecutionFailed") => Some("true"),
|
||||||
|
("User", "notifyOnContinuousAgentError") => Some("true"),
|
||||||
|
("User", "notifyOnDailySummary") => Some("true"),
|
||||||
|
("User", "notifyOnWeeklySummary") => Some("true"),
|
||||||
|
("User", "notifyOnMonthlySummary") => Some("true"),
|
||||||
|
("User", "notifyOnAgentApproved") => Some("true"),
|
||||||
|
("User", "notifyOnAgentRejected") => Some("true"),
|
||||||
|
("User", "timezone") => Some("'not-set'"),
|
||||||
|
// UserOnboarding defaults
|
||||||
|
("UserOnboarding", "createdAt") => Some("NOW()"),
|
||||||
|
("UserOnboarding", "updatedAt") => Some("NOW()"),
|
||||||
|
// UserBalance defaults
|
||||||
|
("UserBalance", "balance") => Some("0"),
|
||||||
|
("UserBalance", "updatedAt") => Some("NOW()"),
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Tables to skip during initial migration (large execution history)
|
||||||
|
const LARGE_TABLES: &[&str] = &[
|
||||||
|
"AgentGraphExecution",
|
||||||
|
"AgentNodeExecution",
|
||||||
|
"AgentNodeExecutionInputOutput",
|
||||||
|
"AgentNodeExecutionKeyValueData",
|
||||||
|
"NotificationEvent",
|
||||||
|
];
|
||||||
|
|
||||||
|
/// Migrate schema from source to destination
|
||||||
|
pub async fn migrate_schema(source: &Database, dest: &Database) -> Result<()> {
|
||||||
|
info!("Fetching schema from source...");
|
||||||
|
|
||||||
|
// Get CREATE statements for tables
|
||||||
|
let tables = source.get_tables().await?;
|
||||||
|
info!("Found {} tables", tables.len());
|
||||||
|
|
||||||
|
// Create schema if not exists
|
||||||
|
dest.batch_execute(&format!(
|
||||||
|
"CREATE SCHEMA IF NOT EXISTS {}",
|
||||||
|
source.schema()
|
||||||
|
))
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// Create enum types first (before tables that reference them)
|
||||||
|
info!("Creating enum types...");
|
||||||
|
let enums = source
|
||||||
|
.query(
|
||||||
|
r#"
|
||||||
|
SELECT
|
||||||
|
t.typname,
|
||||||
|
string_agg(e.enumlabel, ',' ORDER BY e.enumsortorder) as labels
|
||||||
|
FROM pg_type t
|
||||||
|
JOIN pg_namespace n ON n.oid = t.typnamespace
|
||||||
|
JOIN pg_enum e ON e.enumtypid = t.oid
|
||||||
|
WHERE n.nspname = $1
|
||||||
|
GROUP BY t.typname
|
||||||
|
"#,
|
||||||
|
&[&source.schema()],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
for row in &enums {
|
||||||
|
let type_name: String = row.get(0);
|
||||||
|
let labels: String = row.get(1);
|
||||||
|
let label_list: Vec<&str> = labels.split(',').collect();
|
||||||
|
let quoted_labels = label_list
|
||||||
|
.iter()
|
||||||
|
.map(|l| format!("'{}'", l))
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
.join(", ");
|
||||||
|
|
||||||
|
let create_enum = format!(
|
||||||
|
"CREATE TYPE {}.\"{}\" AS ENUM ({})",
|
||||||
|
source.schema(),
|
||||||
|
type_name,
|
||||||
|
quoted_labels
|
||||||
|
);
|
||||||
|
|
||||||
|
if let Err(e) = dest.batch_execute(&create_enum).await {
|
||||||
|
warn!("Failed to create enum {}: {:?}", type_name, e);
|
||||||
|
} else {
|
||||||
|
info!(" Created enum: {}", type_name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get and apply table definitions
|
||||||
|
for table in &tables {
|
||||||
|
info!("Creating table: {}", table);
|
||||||
|
|
||||||
|
// Use pg_attribute and format_type() for proper type names (handles arrays, enums, etc.)
|
||||||
|
let rows = source
|
||||||
|
.query(
|
||||||
|
r#"
|
||||||
|
SELECT
|
||||||
|
'CREATE TABLE IF NOT EXISTS ' || $1 || '."' || c.relname || '" (' ||
|
||||||
|
string_agg(
|
||||||
|
'"' || a.attname || '" ' ||
|
||||||
|
format_type(a.atttypid, a.atttypmod) ||
|
||||||
|
CASE WHEN a.attnotnull THEN ' NOT NULL' ELSE '' END ||
|
||||||
|
CASE WHEN d.adrelid IS NOT NULL THEN ' DEFAULT ' || pg_get_expr(d.adbin, d.adrelid) ELSE '' END,
|
||||||
|
', '
|
||||||
|
ORDER BY a.attnum
|
||||||
|
) || ')'
|
||||||
|
FROM pg_class c
|
||||||
|
JOIN pg_namespace n ON n.oid = c.relnamespace
|
||||||
|
JOIN pg_attribute a ON a.attrelid = c.oid
|
||||||
|
LEFT JOIN pg_attrdef d ON d.adrelid = c.oid AND d.adnum = a.attnum
|
||||||
|
WHERE n.nspname = $1
|
||||||
|
AND c.relname = $2
|
||||||
|
AND a.attnum > 0
|
||||||
|
AND NOT a.attisdropped
|
||||||
|
GROUP BY c.relname
|
||||||
|
"#,
|
||||||
|
&[&source.schema(), table],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
if let Some(row) = rows.first() {
|
||||||
|
let create_sql: String = row.get(0);
|
||||||
|
if let Err(e) = dest.batch_execute(&create_sql).await {
|
||||||
|
warn!("Failed to create table {}: {:?}", table, e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Copy indexes
|
||||||
|
info!("Creating indexes...");
|
||||||
|
let indexes = source
|
||||||
|
.query(
|
||||||
|
r#"
|
||||||
|
SELECT indexdef
|
||||||
|
FROM pg_indexes
|
||||||
|
WHERE schemaname = $1
|
||||||
|
AND indexname NOT LIKE '%_pkey'
|
||||||
|
"#,
|
||||||
|
&[&source.schema()],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
for row in indexes {
|
||||||
|
let indexdef: String = row.get(0);
|
||||||
|
if let Err(e) = dest.batch_execute(&indexdef).await {
|
||||||
|
warn!("Failed to create index: {} (may already exist)", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Copy constraints
|
||||||
|
info!("Creating constraints...");
|
||||||
|
let constraints = source
|
||||||
|
.query(
|
||||||
|
r#"
|
||||||
|
SELECT
|
||||||
|
'ALTER TABLE ' || $1 || '."' || tc.table_name || '" ADD CONSTRAINT "' ||
|
||||||
|
tc.constraint_name || '" ' ||
|
||||||
|
CASE tc.constraint_type
|
||||||
|
WHEN 'PRIMARY KEY' THEN 'PRIMARY KEY (' || string_agg('"' || kcu.column_name || '"', ', ') || ')'
|
||||||
|
WHEN 'UNIQUE' THEN 'UNIQUE (' || string_agg('"' || kcu.column_name || '"', ', ') || ')'
|
||||||
|
WHEN 'FOREIGN KEY' THEN
|
||||||
|
'FOREIGN KEY (' || string_agg('"' || kcu.column_name || '"', ', ') || ') REFERENCES ' ||
|
||||||
|
$1 || '."' || ccu.table_name || '" (' || string_agg('"' || ccu.column_name || '"', ', ') || ')'
|
||||||
|
ELSE ''
|
||||||
|
END
|
||||||
|
FROM information_schema.table_constraints tc
|
||||||
|
JOIN information_schema.key_column_usage kcu
|
||||||
|
ON tc.constraint_name = kcu.constraint_name AND tc.table_schema = kcu.table_schema
|
||||||
|
LEFT JOIN information_schema.constraint_column_usage ccu
|
||||||
|
ON tc.constraint_name = ccu.constraint_name AND tc.table_schema = ccu.table_schema
|
||||||
|
WHERE tc.table_schema = $1
|
||||||
|
AND tc.constraint_type IN ('PRIMARY KEY', 'UNIQUE', 'FOREIGN KEY')
|
||||||
|
GROUP BY tc.table_name, tc.constraint_name, tc.constraint_type, ccu.table_name
|
||||||
|
"#,
|
||||||
|
&[&source.schema()],
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
for row in constraints {
|
||||||
|
let constraint_sql: String = row.get(0);
|
||||||
|
if let Err(e) = dest.batch_execute(&constraint_sql).await {
|
||||||
|
warn!("Failed to create constraint: {} (may already exist)", e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
info!("Schema migration complete");
|
||||||
|
Ok(())
|
||||||
|
}
/// Migrate data from source to destination
pub async fn migrate_data(source: &Database, dest: &Database, skip_large: bool) -> Result<()> {
    let tables = source.get_tables().await?;

    let tables_to_migrate: Vec<_> = if skip_large {
        tables
            .into_iter()
            .filter(|t| !LARGE_TABLES.contains(&t.as_str()))
            .collect()
    } else {
        tables
    };

    info!("Migrating {} tables", tables_to_migrate.len());

    if skip_large {
        info!("Skipping large tables: {:?}", LARGE_TABLES);
    }

    // Disable triggers for faster import
    dest.batch_execute("SET session_replication_role = 'replica'")
        .await?;

    for table in &tables_to_migrate {
        migrate_table(source, dest, table).await?;
    }

    // Re-enable triggers
    dest.batch_execute("SET session_replication_role = 'origin'")
        .await?;

    info!("Data migration complete");
    Ok(())
}
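Because `migrate_table(...)?` propagates the first error, a failure mid-run leaves the destination session in `replica` mode. A hedged sketch of a small wrapper that always restores the trigger setting (assuming `batch_execute` runs on the same session that performs the inserts):

```rust
use anyhow::Result;

/// Sketch only: run a migration body between the replica/origin toggles and
/// restore the setting even when the body fails.
pub async fn with_replica_mode<F, Fut>(dest: &Database, body: F) -> Result<()>
where
    F: FnOnce() -> Fut,
    Fut: std::future::Future<Output = Result<()>>,
{
    dest.batch_execute("SET session_replication_role = 'replica'").await?;
    let result = body().await;
    // Always switch back before reporting the body's outcome.
    dest.batch_execute("SET session_replication_role = 'origin'").await?;
    result
}
```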
/// Migrate a single table
pub async fn migrate_table(source: &Database, dest: &Database, table: &str) -> Result<()> {
    let source_count = source.get_row_count(table).await?;
    let (_, size) = source.get_table_size(table).await?;

    info!("Migrating {}: {} rows ({})", table, source_count, size);

    if source_count == 0 {
        info!("  Skipping empty table");
        return Ok(());
    }

    // Check if destination already has data
    let dest_count = dest.get_row_count(table).await.unwrap_or(0);
    if dest_count > 0 {
        warn!(
            "  Destination already has {} rows, skipping (use --force to overwrite)",
            dest_count
        );
        return Ok(());
    }

    let pb = ProgressBar::new(source_count as u64);
    pb.set_style(
        ProgressStyle::default_bar()
            .template("{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {pos}/{len} ({eta})")
            .unwrap()
            .progress_chars("#>-"),
    );

    // Get column names
    let columns = source
        .query(
            r#"
            SELECT column_name
            FROM information_schema.columns
            WHERE table_schema = $1 AND table_name = $2
            ORDER BY ordinal_position
            "#,
            &[&source.schema(), &table],
        )
        .await?;

    let column_names: Vec<String> = columns.iter().map(|r| r.get(0)).collect();
    let columns_str = column_names
        .iter()
        .map(|c| format!("\"{}\"", c))
        .collect::<Vec<_>>()
        .join(", ");

    // Stream data in batches
    let batch_size = 10000;
    let mut offset = 0i64;

    while offset < source_count {
        let sql = format!(
            "SELECT {} FROM {}.\"{}\" ORDER BY 1 LIMIT {} OFFSET {}",
            columns_str,
            source.schema(),
            table,
            batch_size,
            offset
        );

        let rows = source.query(&sql, &[]).await?;
        if rows.is_empty() {
            break;
        }

        // Build a parameterized INSERT template ($1..$n). Note: the simplified
        // per-row path below interpolates literal values instead, so this
        // template is not used yet.
        let placeholders: Vec<String> = (0..column_names.len())
            .map(|i| format!("${}", i + 1))
            .collect();

        let insert_sql = format!(
            "INSERT INTO {}.\"{}\" ({}) VALUES ({})",
            dest.schema(),
            table,
            columns_str,
            placeholders.join(", ")
        );

        // This is a simplified version - for production, we'd use the COPY protocol.
        // For now, build one literal-value INSERT per row.
        for row in &rows {
            // Build values dynamically based on column types
            // This is simplified - full implementation would handle all types
            let values: Vec<String> = (0..column_names.len())
                .map(|i| {
                    let col_name = &column_names[i];

                    // Try to get as different types and format appropriately
                    let is_null = if let Ok(v) = row.try_get::<_, Option<String>>(i) {
                        match v {
                            Some(s) => return format!("'{}'", s.replace('\'', "''")),
                            None => true,
                        }
                    } else if let Ok(v) = row.try_get::<_, Option<i64>>(i) {
                        match v {
                            Some(n) => return n.to_string(),
                            None => true,
                        }
                    } else if let Ok(v) = row.try_get::<_, Option<bool>>(i) {
                        match v {
                            Some(b) => return b.to_string(),
                            None => true,
                        }
                    } else {
                        true
                    };

                    // If NULL, check if we have a default for this column
                    if is_null {
                        if let Some(default) = get_null_default(table, col_name) {
                            return default.to_string();
                        }
                    }
                    "NULL".to_string()
                })
                .collect();

            let insert = format!(
                "INSERT INTO {}.\"{}\" ({}) VALUES ({})",
                dest.schema(),
                table,
                columns_str,
                values.join(", ")
            );

            if let Err(e) = dest.batch_execute(&insert).await {
                warn!("Failed to insert row: {:?}", e);
            }
        }

        offset += rows.len() as i64;
        pb.set_position(offset as u64);
    }

    pb.finish_with_message(format!("{} complete", table));

    // Verify
    let final_count = dest.get_row_count(table).await?;
    if final_count != source_count {
        warn!(
            "  Row count mismatch! Source: {}, Dest: {}",
            source_count, final_count
        );
    } else {
        info!("  Verified: {} rows", final_count);
    }

    Ok(())
}
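The comments above point at the COPY protocol as the production path. A hedged, untested sketch of what that could look like, assuming raw `tokio_postgres::Client` handles are available for both sides (the tool's `Database` wrapper may not expose them exactly like this):

```rust
use anyhow::Result;
use futures_util::{pin_mut, SinkExt, TryStreamExt};
use tokio_postgres::Client;

/// Sketch only: stream one table with COPY ... TO STDOUT / FROM STDIN in text
/// format, forwarding raw data chunks from source to destination.
pub async fn copy_table(source: &Client, dest: &Client, schema: &str, table: &str) -> Result<()> {
    let copy_out_sql = format!("COPY {}.\"{}\" TO STDOUT", schema, table);
    let copy_in_sql = format!("COPY {}.\"{}\" FROM STDIN", schema, table);

    let reader = source.copy_out(copy_out_sql.as_str()).await?;
    let writer = dest.copy_in(copy_in_sql.as_str()).await?;
    pin_mut!(reader);
    pin_mut!(writer);

    // Forward each chunk of COPY data as-is.
    while let Some(chunk) = reader.try_next().await? {
        writer.send(chunk).await?;
    }
    // Complete the COPY and report how many rows the server accepted.
    let rows = writer.finish().await?;
    tracing::info!("  COPY streamed {} rows for {}", rows, table);
    Ok(())
}
```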
/// Migrate a single user and their related data
pub async fn migrate_single_user(source: &Database, dest: &Database, user_id: &str) -> Result<()> {
    info!("Migrating data for user: {}", user_id);

    // Tables to migrate with user_id column (platform tables use String IDs)
    let user_tables = vec![
        ("User", "id"),
        ("Profile", "userId"),
        ("UserOnboarding", "userId"),
        ("UserBalance", "userId"),
    ];

    // Disable triggers
    dest.batch_execute("SET session_replication_role = 'replica'")
        .await?;

    for (table, id_col) in &user_tables {
        info!("  Checking {}...", table);

        // Check if user exists in this table (IDs are Strings in platform schema)
        let check_sql = format!(
            "SELECT COUNT(*) FROM {}.\"{}\" WHERE \"{}\" = $1",
            source.schema(),
            table,
            id_col
        );
        let rows = source.query(&check_sql, &[&user_id]).await?;
        let count: i64 = rows.first().map(|r| r.get(0)).unwrap_or(0);

        if count == 0 {
            info!("    No data in {}", table);
            continue;
        }

        // Get column names
        let columns = source
            .query(
                r#"
                SELECT column_name
                FROM information_schema.columns
                WHERE table_schema = $1 AND table_name = $2
                ORDER BY ordinal_position
                "#,
                &[&source.schema(), table],
            )
            .await?;

        let column_names: Vec<String> = columns.iter().map(|r| r.get(0)).collect();
        let columns_str = column_names
            .iter()
            .map(|c| format!("\"{}\"", c))
            .collect::<Vec<_>>()
            .join(", ");

        // Get data for this user
        let select_sql = format!(
            "SELECT {} FROM {}.\"{}\" WHERE \"{}\" = $1",
            columns_str,
            source.schema(),
            table,
            id_col
        );
        let data_rows = source.query(&select_sql, &[&user_id]).await?;

        info!("    Found {} rows in {}", data_rows.len(), table);

        // Insert into destination
        for row in &data_rows {
            let values: Vec<String> = (0..column_names.len())
                .map(|i| {
                    let col_name = &column_names[i];

                    let is_null = if let Ok(v) = row.try_get::<_, Option<String>>(i) {
                        match v {
                            Some(s) => return format!("'{}'", s.replace('\'', "''")),
                            None => true,
                        }
                    } else if let Ok(v) = row.try_get::<_, Option<i64>>(i) {
                        match v {
                            Some(n) => return n.to_string(),
                            None => true,
                        }
                    } else if let Ok(v) = row.try_get::<_, Option<bool>>(i) {
                        match v {
                            Some(b) => return b.to_string(),
                            None => true,
                        }
                    } else if let Ok(v) = row.try_get::<_, Option<uuid::Uuid>>(i) {
                        match v {
                            Some(u) => return format!("'{}'", u),
                            None => true,
                        }
                    } else {
                        true
                    };

                    // If NULL, check if we have a default for this column
                    if is_null {
                        if let Some(default) = get_null_default(table, col_name) {
                            return default.to_string();
                        }
                    }
                    "NULL".to_string()
                })
                .collect();

            let insert_sql = format!(
                "INSERT INTO {}.\"{}\" ({}) VALUES ({}) ON CONFLICT DO NOTHING",
                dest.schema(),
                table,
                columns_str,
                values.join(", ")
            );

            if let Err(e) = dest.batch_execute(&insert_sql).await {
                warn!("    Failed to insert into {}: {}", table, e);
            }
        }

        info!("  Migrated {} to destination", table);
    }

    // Re-enable triggers
    dest.batch_execute("SET session_replication_role = 'origin'")
        .await?;

    Ok(())
}
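For completeness, a usage sketch of the single-user path combined with the row-count verifier added in verify.rs below; this helper is illustrative, not part of the tool itself:

```rust
use anyhow::Result;

/// Sketch only: migrate one user, then spot-check overall row counts.
pub async fn migrate_and_check_user(source: &Database, dest: &Database, user_id: &str) -> Result<()> {
    migrate_single_user(source, dest, user_id).await?;
    crate::verify::verify_row_counts(source, dest).await
}
```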
/// Stream large tables using COPY protocol
pub async fn stream_large_tables(
    source: &Database,
    dest: &Database,
    specific_table: Option<String>,
) -> Result<()> {
    let tables: Vec<&str> = if let Some(ref t) = specific_table {
        vec![t.as_str()]
    } else {
        LARGE_TABLES.to_vec()
    };

    info!("Streaming {} large table(s)", tables.len());

    // Disable triggers
    dest.batch_execute("SET session_replication_role = 'replica'")
        .await?;

    for table in tables {
        let source_count = source.get_row_count(table).await?;
        let (bytes, size) = source.get_table_size(table).await?;

        info!("Streaming {}: {} rows ({})", table, source_count, size);

        if source_count == 0 {
            info!("  Skipping empty table");
            continue;
        }

        let pb = ProgressBar::new(bytes as u64);
        pb.set_style(
            ProgressStyle::default_bar()
                .template("{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {bytes}/{total_bytes} ({bytes_per_sec})")
                .unwrap(),
        );

        // Stream using pg_dump/psql approach (simpler, works reliably).
        // For now this pass only reads the table in batches to drive the
        // progress estimate; rows are not yet written to the destination here.
        let batch_size = 50000i64;
        let mut offset = 0i64;
        let mut total_bytes = 0u64;

        while offset < source_count {
            let sql = format!(
                "SELECT * FROM {}.\"{}\" ORDER BY 1 LIMIT {} OFFSET {}",
                source.schema(),
                table,
                batch_size,
                offset
            );

            let rows = source.query(&sql, &[]).await?;
            if rows.is_empty() {
                break;
            }

            // Estimate bytes processed
            total_bytes += (rows.len() * 1000) as u64; // Rough estimate
            pb.set_position(std::cmp::min(total_bytes, bytes as u64));

            offset += rows.len() as i64;
            info!("  Processed {}/{} rows", offset, source_count);
        }

        pb.finish_with_message(format!("{} complete", table));

        // Verify
        let final_count = dest.get_row_count(table).await?;
        info!(
            "  Transferred: {} rows ({} bytes)",
            final_count, total_bytes
        );
    }

    // Re-enable triggers
    dest.batch_execute("SET session_replication_role = 'origin'")
        .await?;

    Ok(())
}
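As the comments note, this pass currently only reads batches to drive the progress bar. Until a COPY-based writer lands, one hedged fallback is to reuse the batched INSERT path for the large tables as well (sketch only, not part of the PR):

```rust
use anyhow::Result;

/// Sketch only: reuse migrate_table for the LARGE_TABLES list. Slower than
/// COPY, but it actually moves the rows.
pub async fn stream_large_tables_via_inserts(source: &Database, dest: &Database) -> Result<()> {
    dest.batch_execute("SET session_replication_role = 'replica'").await?;
    for table in LARGE_TABLES.iter().copied() {
        migrate_table(source, dest, table).await?;
    }
    dest.batch_execute("SET session_replication_role = 'origin'").await?;
    Ok(())
}
```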
autogpt_platform/backend/tools/db-migrate/src/verify.rs (new file, 212 lines)
@@ -0,0 +1,212 @@
use crate::db::Database;
use anyhow::Result;
use comfy_table::{presets::UTF8_FULL, Table, Cell, Color};
use tracing::{info, warn, error};

/// Show table sizes in the database
pub async fn show_table_sizes(db: &Database) -> Result<()> {
    let tables = db.get_tables().await?;

    let mut table = Table::new();
    table.load_preset(UTF8_FULL);
    table.set_header(vec!["Table", "Rows", "Size"]);

    let mut total_bytes: i64 = 0;
    let mut total_rows: i64 = 0;

    for t in &tables {
        let count = db.get_row_count(t).await?;
        let (bytes, size) = db.get_table_size(t).await?;

        total_bytes += bytes;
        total_rows += count;

        table.add_row(vec![t.clone(), count.to_string(), size]);
    }

    println!("\n{}", table);
    println!(
        "\nTotal: {} rows, {} bytes ({:.2} GB)",
        total_rows,
        total_bytes,
        total_bytes as f64 / 1_073_741_824.0
    );

    Ok(())
}

/// Verify row counts match between source and destination
pub async fn verify_row_counts(source: &Database, dest: &Database) -> Result<()> {
    info!("Verifying row counts...");

    let tables = source.get_tables().await?;

    let mut table = Table::new();
    table.load_preset(UTF8_FULL);
    table.set_header(vec!["Table", "Source", "Dest", "Status"]);

    let mut all_match = true;
    let mut total_source: i64 = 0;
    let mut total_dest: i64 = 0;

    for t in &tables {
        let source_count = source.get_row_count(t).await?;
        let dest_count = dest.get_row_count(t).await.unwrap_or(0);

        total_source += source_count;
        total_dest += dest_count;

        let status = if source_count == dest_count {
            Cell::new("✓").fg(Color::Green)
        } else if dest_count == 0 {
            all_match = false;
            Cell::new("MISSING").fg(Color::Yellow)
        } else {
            all_match = false;
            Cell::new("MISMATCH").fg(Color::Red)
        };

        table.add_row(vec![
            Cell::new(t),
            Cell::new(source_count),
            Cell::new(dest_count),
            status,
        ]);
    }

    println!("\n{}", table);
    println!("\nTotal: Source={}, Dest={}", total_source, total_dest);

    if all_match {
        info!("All row counts match!");
    } else {
        warn!("Some tables have mismatched row counts");
    }

    Ok(())
}

/// Verify functions and triggers exist in destination
pub async fn verify_functions(source: &Database, dest: &Database) -> Result<()> {
    info!("Verifying functions...");

    let source_funcs = source.get_functions().await?;
    let dest_funcs = dest.get_functions().await?;

    let dest_func_names: std::collections::HashSet<_> =
        dest_funcs.iter().map(|(n, _)| n.clone()).collect();

    let mut table = Table::new();
    table.load_preset(UTF8_FULL);
    table.set_header(vec!["Function", "Status"]);

    let mut all_present = true;

    for (name, _def) in &source_funcs {
        let status = if dest_func_names.contains(name) {
            Cell::new("✓").fg(Color::Green)
        } else {
            all_present = false;
            Cell::new("MISSING").fg(Color::Red)
        };

        table.add_row(vec![Cell::new(name), status]);
    }

    println!("\nFunctions:\n{}", table);

    // Verify triggers
    info!("Verifying triggers...");

    let source_triggers = source.get_triggers().await?;
    let dest_triggers = dest.get_triggers().await?;

    let dest_trigger_names: std::collections::HashSet<_> =
        dest_triggers.iter().map(|(n, _, _)| n.clone()).collect();

    let mut table = Table::new();
    table.load_preset(UTF8_FULL);
    table.set_header(vec!["Trigger", "Table", "Status"]);

    for (name, tbl, _def) in &source_triggers {
        let status = if dest_trigger_names.contains(name) {
            Cell::new("✓").fg(Color::Green)
        } else {
            all_present = false;
            Cell::new("MISSING").fg(Color::Red)
        };

        table.add_row(vec![Cell::new(name), Cell::new(tbl), status]);
    }

    println!("\nTriggers:\n{}", table);

    // Verify materialized views
    info!("Verifying materialized views...");

    let source_views = source.get_materialized_views().await?;
    let dest_views = dest.get_materialized_views().await?;

    let dest_view_names: std::collections::HashSet<_> = dest_views.into_iter().collect();

    let mut table = Table::new();
    table.load_preset(UTF8_FULL);
    table.set_header(vec!["Materialized View", "Status"]);

    for name in &source_views {
        let status = if dest_view_names.contains(name) {
            Cell::new("✓").fg(Color::Green)
        } else {
            all_present = false;
            Cell::new("MISSING").fg(Color::Red)
        };

        table.add_row(vec![Cell::new(name), status]);
    }

    println!("\nMaterialized Views:\n{}", table);

    if all_present {
        info!("All functions, triggers, and views present!");
    } else {
        error!("Some database objects are missing in destination");
    }

    Ok(())
}

/// Verify data integrity with checksums
pub async fn verify_checksums(source: &Database, dest: &Database, table: &str) -> Result<bool> {
    info!("Computing checksums for {}...", table);

    // Get checksum of all data
    let checksum_sql = format!(
        r#"
        SELECT md5(string_agg(t::text, ''))
        FROM (SELECT * FROM {}."{}" ORDER BY 1) t
        "#,
        source.schema(),
        table
    );

    let source_rows = source.query(&checksum_sql, &[]).await?;
    let dest_rows = dest.query(&checksum_sql, &[]).await?;

    let source_checksum: Option<String> = source_rows.first().and_then(|r| r.get(0));
    let dest_checksum: Option<String> = dest_rows.first().and_then(|r| r.get(0));

    match (source_checksum, dest_checksum) {
        (Some(s), Some(d)) if s == d => {
            info!("  {} checksum match: {}", table, s);
            Ok(true)
        }
        (Some(s), Some(d)) => {
            error!("  {} checksum MISMATCH: {} vs {}", table, s, d);
            Ok(false)
        }
        _ => {
            warn!("  {} could not compute checksum", table);
            Ok(false)
        }
    }
}
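A small usage sketch tying the verifiers above together; the helper and its table list are illustrative, not part of verify.rs:

```rust
use anyhow::Result;

use crate::db::Database;
use crate::verify::{verify_checksums, verify_functions, verify_row_counts};

/// Sketch only: one possible "verify everything" pass built from the functions above.
pub async fn verify_all(source: &Database, dest: &Database) -> Result<()> {
    verify_row_counts(source, dest).await?;
    verify_functions(source, dest).await?;
    // Checksums are expensive, so limit them to a few critical tables.
    for table in ["User", "Profile"] {
        let _matches = verify_checksums(source, dest, table).await?;
    }
    Ok(())
}
```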
@@ -1,3 +1,30 @@
-# Supabase Docker
+# AutoGPT Database Docker

-This is a minimal Docker Compose setup for self-hosting Supabase. Follow the steps [here](https://supabase.com/docs/guides/hosting/docker) to get started.
+This is a minimal Docker Compose setup for running PostgreSQL for the AutoGPT Platform.
+
+## Usage
+
+```bash
+# Start the database
+docker compose up -d
+
+# Stop the database
+docker compose down
+
+# Destroy (remove volumes)
+docker compose down -v --remove-orphans
+```
+
+## Configuration
+
+The PostgreSQL database is configured with:
+- Logical replication enabled (for Prisma)
+- pgvector/pgvector:pg18 image (PostgreSQL 18 with pgvector extension for AI embeddings)
+- Data persisted in `./volumes/db/data`
+
+## Environment Variables
+
+You can override the default configuration by setting environment variables:
+- `POSTGRES_USER` - Database user (default: postgres)
+- `POSTGRES_PASSWORD` - Database password (default: your-super-secret-and-long-postgres-password)
+- `POSTGRES_DB` - Database name (default: postgres)
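For reference, connecting to this database from Rust with the documented defaults looks roughly like the sketch below; tokio-postgres is just an example client here, not something this setup prescribes:

```rust
use tokio_postgres::NoTls;

// Sketch: a quick connectivity check against the compose-managed database,
// using the default credentials listed above.
#[tokio::main]
async fn main() -> Result<(), tokio_postgres::Error> {
    let (client, connection) = tokio_postgres::connect(
        "host=localhost port=5432 user=postgres password=your-super-secret-and-long-postgres-password dbname=postgres",
        NoTls,
    )
    .await?;
    // Drive the connection in the background.
    tokio::spawn(async move {
        if let Err(e) = connection.await {
            eprintln!("connection error: {e}");
        }
    });

    let row = client.query_one("SELECT version()", &[]).await?;
    let version: String = row.get(0);
    println!("connected: {version}");
    Ok(())
}
```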
@@ -1,48 +1,5 @@
-create table profiles (
-  id uuid references auth.users not null,
-  updated_at timestamp with time zone,
-  username text unique,
-  avatar_url text,
-  website text,
-
-  primary key (id),
-  unique(username),
-  constraint username_length check (char_length(username) >= 3)
-);
-
-alter table profiles enable row level security;
-
-create policy "Public profiles are viewable by the owner."
-  on profiles for select
-  using ( auth.uid() = id );
-
-create policy "Users can insert their own profile."
-  on profiles for insert
-  with check ( auth.uid() = id );
-
-create policy "Users can update own profile."
-  on profiles for update
-  using ( auth.uid() = id );
-
--- Set up Realtime
-begin;
-  drop publication if exists supabase_realtime;
-  create publication supabase_realtime;
-commit;
-alter publication supabase_realtime add table profiles;
-
--- Set up Storage
-insert into storage.buckets (id, name)
-values ('avatars', 'avatars');
-
-create policy "Avatar images are publicly accessible."
-  on storage.objects for select
-  using ( bucket_id = 'avatars' );
-
-create policy "Anyone can upload an avatar."
-  on storage.objects for insert
-  with check ( bucket_id = 'avatars' );
-
-create policy "Anyone can update an avatar."
-  on storage.objects for update
-  with check ( bucket_id = 'avatars' );
+-- Development seed data
+-- Add any seed data for development here
+
+-- This file is mounted into the PostgreSQL container at startup
+-- Any SQL here will be executed after the database is initialized
@@ -1,27 +1,13 @@
version: "3.8"

services:
-  studio:
-    build:
-      context: ..
-      dockerfile: studio/Dockerfile
-      target: dev
-    ports:
-      - 8082:8082
  mail:
-    container_name: supabase-mail
+    container_name: autogpt-mail
    image: inbucket/inbucket:3.0.3
    ports:
      - '2500:2500' # SMTP
      - '9000:9000' # web interface
      - '1100:1100' # POP3
-  auth:
-    environment:
-      - GOTRUE_SMTP_USER=
-      - GOTRUE_SMTP_PASS=
-  meta:
-    ports:
-      - 5555:8080
  db:
    restart: 'no'
    volumes:
@@ -29,6 +15,3 @@ services:
      - /var/lib/postgresql/data
      # Seed data should be inserted last (alphabetical order)
      - ./dev/data.sql:/docker-entrypoint-initdb.d/seed.sql
-  storage:
-    volumes:
-      - /var/lib/storage
@@ -1,430 +1,17 @@
# Usage
# Start: docker compose up
-# With helpers: docker compose -f docker-compose.yml -f ./dev/docker-compose.dev.yml up
# Stop: docker compose down
-# Destroy: docker compose -f docker-compose.yml -f ./dev/docker-compose.dev.yml down -v --remove-orphans
-# Reset everything: ./reset.sh
+# Destroy: docker compose down -v --remove-orphans

-# Environment Variable Loading Order (first → last, later overrides earlier):
-# 1. ../../.env.default - Default values for all Supabase settings
-# 2. ../../.env - User's custom configuration (if exists)
-# 3. ./.env - Local overrides specific to db/docker (if exists)
-# 4. environment key - Service-specific overrides defined below
-# 5. Shell environment - Variables exported before running docker compose
-
-name: supabase
+name: autogpt-db

-# Common env_file configuration for all Supabase services
-x-supabase-env-files: &supabase-env-files
-  env_file:
-    - ../../.env.default # Base defaults from platform root
-    - path: ../../.env # User overrides from platform root (optional)
-      required: false
-    - path: ./.env # Local overrides for db/docker (optional)
-      required: false
-
-# Common Supabase environment - hardcoded defaults to avoid variable substitution
-x-supabase-env: &supabase-env
-  SUPABASE_ANON_KEY: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
-  SUPABASE_SERVICE_KEY: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
-
services:
-  studio:
-    container_name: supabase-studio
-    image: supabase/studio:20250224-d10db0f
-    restart: unless-stopped
-    healthcheck:
-      test:
-        [
-          "CMD",
-          "node",
-          "-e",
-          "fetch('http://studio:3000/api/platform/profile').then((r) => {if (r.status !== 200) throw new Error(r.status)})"
-        ]
-      timeout: 10s
-      interval: 5s
-      retries: 3
-    <<: *supabase-env-files
-    environment:
-      <<: *supabase-env
-      # Keep any existing environment variables specific to that service
-      STUDIO_PG_META_URL: http://meta:8080
-      DEFAULT_ORGANIZATION_NAME: Default Organization
-      DEFAULT_PROJECT_NAME: Default Project
-      SUPABASE_URL: http://kong:8000
-      AUTH_JWT_SECRET: your-super-secret-jwt-token-with-at-least-32-characters-long
-
-      LOGFLARE_API_KEY: your-super-secret-and-long-logflare-key
-      LOGFLARE_URL: http://analytics:4000
-      NEXT_PUBLIC_ENABLE_LOGS: true
-      # Comment to use Big Query backend for analytics
-      NEXT_ANALYTICS_BACKEND_PROVIDER: postgres
-      # Uncomment to use Big Query backend for analytics
-      # NEXT_ANALYTICS_BACKEND_PROVIDER: bigquery
-
-  kong:
-    container_name: supabase-kong
-    image: kong:2.8.1
-    restart: unless-stopped
-    ports:
-      - 8000:8000/tcp
-      - 8443:8443/tcp
-    volumes:
-      # https://github.com/supabase/supabase/issues/12661
-      - ./volumes/api/kong.yml:/home/kong/temp.yml:ro
-    <<: *supabase-env-files
-    environment:
-      <<: *supabase-env
-      # Keep any existing environment variables specific to that service
-      KONG_DATABASE: "off"
-      KONG_DECLARATIVE_CONFIG: /home/kong/kong.yml
-      # https://github.com/supabase/cli/issues/14
-      KONG_DNS_ORDER: LAST,A,CNAME
-      KONG_PLUGINS: request-transformer,cors,key-auth,acl,basic-auth
-      KONG_NGINX_PROXY_PROXY_BUFFER_SIZE: 160k
-      KONG_NGINX_PROXY_PROXY_BUFFERS: 64 160k
-    # https://unix.stackexchange.com/a/294837
-    entrypoint: bash -c 'eval "echo \"$$(cat ~/temp.yml)\"" > ~/kong.yml && /docker-entrypoint.sh kong docker-start'
-
-  auth:
-    container_name: supabase-auth
-    image: supabase/gotrue:v2.170.0
-    restart: unless-stopped
-    healthcheck:
-      test:
-        [
-          "CMD",
-          "wget",
-          "--no-verbose",
-          "--tries=1",
-          "--spider",
-          "http://localhost:9999/health"
-        ]
-      timeout: 5s
-      interval: 5s
-      retries: 3
-    depends_on:
-      db:
-        # Disable this if you are using an external Postgres database
-        condition: service_healthy
-    <<: *supabase-env-files
-    environment:
-      <<: *supabase-env
-      # Keep any existing environment variables specific to that service
-      GOTRUE_API_HOST: 0.0.0.0
-      GOTRUE_API_PORT: 9999
-
-      GOTRUE_DB_DRIVER: postgres
-      GOTRUE_DB_DATABASE_URL: postgres://supabase_auth_admin:your-super-secret-and-long-postgres-password@db:5432/postgres
-
-      GOTRUE_SITE_URL: http://localhost:3000
-      GOTRUE_URI_ALLOW_LIST: ""
-      GOTRUE_DISABLE_SIGNUP: false
-
-      GOTRUE_JWT_ADMIN_ROLES: service_role
-      GOTRUE_JWT_AUD: authenticated
-      GOTRUE_JWT_DEFAULT_GROUP_NAME: authenticated
-      GOTRUE_JWT_EXP: 3600
-      GOTRUE_JWT_SECRET: your-super-secret-jwt-token-with-at-least-32-characters-long
-
-      GOTRUE_EXTERNAL_EMAIL_ENABLED: true
-      GOTRUE_EXTERNAL_ANONYMOUS_USERS_ENABLED: false
-      GOTRUE_MAILER_AUTOCONFIRM: false
-
-      # Uncomment to bypass nonce check in ID Token flow. Commonly set to true when using Google Sign In on mobile.
-      # GOTRUE_EXTERNAL_SKIP_NONCE_CHECK: true
-
-      # GOTRUE_MAILER_SECURE_EMAIL_CHANGE_ENABLED: true
-      # GOTRUE_SMTP_MAX_FREQUENCY: 1s
-      GOTRUE_SMTP_ADMIN_EMAIL: admin@example.com
-      GOTRUE_SMTP_HOST: supabase-mail
-      GOTRUE_SMTP_PORT: 2500
-      GOTRUE_SMTP_USER: fake_mail_user
-      GOTRUE_SMTP_PASS: fake_mail_password
-      GOTRUE_SMTP_SENDER_NAME: fake_sender
-      GOTRUE_MAILER_URLPATHS_INVITE: /auth/v1/verify
-      GOTRUE_MAILER_URLPATHS_CONFIRMATION: /auth/v1/verify
-      GOTRUE_MAILER_URLPATHS_RECOVERY: /auth/v1/verify
-      GOTRUE_MAILER_URLPATHS_EMAIL_CHANGE: /auth/v1/verify
-
-      GOTRUE_EXTERNAL_PHONE_ENABLED: true
-      GOTRUE_SMS_AUTOCONFIRM: true
-      # Uncomment to enable custom access token hook. Please see: https://supabase.com/docs/guides/auth/auth-hooks for full list of hooks and additional details about custom_access_token_hook
-
-      # GOTRUE_HOOK_CUSTOM_ACCESS_TOKEN_ENABLED: "true"
-      # GOTRUE_HOOK_CUSTOM_ACCESS_TOKEN_URI: "pg-functions://postgres/public/custom_access_token_hook"
-      # GOTRUE_HOOK_CUSTOM_ACCESS_TOKEN_SECRETS: "<standard-base64-secret>"
-
-      # GOTRUE_HOOK_MFA_VERIFICATION_ATTEMPT_ENABLED: "true"
-      # GOTRUE_HOOK_MFA_VERIFICATION_ATTEMPT_URI: "pg-functions://postgres/public/mfa_verification_attempt"
-
-      # GOTRUE_HOOK_PASSWORD_VERIFICATION_ATTEMPT_ENABLED: "true"
-      # GOTRUE_HOOK_PASSWORD_VERIFICATION_ATTEMPT_URI: "pg-functions://postgres/public/password_verification_attempt"
-
-      # GOTRUE_HOOK_SEND_SMS_ENABLED: "false"
-      # GOTRUE_HOOK_SEND_SMS_URI: "pg-functions://postgres/public/custom_access_token_hook"
-      # GOTRUE_HOOK_SEND_SMS_SECRETS: "v1,whsec_VGhpcyBpcyBhbiBleGFtcGxlIG9mIGEgc2hvcnRlciBCYXNlNjQgc3RyaW5n"
-
-      # GOTRUE_HOOK_SEND_EMAIL_ENABLED: "false"
-      # GOTRUE_HOOK_SEND_EMAIL_URI: "http://host.docker.internal:54321/functions/v1/email_sender"
-      # GOTRUE_HOOK_SEND_EMAIL_SECRETS: "v1,whsec_VGhpcyBpcyBhbiBleGFtcGxlIG9mIGEgc2hvcnRlciBCYXNlNjQgc3RyaW5n"
-
-  rest:
-    container_name: supabase-rest
-    image: postgrest/postgrest:v12.2.8
-    restart: unless-stopped
-    depends_on:
-      db:
-        # Disable this if you are using an external Postgres database
-        condition: service_healthy
-    <<: *supabase-env-files
-    environment:
-      <<: *supabase-env
-      # Keep any existing environment variables specific to that service
-      PGRST_DB_URI: postgres://authenticator:your-super-secret-and-long-postgres-password@db:5432/postgres
-      PGRST_DB_SCHEMAS: public,storage,graphql_public
-      PGRST_DB_ANON_ROLE: anon
-      PGRST_JWT_SECRET: your-super-secret-jwt-token-with-at-least-32-characters-long
-      PGRST_DB_USE_LEGACY_GUCS: "false"
-      PGRST_APP_SETTINGS_JWT_SECRET: your-super-secret-jwt-token-with-at-least-32-characters-long
-      PGRST_APP_SETTINGS_JWT_EXP: 3600
-    command:
-      [
-        "postgrest"
-      ]
-
-  realtime:
-    # This container name looks inconsistent but is correct because realtime constructs tenant id by parsing the subdomain
-    container_name: realtime-dev.supabase-realtime
-    image: supabase/realtime:v2.34.40
-    restart: unless-stopped
-    depends_on:
-      db:
-        # Disable this if you are using an external Postgres database
-        condition: service_healthy
-    healthcheck:
-      test:
-        [
-          "CMD",
-          "curl",
-          "-sSfL",
-          "--head",
-          "-o",
-          "/dev/null",
-          "-H",
-          "Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE",
-          "http://localhost:4000/api/tenants/realtime-dev/health"
-        ]
-      timeout: 5s
-      interval: 5s
-      retries: 3
-    <<: *supabase-env-files
-    environment:
-      <<: *supabase-env
-      # Keep any existing environment variables specific to that service
-      PORT: 4000
-      DB_HOST: db
-      DB_PORT: 5432
-      DB_USER: supabase_admin
-      DB_PASSWORD: your-super-secret-and-long-postgres-password
-      DB_NAME: postgres
-      DB_AFTER_CONNECT_QUERY: 'SET search_path TO _realtime'
-      DB_ENC_KEY: supabaserealtime
-      API_JWT_SECRET: your-super-secret-jwt-token-with-at-least-32-characters-long
-      SECRET_KEY_BASE: UpNVntn3cDxHJpq99YMc1T1AQgQpc8kfYTuRgBiYa15BLrx8etQoXz3gZv1/u2oq
-      ERL_AFLAGS: -proto_dist inet_tcp
-      DNS_NODES: "''"
-      RLIMIT_NOFILE: "10000"
-      APP_NAME: realtime
-      SEED_SELF_HOST: true
-      RUN_JANITOR: true
-
-  # To use S3 backed storage: docker compose -f docker-compose.yml -f docker-compose.s3.yml up
-  storage:
-    container_name: supabase-storage
-    image: supabase/storage-api:v1.19.3
-    restart: unless-stopped
-    volumes:
-      - ./volumes/storage:/var/lib/storage:z
-    healthcheck:
-      test:
-        [
-          "CMD",
-          "wget",
-          "--no-verbose",
-          "--tries=1",
-          "--spider",
-          "http://storage:5000/status"
-        ]
-      timeout: 5s
-      interval: 5s
-      retries: 3
-    depends_on:
-      db:
-        # Disable this if you are using an external Postgres database
-        condition: service_healthy
-      rest:
-        condition: service_started
-      imgproxy:
-        condition: service_started
-    <<: *supabase-env-files
-    environment:
-      <<: *supabase-env
-      # Keep any existing environment variables specific to that service
-      ANON_KEY: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
-      SERVICE_KEY: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
-      POSTGREST_URL: http://rest:3000
-      PGRST_JWT_SECRET: your-super-secret-jwt-token-with-at-least-32-characters-long
-      DATABASE_URL: postgres://supabase_storage_admin:your-super-secret-and-long-postgres-password@db:5432/postgres
-      FILE_SIZE_LIMIT: 52428800
-      STORAGE_BACKEND: file
-      FILE_STORAGE_BACKEND_PATH: /var/lib/storage
-      TENANT_ID: stub
-      # TODO: https://github.com/supabase/storage-api/issues/55
-      REGION: stub
-      GLOBAL_S3_BUCKET: stub
-      ENABLE_IMAGE_TRANSFORMATION: "true"
-      IMGPROXY_URL: http://imgproxy:5001
-
-  imgproxy:
-    container_name: supabase-imgproxy
-    image: darthsim/imgproxy:v3.8.0
-    restart: unless-stopped
-    volumes:
-      - ./volumes/storage:/var/lib/storage:z
-    healthcheck:
-      test:
-        [
-          "CMD",
-          "imgproxy",
-          "health"
-        ]
-      timeout: 5s
-      interval: 5s
-      retries: 3
-    <<: *supabase-env-files
-    environment:
-      <<: *supabase-env
-      # Keep any existing environment variables specific to that service
-      IMGPROXY_BIND: ":5001"
-      IMGPROXY_LOCAL_FILESYSTEM_ROOT: /
-      IMGPROXY_USE_ETAG: "true"
-      IMGPROXY_ENABLE_WEBP_DETECTION: true
-
-  meta:
-    container_name: supabase-meta
-    image: supabase/postgres-meta:v0.86.1
-    restart: unless-stopped
-    depends_on:
-      db:
-        # Disable this if you are using an external Postgres database
-        condition: service_healthy
-    <<: *supabase-env-files
-    environment:
-      <<: *supabase-env
-      # Keep any existing environment variables specific to that service
-      PG_META_PORT: 8080
-      PG_META_DB_HOST: db
-      PG_META_DB_PORT: 5432
-      PG_META_DB_NAME: postgres
-      PG_META_DB_USER: supabase_admin
-      PG_META_DB_PASSWORD: your-super-secret-and-long-postgres-password
-
-  functions:
-    container_name: supabase-edge-functions
-    image: supabase/edge-runtime:v1.67.2
-    restart: unless-stopped
-    volumes:
-      - ./volumes/functions:/home/deno/functions:Z
-    <<: *supabase-env-files
-    environment:
-      <<: *supabase-env
-      # Keep any existing environment variables specific to that service
-      SUPABASE_URL: http://kong:8000
-      SUPABASE_SERVICE_ROLE_KEY: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
-      SUPABASE_DB_URL: postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres
-      # TODO: Allow configuring VERIFY_JWT per function. This PR might help: https://github.com/supabase/cli/pull/786
-      VERIFY_JWT: "false"
-    command:
-      [
-        "start",
-        "--main-service",
-        "/home/deno/functions/main"
-      ]
-
-  analytics:
-    container_name: supabase-analytics
-    image: supabase/logflare:1.12.5
-    restart: unless-stopped
-    ports:
-      - 4000:4000
-    # Uncomment to use Big Query backend for analytics
-    # volumes:
-    #   - type: bind
-    #     source: ${PWD}/gcloud.json
-    #     target: /opt/app/rel/logflare/bin/gcloud.json
-    #     read_only: true
-    healthcheck:
-      test:
-        [
-          "CMD",
-          "curl",
-          "http://localhost:4000/health"
-        ]
-      timeout: 5s
-      interval: 5s
-      retries: 10
-    depends_on:
-      db:
-        # Disable this if you are using an external Postgres database
-        condition: service_healthy
-    <<: *supabase-env-files
-    environment:
-      <<: *supabase-env
-      # Keep any existing environment variables specific to that service
-      LOGFLARE_NODE_HOST: 127.0.0.1
-      DB_USERNAME: supabase_admin
-      DB_DATABASE: _supabase
-      DB_HOSTNAME: db
-      DB_PORT: 5432
-      DB_PASSWORD: your-super-secret-and-long-postgres-password
-      DB_SCHEMA: _analytics
-      LOGFLARE_API_KEY: your-super-secret-and-long-logflare-key
-      LOGFLARE_SINGLE_TENANT: true
-      LOGFLARE_SUPABASE_MODE: true
-      LOGFLARE_MIN_CLUSTER_SIZE: 1
-
-      # Comment variables to use Big Query backend for analytics
-      POSTGRES_BACKEND_URL: postgresql://supabase_admin:your-super-secret-and-long-postgres-password@db:5432/_supabase
-      POSTGRES_BACKEND_SCHEMA: _analytics
-      LOGFLARE_FEATURE_FLAG_OVERRIDE: multibackend=true
-      # Uncomment to use Big Query backend for analytics
-      # GOOGLE_PROJECT_ID: GOOGLE_PROJECT_ID
-      # GOOGLE_PROJECT_NUMBER: GOOGLE_PROJECT_NUMBER
-
-  # Comment out everything below this point if you are using an external Postgres database
  db:
-    container_name: supabase-db
-    image: supabase/postgres:15.8.1.049
+    container_name: autogpt-db
+    image: pgvector/pgvector:pg18
    restart: unless-stopped
    volumes:
-      - ./volumes/db/realtime.sql:/docker-entrypoint-initdb.d/migrations/99-realtime.sql:Z
-      # Must be superuser to create event trigger
-      - ./volumes/db/webhooks.sql:/docker-entrypoint-initdb.d/init-scripts/98-webhooks.sql:Z
-      # Must be superuser to alter reserved role
-      - ./volumes/db/roles.sql:/docker-entrypoint-initdb.d/init-scripts/99-roles.sql:Z
-      # Initialize the database settings with JWT_SECRET and JWT_EXP
-      - ./volumes/db/jwt.sql:/docker-entrypoint-initdb.d/init-scripts/99-jwt.sql:Z
-      # PGDATA directory is persisted between restarts
-      - ./volumes/db/data:/var/lib/postgresql/data:Z
-      # Changes required for internal supabase data such as _analytics
-      - ./volumes/db/_supabase.sql:/docker-entrypoint-initdb.d/migrations/97-_supabase.sql:Z
-      # Changes required for Analytics support
-      - ./volumes/db/logs.sql:/docker-entrypoint-initdb.d/migrations/99-logs.sql:Z
-      # Changes required for Pooler support
-      - ./volumes/db/pooler.sql:/docker-entrypoint-initdb.d/migrations/99-pooler.sql:Z
-      # Use named volume to persist pgsodium decryption key between restarts
-      - supabase-config:/etc/postgresql-custom
+      - ./volumes/db/data:/var/lib/postgresql:Z
    healthcheck:
      test:
        [
@@ -438,26 +25,23 @@ services:
      interval: 5s
      timeout: 5s
      retries: 10
-    <<: *supabase-env-files
    environment:
-      <<: *supabase-env
-      # Keep any existing environment variables specific to that service
-      POSTGRES_HOST: /var/run/postgresql
-      PGPORT: 5432
-      PGPASSWORD: your-super-secret-and-long-postgres-password
-      PGDATABASE: postgres
-      JWT_EXP: 3600
+      POSTGRES_USER: postgres
+      POSTGRES_PASSWORD: your-super-secret-and-long-postgres-password
+      POSTGRES_DB: postgres
    command:
      [
        "postgres",
        "-c",
-        "config_file=/etc/postgresql/postgresql.conf",
+        "wal_level=logical",
        "-c",
-        "log_min_messages=fatal" # prevents Realtime polling queries from appearing in logs
+        "max_replication_slots=5",
+        "-c",
+        "max_wal_senders=10"
      ]

  vector:
-    container_name: supabase-vector
+    container_name: autogpt-vector
    image: timberio/vector:0.28.1-alpine
    restart: unless-stopped
    volumes:
@@ -476,67 +60,8 @@ services:
      timeout: 5s
      interval: 5s
      retries: 3
-    <<: *supabase-env-files
-    environment:
-      <<: *supabase-env
    command:
      [
        "--config",
        "/etc/vector/vector.yml"
      ]

-  # Update the DATABASE_URL if you are using an external Postgres database
-  supavisor:
-    container_name: supabase-pooler
-    image: supabase/supavisor:2.4.12
-    restart: unless-stopped
-    ports:
-      - 5432:5432
-      - 6543:6543
-    volumes:
-      - ./volumes/pooler/pooler.exs:/etc/pooler/pooler.exs:ro
-    healthcheck:
-      test:
-        [
-          "CMD",
-          "curl",
-          "-sSfL",
-          "--head",
-          "-o",
-          "/dev/null",
-          "http://127.0.0.1:4000/api/health"
-        ]
-      interval: 10s
-      timeout: 5s
-      retries: 5
-    depends_on:
-      db:
-        condition: service_healthy
-      analytics:
-        condition: service_healthy
-    <<: *supabase-env-files
-    environment:
-      <<: *supabase-env
-      # Keep any existing environment variables specific to that service
-      PORT: 4000
-      DATABASE_URL: ecto://supabase_admin:your-super-secret-and-long-postgres-password@db:5432/_supabase
-      CLUSTER_POSTGRES: true
-      SECRET_KEY_BASE: UpNVntn3cDxHJpq99YMc1T1AQgQpc8kfYTuRgBiYa15BLrx8etQoXz3gZv1/u2oq
-      VAULT_ENC_KEY: your-encryption-key-32-chars-min
-      API_JWT_SECRET: your-super-secret-jwt-token-with-at-least-32-characters-long
-      METRICS_JWT_SECRET: your-super-secret-jwt-token-with-at-least-32-characters-long
-      REGION: local
-      ERL_AFLAGS: -proto_dist inet_tcp
-      POOLER_TENANT_ID: your-tenant-id
-      POOLER_DEFAULT_POOL_SIZE: 20
-      POOLER_MAX_CLIENT_CONN: 100
-      POOLER_POOL_MODE: transaction
-    command:
-      [
-        "/bin/sh",
-        "-c",
-        "/app/bin/migrate && /app/bin/supavisor eval \"$$(cat /etc/pooler/pooler.exs)\" && /app/bin/server"
-      ]
-
-volumes:
-  supabase-config:
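The new `db` command line enables logical replication (`wal_level=logical`, replication slots, WAL senders) for Prisma. A small, hedged check that a target database was actually started with that flag (function name and client type are illustrative):

```rust
use anyhow::{bail, Result};
use tokio_postgres::Client;

/// Sketch only: fail fast if the target database was not started with the
/// logical-replication setting configured in the compose command above.
pub async fn assert_logical_replication(client: &Client) -> Result<()> {
    let row = client.query_one("SHOW wal_level", &[]).await?;
    let wal_level: String = row.get(0);
    if wal_level != "logical" {
        bail!("expected wal_level=logical, found {wal_level}");
    }
    Ok(())
}
```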
@@ -6,7 +6,7 @@ sources:
  docker_host:
    type: docker_logs
    exclude_containers:
-      - supabase-vector
+      - autogpt-vector

transforms:
  project_logs:
@@ -30,110 +30,9 @@ transforms:
    inputs:
      - project_logs
    route:
-      kong: '.appname == "supabase-kong"'
-      auth: '.appname == "supabase-auth"'
-      rest: '.appname == "supabase-rest"'
-      realtime: '.appname == "supabase-realtime"'
-      storage: '.appname == "supabase-storage"'
-      functions: '.appname == "supabase-functions"'
-      db: '.appname == "supabase-db"'
+      db: '.appname == "autogpt-db"'
+      backend: '.appname == "autogpt-backend" || .appname == "autogpt-executor" || .appname == "autogpt-rest-server"'
+      frontend: '.appname == "autogpt-frontend"'
-  # Ignores non nginx errors since they are related with kong booting up
-  kong_logs:
-    type: remap
-    inputs:
-      - router.kong
-    source: |-
-      req, err = parse_nginx_log(.event_message, "combined")
-      if err == null {
-          .timestamp = req.timestamp
-          .metadata.request.headers.referer = req.referer
-          .metadata.request.headers.user_agent = req.agent
-          .metadata.request.headers.cf_connecting_ip = req.client
-          .metadata.request.method = req.method
-          .metadata.request.path = req.path
-          .metadata.request.protocol = req.protocol
-          .metadata.response.status_code = req.status
-      }
-      if err != null {
-        abort
-      }
-  # Ignores non nginx errors since they are related with kong booting up
-  kong_err:
-    type: remap
-    inputs:
-      - router.kong
-    source: |-
-      .metadata.request.method = "GET"
-      .metadata.response.status_code = 200
-      parsed, err = parse_nginx_log(.event_message, "error")
-      if err == null {
-          .timestamp = parsed.timestamp
-          .severity = parsed.severity
-          .metadata.request.host = parsed.host
-          .metadata.request.headers.cf_connecting_ip = parsed.client
-          url, err = split(parsed.request, " ")
-          if err == null {
-            .metadata.request.method = url[0]
-            .metadata.request.path = url[1]
-            .metadata.request.protocol = url[2]
-          }
-      }
-      if err != null {
-        abort
-      }
-  # Gotrue logs are structured json strings which frontend parses directly. But we keep metadata for consistency.
-  auth_logs:
-    type: remap
-    inputs:
-      - router.auth
-    source: |-
-      parsed, err = parse_json(.event_message)
-      if err == null {
-          .metadata.timestamp = parsed.time
-          .metadata = merge!(.metadata, parsed)
-      }
-  # PostgREST logs are structured so we separate timestamp from message using regex
-  rest_logs:
-    type: remap
-    inputs:
-      - router.rest
-    source: |-
-      parsed, err = parse_regex(.event_message, r'^(?P<time>.*): (?P<msg>.*)$')
-      if err == null {
-          .event_message = parsed.msg
-          .timestamp = to_timestamp!(parsed.time)
-          .metadata.host = .project
-      }
-  # Realtime logs are structured so we parse the severity level using regex (ignore time because it has no date)
-  realtime_logs:
-    type: remap
-    inputs:
-      - router.realtime
-    source: |-
-      .metadata.project = del(.project)
-      .metadata.external_id = .metadata.project
-      parsed, err = parse_regex(.event_message, r'^(?P<time>\d+:\d+:\d+\.\d+) \[(?P<level>\w+)\] (?P<msg>.*)$')
-      if err == null {
-          .event_message = parsed.msg
-          .metadata.level = parsed.level
-      }
-  # Storage logs may contain json objects so we parse them for completeness
-  storage_logs:
-    type: remap
-    inputs:
-      - router.storage
-    source: |-
-      .metadata.project = del(.project)
-      .metadata.tenantId = .metadata.project
-      parsed, err = parse_json(.event_message)
-      if err == null {
-          .event_message = parsed.msg
-          .metadata.level = parsed.level
-          .metadata.timestamp = parsed.time
-          .metadata.context[0].host = parsed.hostname
-          .metadata.context[0].pid = parsed.pid
-      }
-  # Postgres logs some messages to stderr which we map to warning severity level
  db_logs:
    type: remap
    inputs:
@@ -156,77 +55,9 @@ transforms:
      .metadata.parsed.error_severity = upcase!(.metadata.parsed.error_severity)

sinks:
-  logflare_auth:
-    type: 'http'
+  console:
+    type: console
    inputs:
-      - auth_logs
+      - project_logs
    encoding:
-      codec: 'json'
+      codec: json
-    method: 'post'
-    request:
-      retry_max_duration_secs: 10
-    uri: 'http://analytics:4000/api/logs?source_name=gotrue.logs.prod&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
-  logflare_realtime:
-    type: 'http'
-    inputs:
-      - realtime_logs
-    encoding:
-      codec: 'json'
-    method: 'post'
-    request:
-      retry_max_duration_secs: 10
-    uri: 'http://analytics:4000/api/logs?source_name=realtime.logs.prod&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
-  logflare_rest:
-    type: 'http'
-    inputs:
-      - rest_logs
-    encoding:
-      codec: 'json'
-    method: 'post'
-    request:
-      retry_max_duration_secs: 10
-    uri: 'http://analytics:4000/api/logs?source_name=postgREST.logs.prod&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
-  logflare_db:
-    type: 'http'
-    inputs:
-      - db_logs
-    encoding:
-      codec: 'json'
-    method: 'post'
-    request:
-      retry_max_duration_secs: 10
-    # We must route the sink through kong because ingesting logs before logflare is fully initialised will
-    # lead to broken queries from studio. This works by the assumption that containers are started in the
-    # following order: vector > db > logflare > kong
-    uri: 'http://kong:8000/analytics/v1/api/logs?source_name=postgres.logs&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
-  logflare_functions:
-    type: 'http'
-    inputs:
-      - router.functions
-    encoding:
-      codec: 'json'
-    method: 'post'
-    request:
-      retry_max_duration_secs: 10
-    uri: 'http://analytics:4000/api/logs?source_name=deno-relay-logs&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
-  logflare_storage:
-    type: 'http'
-    inputs:
-      - storage_logs
-    encoding:
-      codec: 'json'
-    method: 'post'
-    request:
-      retry_max_duration_secs: 10
-    uri: 'http://analytics:4000/api/logs?source_name=storage.logs.prod.2&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
-  logflare_kong:
-    type: 'http'
-    inputs:
-      - kong_logs
-      - kong_err
-    encoding:
-      codec: 'json'
-    method: 'post'
-    request:
-      retry_max_duration_secs: 10
-    uri: 'http://analytics:4000/api/logs?source_name=cloudflare.logs.prod&api_key=${LOGFLARE_API_KEY?LOGFLARE_API_KEY is required}'
@@ -17,8 +17,6 @@ x-backend-env: &backend-env # Docker internal service hostnames (override localh
   DB_HOST: db
   REDIS_HOST: redis
   RABBITMQ_HOST: rabbitmq
-  # Override Supabase URL for Docker network
-  SUPABASE_URL: http://kong:8000
   # Database connection string for Docker network
   # This cannot be constructed like in .env because we cannot interpolate values set here (DB_HOST)
   DATABASE_URL: postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform
@@ -330,7 +328,6 @@ services:
       # Server-side environment variables (Docker service names)
       # These override the localhost URLs from env files when running in Docker
       AUTH_CALLBACK_URL: http://rest_server:8006/auth/callback
-      SUPABASE_URL: http://kong:8000
       AGPT_SERVER_URL: http://rest_server:8006/api
       AGPT_WS_SERVER_URL: ws://websocket_server:8001/ws
     networks:
@@ -5,7 +5,6 @@ networks:
     name: shared-network
 
 volumes:
-  supabase-config:
   clamav-data:
 
 x-agpt-services:
@@ -14,13 +13,6 @@ x-agpt-services:
     - app-network
     - shared-network
 
-x-supabase-services:
-  &supabase-services
-  networks:
-    - app-network
-    - shared-network
-
-
 services:
   # AGPT services
   migrate:
@@ -103,62 +95,23 @@ services:
       file: ./docker-compose.platform.yml
       service: frontend
 
-  # Supabase services (minimal: auth + db + kong)
-  kong:
-    <<: *supabase-services
-    extends:
-      file: ./db/docker/docker-compose.yml
-      service: kong
-
-  auth:
-    <<: *supabase-services
-    extends:
-      file: ./db/docker/docker-compose.yml
-      service: auth
-    environment:
-      GOTRUE_MAILER_AUTOCONFIRM: true
-
+  # PostgreSQL database (no Supabase auth - using native auth)
   db:
-    <<: *supabase-services
+    <<: *agpt-services
     extends:
       file: ./db/docker/docker-compose.yml
       service: db
     ports:
-      - 5432:5432 # We don't use Supavisor locally, so we expose the db directly.
+      - 5432:5432
 
-  # Studio and its dependencies for local development only
-  meta:
-    <<: *supabase-services
-    profiles:
-      - local
-    extends:
-      file: ./db/docker/docker-compose.yml
-      service: meta
-
-  studio:
-    <<: *supabase-services
-    profiles:
-      - local
-    extends:
-      file: ./db/docker/docker-compose.yml
-      service: studio
-    depends_on:
-      meta:
-        condition: service_healthy
-    # environment:
-    #   NEXT_PUBLIC_ENABLE_LOGS: false # Disable analytics/logging features
-
   deps:
-    <<: *supabase-services
+    <<: *agpt-services
     profiles:
       - local
     image: busybox
     command: /bin/true
     depends_on:
-      - kong
-      - auth
       - db
-      - studio
       - redis
       - rabbitmq
      - clamav
@@ -1,7 +1,3 @@
-# Supabase
-NEXT_PUBLIC_SUPABASE_URL=http://localhost:8000
-NEXT_PUBLIC_SUPABASE_ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
-
 # Back-end services
 NEXT_PUBLIC_AGPT_SERVER_URL=http://localhost:8006/api
 NEXT_PUBLIC_AGPT_WS_SERVER_URL=ws://localhost:8001/ws
@@ -166,7 +166,8 @@ By integrating Storybook into our development workflow, we can streamline UI dev
 
 ### Backend & Services
 
-- [**Supabase**](https://supabase.com/) - Backend-as-a-Service (database, auth, storage)
+- [**FastAPI**](https://fastapi.tiangolo.com/) - Python API framework (backend)
+- [**PostgreSQL**](https://www.postgresql.org/) - Database
 - [**Sentry**](https://sentry.io/) - Error monitoring and performance tracking
 
 ### Package Management
@@ -55,8 +55,6 @@
     "@rjsf/utils": "5.24.13",
     "@rjsf/validator-ajv8": "5.24.13",
     "@sentry/nextjs": "10.27.0",
-    "@supabase/ssr": "0.7.0",
-    "@supabase/supabase-js": "2.78.0",
     "@tanstack/react-query": "5.90.6",
     "@tanstack/react-table": "8.21.3",
     "@types/jaro-winkler": "0.2.4",
@@ -76,6 +74,7 @@
     "framer-motion": "12.23.24",
     "geist": "1.5.1",
     "highlight.js": "11.11.1",
+    "import-in-the-middle": "2.0.1",
     "jaro-winkler": "0.2.8",
     "katex": "0.16.25",
     "launchdarkly-react-client-sdk": "3.9.0",
@@ -146,7 +145,7 @@
     "postcss": "8.5.6",
     "prettier": "3.6.2",
     "prettier-plugin-tailwindcss": "0.7.1",
-    "require-in-the-middle": "7.5.2",
+    "require-in-the-middle": "8.0.1",
     "storybook": "9.1.5",
     "tailwindcss": "3.4.17",
     "typescript": "5.9.3"
144 autogpt_platform/frontend/pnpm-lock.yaml (generated)
@@ -89,12 +89,6 @@ importers:
       '@sentry/nextjs':
         specifier: 10.27.0
         version: 10.27.0(@opentelemetry/context-async-hooks@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/core@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.2.0(@opentelemetry/api@1.9.0))(next@15.4.10(@babel/core@7.28.4)(@opentelemetry/api@1.9.0)(@playwright/test@1.56.1)(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1)(webpack@5.101.3(esbuild@0.25.9))
-      '@supabase/ssr':
-        specifier: 0.7.0
-        version: 0.7.0(@supabase/supabase-js@2.78.0)
-      '@supabase/supabase-js':
-        specifier: 2.78.0
-        version: 2.78.0
       '@tanstack/react-query':
         specifier: 5.90.6
         version: 5.90.6(react@18.3.1)
@@ -152,6 +146,9 @@ importers:
       highlight.js:
         specifier: 11.11.1
         version: 11.11.1
+      import-in-the-middle:
+        specifier: 2.0.1
+        version: 2.0.1
       jaro-winkler:
         specifier: 0.2.8
         version: 0.2.8
@@ -358,8 +355,8 @@ importers:
         specifier: 0.7.1
         version: 0.7.1(prettier@3.6.2)
       require-in-the-middle:
-        specifier: 7.5.2
-        version: 7.5.2
+        specifier: 8.0.1
+        version: 8.0.1
       storybook:
         specifier: 9.1.5
         version: 9.1.5(@testing-library/dom@10.4.1)(msw@2.11.6(@types/node@24.10.0)(typescript@5.9.3))(prettier@3.6.2)
@@ -3024,33 +3021,6 @@ packages:
       typescript:
         optional: true
 
-  '@supabase/auth-js@2.78.0':
-    resolution: {integrity: sha512-cXDtu1U0LeZj/xfnFoV7yCze37TcbNo8FCxy1FpqhMbB9u9QxxDSW6pA5gm/07Ei7m260Lof4CZx67Cu6DPeig==}
-
-  '@supabase/functions-js@2.78.0':
-    resolution: {integrity: sha512-t1jOvArBsOINyqaRee1xJ3gryXLvkBzqnKfi6q3YRzzhJbGS6eXz0pXR5fqmJeB01fLC+1njpf3YhMszdPEF7g==}
-
-  '@supabase/node-fetch@2.6.15':
-    resolution: {integrity: sha512-1ibVeYUacxWYi9i0cf5efil6adJ9WRyZBLivgjs+AUpewx1F3xPi7gLgaASI2SmIQxPoCEjAsLAzKPgMJVgOUQ==}
-    engines: {node: 4.x || >=6.0.0}
-
-  '@supabase/postgrest-js@2.78.0':
-    resolution: {integrity: sha512-AwhpYlSvJ+PSnPmIK8sHj7NGDyDENYfQGKrMtpVIEzQA2ApUjgpUGxzXWN4Z0wEtLQsvv7g4y9HVad9Hzo1TNA==}
-
-  '@supabase/realtime-js@2.78.0':
-    resolution: {integrity: sha512-rCs1zmLe7of7hj4s7G9z8rTqzWuNVtmwDr3FiCRCJFawEoa+RQO1xpZGbdeuVvVmKDyVN6b542Okci+117y/LQ==}
-
-  '@supabase/ssr@0.7.0':
-    resolution: {integrity: sha512-G65t5EhLSJ5c8hTCcXifSL9Q/ZRXvqgXeNo+d3P56f4U1IxwTqjB64UfmfixvmMcjuxnq2yGqEWVJqUcO+AzAg==}
-    peerDependencies:
-      '@supabase/supabase-js': ^2.43.4
-
-  '@supabase/storage-js@2.78.0':
-    resolution: {integrity: sha512-n17P0JbjHOlxqJpkaGFOn97i3EusEKPEbWOpuk1r4t00Wg06B8Z4GUiq0O0n1vUpjiMgJUkLIMuBVp+bEgunzQ==}
-
-  '@supabase/supabase-js@2.78.0':
-    resolution: {integrity: sha512-xYMRNBFmKp2m1gMuwcp/gr/HlfZKqjye1Ib8kJe29XJNsgwsfO/f8skxnWiscFKTlkOKLuBexNgl5L8dzGt6vA==}
-
   '@swc/helpers@0.5.15':
     resolution: {integrity: sha512-JQ5TuMi45Owi4/BIMAJBoSQoOJu12oOk/gADqlcUL9JEdHB8vyjUSsxqeNXnmXHjYKMi2WcYtezGEEhqUI/E2g==}
 
@@ -3242,9 +3212,6 @@ packages:
   '@types/pg@8.15.6':
     resolution: {integrity: sha512-NoaMtzhxOrubeL/7UZuNTrejB4MPAJ0RpxZqXQf2qXuVlTPuG6Y8p4u9dKRaue4yjmC7ZhzVO2/Yyyn25znrPQ==}
 
-  '@types/phoenix@1.6.6':
-    resolution: {integrity: sha512-PIzZZlEppgrpoT2QgbnDU+MMzuR6BbCjllj0bM70lWoejMeNJAxCchxnv7J3XFkI8MpygtRpzXrIlmWUBclP5A==}
-
   '@types/prop-types@15.7.15':
     resolution: {integrity: sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==}
 
@@ -3289,9 +3256,6 @@ packages:
   '@types/use-sync-external-store@0.0.6':
     resolution: {integrity: sha512-zFDAD+tlpf2r4asuHEj0XH6pY6i0g5NeAHPn+15wk3BV6JA69eERFXC1gyGThDkVa1zCyKr5jox1+2LbV/AMLg==}
 
-  '@types/ws@8.18.1':
-    resolution: {integrity: sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==}
-
   '@typescript-eslint/eslint-plugin@8.48.1':
     resolution: {integrity: sha512-X63hI1bxl5ohelzr0LY5coufyl0LJNthld+abwxpCoo6Gq+hSqhKwci7MUWkXo67mzgUK6YFByhmaHmUcuBJmA==}
     engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
@@ -5213,8 +5177,8 @@ packages:
     resolution: {integrity: sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==}
     engines: {node: '>=6'}
 
-  import-in-the-middle@2.0.0:
-    resolution: {integrity: sha512-yNZhyQYqXpkT0AKq3F3KLasUSK4fHvebNH5hOsKQw2dhGSALvQ4U0BqUc5suziKvydO5u5hgN2hy1RJaho8U5A==}
+  import-in-the-middle@2.0.1:
+    resolution: {integrity: sha512-bruMpJ7xz+9jwGzrwEhWgvRrlKRYCRDBrfU+ur3FcasYXLJDxTruJ//8g2Noj+QFyRBeqbpj8Bhn4Fbw6HjvhA==}
 
   imurmurhash@0.1.4:
     resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==}
@@ -6788,10 +6752,6 @@ packages:
     resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==}
     engines: {node: '>=0.10.0'}
 
-  require-in-the-middle@7.5.2:
-    resolution: {integrity: sha512-gAZ+kLqBdHarXB64XpAe2VCjB7rIRv+mU8tfRWziHRJ5umKsIHN2tLLv6EtMw7WCdP19S0ERVMldNvxYCHnhSQ==}
-    engines: {node: '>=8.6.0'}
-
   require-in-the-middle@8.0.1:
     resolution: {integrity: sha512-QT7FVMXfWOYFbeRBF6nu+I6tr2Tf3u0q8RIEjNob/heKY/nh7drD/k7eeMFmSQgnTtCzLDcCu/XEnpW2wk4xCQ==}
     engines: {node: '>=9.3.0 || >=8.10.0 <9.0.0'}
@@ -9324,7 +9284,7 @@ snapshots:
     dependencies:
       '@opentelemetry/api': 1.9.0
       '@opentelemetry/api-logs': 0.208.0
-      import-in-the-middle: 2.0.0
+      import-in-the-middle: 2.0.1
       require-in-the-middle: 8.0.1
    transitivePeerDependencies:
      - supports-color
@@ -10363,7 +10323,7 @@ snapshots:
      '@opentelemetry/semantic-conventions': 1.37.0
      '@sentry/core': 10.27.0
      '@sentry/opentelemetry': 10.27.0(@opentelemetry/api@1.9.0)(@opentelemetry/context-async-hooks@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/core@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.37.0)
-      import-in-the-middle: 2.0.0
+      import-in-the-middle: 2.0.1
    transitivePeerDependencies:
      - supports-color
 
@@ -10402,7 +10362,7 @@ snapshots:
      '@sentry/core': 10.27.0
      '@sentry/node-core': 10.27.0(@opentelemetry/api@1.9.0)(@opentelemetry/context-async-hooks@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/core@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/instrumentation@0.208.0(@opentelemetry/api@1.9.0))(@opentelemetry/resources@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.37.0)
      '@sentry/opentelemetry': 10.27.0(@opentelemetry/api@1.9.0)(@opentelemetry/context-async-hooks@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/core@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.2.0(@opentelemetry/api@1.9.0))(@opentelemetry/semantic-conventions@1.37.0)
-      import-in-the-middle: 2.0.0
+      import-in-the-middle: 2.0.1
      minimatch: 9.0.5
    transitivePeerDependencies:
      - supports-color
@@ -10814,58 +10774,6 @@ snapshots:
    optionalDependencies:
      typescript: 5.9.3
 
-  '@supabase/auth-js@2.78.0':
-    dependencies:
-      '@supabase/node-fetch': 2.6.15
-      tslib: 2.8.1
-
-  '@supabase/functions-js@2.78.0':
-    dependencies:
-      '@supabase/node-fetch': 2.6.15
-      tslib: 2.8.1
-
-  '@supabase/node-fetch@2.6.15':
-    dependencies:
-      whatwg-url: 5.0.0
-
-  '@supabase/postgrest-js@2.78.0':
-    dependencies:
-      '@supabase/node-fetch': 2.6.15
-      tslib: 2.8.1
-
-  '@supabase/realtime-js@2.78.0':
-    dependencies:
-      '@supabase/node-fetch': 2.6.15
-      '@types/phoenix': 1.6.6
-      '@types/ws': 8.18.1
-      tslib: 2.8.1
-      ws: 8.18.3
-    transitivePeerDependencies:
-      - bufferutil
-      - utf-8-validate
-
-  '@supabase/ssr@0.7.0(@supabase/supabase-js@2.78.0)':
-    dependencies:
-      '@supabase/supabase-js': 2.78.0
-      cookie: 1.0.2
-
-  '@supabase/storage-js@2.78.0':
-    dependencies:
-      '@supabase/node-fetch': 2.6.15
-      tslib: 2.8.1
-
-  '@supabase/supabase-js@2.78.0':
-    dependencies:
-      '@supabase/auth-js': 2.78.0
-      '@supabase/functions-js': 2.78.0
-      '@supabase/node-fetch': 2.6.15
-      '@supabase/postgrest-js': 2.78.0
-      '@supabase/realtime-js': 2.78.0
-      '@supabase/storage-js': 2.78.0
-    transitivePeerDependencies:
-      - bufferutil
-      - utf-8-validate
-
   '@swc/helpers@0.5.15':
    dependencies:
      tslib: 2.8.1
@@ -11078,8 +10986,6 @@ snapshots:
      pg-protocol: 1.10.3
      pg-types: 2.2.0
 
-  '@types/phoenix@1.6.6': {}
-
   '@types/prop-types@15.7.15': {}
 
   '@types/react-dom@18.3.5(@types/react@18.3.17)':
@@ -11119,10 +11025,6 @@ snapshots:
 
   '@types/use-sync-external-store@0.0.6': {}
 
-  '@types/ws@8.18.1':
-    dependencies:
-      '@types/node': 24.10.0
-
   '@typescript-eslint/eslint-plugin@8.48.1(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1)(typescript@5.9.3)':
    dependencies:
      '@eslint-community/regexpp': 4.12.2
@@ -12649,8 +12551,8 @@ snapshots:
      '@typescript-eslint/parser': 8.48.1(eslint@8.57.1)(typescript@5.9.3)
      eslint: 8.57.1
      eslint-import-resolver-node: 0.3.9
-      eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0)(eslint@8.57.1)
+      eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1))(eslint@8.57.1)
-      eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1)
+      eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1))(eslint@8.57.1))(eslint@8.57.1)
      eslint-plugin-jsx-a11y: 6.10.2(eslint@8.57.1)
      eslint-plugin-react: 7.37.5(eslint@8.57.1)
      eslint-plugin-react-hooks: 5.2.0(eslint@8.57.1)
@@ -12669,7 +12571,7 @@ snapshots:
    transitivePeerDependencies:
      - supports-color
 
-  eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0)(eslint@8.57.1):
+  eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1))(eslint@8.57.1):
    dependencies:
      '@nolyfill/is-core-module': 1.0.39
      debug: 4.4.3
@@ -12680,22 +12582,22 @@ snapshots:
      tinyglobby: 0.2.15
      unrs-resolver: 1.11.1
    optionalDependencies:
-      eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1)
+      eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1))(eslint@8.57.1))(eslint@8.57.1)
    transitivePeerDependencies:
      - supports-color
 
-  eslint-module-utils@2.12.1(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1):
+  eslint-module-utils@2.12.1(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1))(eslint@8.57.1))(eslint@8.57.1):
    dependencies:
      debug: 3.2.7
    optionalDependencies:
      '@typescript-eslint/parser': 8.48.1(eslint@8.57.1)(typescript@5.9.3)
      eslint: 8.57.1
      eslint-import-resolver-node: 0.3.9
-      eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0)(eslint@8.57.1)
+      eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1))(eslint@8.57.1)
    transitivePeerDependencies:
      - supports-color
 
-  eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1):
+  eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1))(eslint@8.57.1))(eslint@8.57.1):
    dependencies:
      '@rtsao/scc': 1.1.0
      array-includes: 3.1.9
@@ -12706,7 +12608,7 @@ snapshots:
      doctrine: 2.1.0
      eslint: 8.57.1
      eslint-import-resolver-node: 0.3.9
-      eslint-module-utils: 2.12.1(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@8.57.1)
+      eslint-module-utils: 2.12.1(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.48.1(eslint@8.57.1)(typescript@5.9.3))(eslint@8.57.1))(eslint@8.57.1))(eslint@8.57.1)
      hasown: 2.0.2
      is-core-module: 2.16.1
      is-glob: 4.0.3
@@ -13385,7 +13287,7 @@ snapshots:
      parent-module: 1.0.1
      resolve-from: 4.0.0
 
-  import-in-the-middle@2.0.0:
+  import-in-the-middle@2.0.1:
    dependencies:
      acorn: 8.15.0
      acorn-import-attributes: 1.9.5(acorn@8.15.0)
@@ -15262,14 +15164,6 @@ snapshots:
 
   require-from-string@2.0.2: {}
 
-  require-in-the-middle@7.5.2:
-    dependencies:
-      debug: 4.4.3
-      module-details-from-path: 1.0.4
-      resolve: 1.22.10
-    transitivePeerDependencies:
-      - supports-color
-
   require-in-the-middle@8.0.1:
    dependencies:
      debug: 4.4.3
@@ -1,92 +1,74 @@
-import { getServerSupabase } from "@/lib/supabase/server/getServerSupabase";
-import BackendAPI from "@/lib/autogpt-server-api";
 import { NextResponse } from "next/server";
 import { revalidatePath } from "next/cache";
 import { shouldShowOnboarding } from "@/app/api/helpers";
+import { setAuthCookies } from "@/lib/auth/cookies";
+import { environment } from "@/services/environment";
 
-// Handle the callback to complete the user session login
+// Handle the OAuth callback from the backend
 export async function GET(request: Request) {
   const { searchParams, origin } = new URL(request.url);
   const code = searchParams.get("code");
+  const state = searchParams.get("state");
 
   let next = "/marketplace";
 
   if (code) {
-    const supabase = await getServerSupabase();
-
-    if (!supabase) {
-      return NextResponse.redirect(`${origin}/error`);
-    }
-
-    const { error } = await supabase.auth.exchangeCodeForSession(code);
-
-    if (!error) {
-      try {
-        const api = new BackendAPI();
-        await api.createUser();
-
-        if (await shouldShowOnboarding()) {
-          next = "/onboarding";
-          revalidatePath("/onboarding", "layout");
-        } else {
-          revalidatePath("/", "layout");
-        }
-      } catch (createUserError) {
-        console.error("Error creating user:", createUserError);
-
-        // Handle ApiError from the backend API client
-        if (
-          createUserError &&
-          typeof createUserError === "object" &&
-          "status" in createUserError
-        ) {
-          const apiError = createUserError as any;
-
-          if (apiError.status === 401) {
-            // Authentication issues - token missing/invalid
-            return NextResponse.redirect(
-              `${origin}/error?message=auth-token-invalid`,
-            );
-          } else if (apiError.status >= 500) {
-            // Server/database errors
-            return NextResponse.redirect(
-              `${origin}/error?message=server-error`,
-            );
-          } else if (apiError.status === 429) {
-            // Rate limiting
-            return NextResponse.redirect(
-              `${origin}/error?message=rate-limited`,
-            );
-          }
-        }
-
-        // Handle network/fetch errors
-        if (
-          createUserError instanceof TypeError &&
-          createUserError.message.includes("fetch")
-        ) {
-          return NextResponse.redirect(`${origin}/error?message=network-error`);
-        }
-
-        // Generic user creation failure
-        return NextResponse.redirect(
-          `${origin}/error?message=user-creation-failed`,
-        );
-      }
-
-      // Get redirect destination from 'next' query parameter
-      next = searchParams.get("next") || next;
-
-      const forwardedHost = request.headers.get("x-forwarded-host"); // original origin before load balancer
-      const isLocalEnv = process.env.NODE_ENV === "development";
-      if (isLocalEnv) {
-        // we can be sure that there is no load balancer in between, so no need to watch for X-Forwarded-Host
-        return NextResponse.redirect(`${origin}${next}`);
-      } else if (forwardedHost) {
-        return NextResponse.redirect(`https://${forwardedHost}${next}`);
-      } else {
-        return NextResponse.redirect(`${origin}${next}`);
-      }
+    try {
+      // Exchange the code with the backend's Google OAuth callback
+      const callbackUrl = new URL(
+        `${environment.getAGPTServerApiUrl()}/auth/google/callback`,
+      );
+      callbackUrl.searchParams.set("code", code);
+      if (state) {
+        callbackUrl.searchParams.set("state", state);
+      }
+
+      const response = await fetch(callbackUrl.toString());
+      const data = await response.json();
+
+      if (!response.ok) {
+        console.error("OAuth callback error:", data);
+        return NextResponse.redirect(`${origin}/auth/auth-code-error`);
+      }
+
+      // Set the auth cookies with the tokens from the backend
+      if (data.access_token && data.refresh_token) {
+        await setAuthCookies(
+          data.access_token,
+          data.refresh_token,
+          data.expires_in || 900, // Default 15 minutes
+        );
+
+        // Check if onboarding is needed
+        // Note: This may fail for OAuth logins since the cookies were just set
+        // on the response and aren't available for the backend request yet.
+        // In that case, just go to marketplace and let client-side handle onboarding.
+        try {
+          if (await shouldShowOnboarding()) {
+            next = "/onboarding";
+            revalidatePath("/onboarding", "layout");
+          } else {
+            revalidatePath("/", "layout");
+          }
+        } catch {
+          // If onboarding check fails, just go to marketplace
+          revalidatePath("/", "layout");
+        }
+      }
+
+      const forwardedHost = request.headers.get("x-forwarded-host");
+      const isLocalEnv = process.env.NODE_ENV === "development";
+
+      if (isLocalEnv) {
+        return NextResponse.redirect(`${origin}${next}`);
+      } else if (forwardedHost) {
+        return NextResponse.redirect(`https://${forwardedHost}${next}`);
+      } else {
+        return NextResponse.redirect(`${origin}${next}`);
+      }
+    } catch (error) {
+      console.error("OAuth callback error:", error);
+      return NextResponse.redirect(`${origin}/auth/auth-code-error`);
     }
   }
 }
 
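Note: the new callback route above imports setAuthCookies from @/lib/auth/cookies, which is not shown in this diff. A minimal sketch of what such a helper could look like on Next.js 15 — the cookie names, options, and refresh-token lifetime below are assumptions for illustration, not the repository's actual implementation:

// Hypothetical sketch of a cookie helper like the one imported by the callback route.
import { cookies } from "next/headers";

export async function setAuthCookies(
  accessToken: string,
  refreshToken: string,
  expiresInSeconds: number, // matches the expires_in value the route passes in
): Promise<void> {
  const store = await cookies();
  const base = { httpOnly: true, secure: true, sameSite: "lax" as const, path: "/" };
  // Access token expires with the backend-provided TTL (the route defaults to 900s).
  store.set("access_token", accessToken, { ...base, maxAge: expiresInSeconds });
  // Refresh-token lifetime (30 days) is an assumed value for this sketch.
  store.set("refresh_token", refreshToken, { ...base, maxAge: 60 * 60 * 24 * 30 });
}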
@@ -1,30 +1,28 @@
-import { type EmailOtpType } from "@supabase/supabase-js";
 import { type NextRequest } from "next/server";
 
 import { redirect } from "next/navigation";
-import { getServerSupabase } from "@/lib/supabase/server/getServerSupabase";
+import { environment } from "@/services/environment";
 
 // Email confirmation route
 export async function GET(request: NextRequest) {
   const { searchParams } = new URL(request.url);
-  const token_hash = searchParams.get("token_hash");
-  const type = searchParams.get("type") as EmailOtpType | null;
+  const token = searchParams.get("token");
   const next = searchParams.get("next") ?? "/";
 
-  if (token_hash && type) {
-    const supabase = await getServerSupabase();
-
-    if (!supabase) {
-      redirect("/error");
-    }
-    const { error } = await supabase.auth.verifyOtp({
-      type,
-      token_hash,
-    });
-    if (!error) {
-      // redirect user to specified redirect URL or root of app
-      redirect(next);
-    }
+  if (token) {
+    try {
+      const response = await fetch(
+        `${environment.getAGPTServerBaseUrl()}/api/auth/verify-email?token=${encodeURIComponent(token)}`,
+        {
+          method: "GET",
+        },
+      );
+
+      if (response.ok) {
+        // redirect user to specified redirect URL or root of app
+        redirect(next);
+      }
+    } catch (error) {
+      console.error("Email verification error:", error);
+    }
   }
 }
 
@@ -9,7 +9,7 @@ import { ToolCallMessage } from "@/app/(platform)/chat/components/ToolCallMessag
 import { ToolResponseMessage } from "@/app/(platform)/chat/components/ToolResponseMessage/ToolResponseMessage";
 import { AuthPromptWidget } from "@/app/(platform)/chat/components/AuthPromptWidget/AuthPromptWidget";
 import { ChatCredentialsSetup } from "@/app/(platform)/chat/components/ChatCredentialsSetup/ChatCredentialsSetup";
-import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
+import { useAuth } from "@/lib/auth";
 import { useChatMessage, type ChatMessageData } from "./useChatMessage";
 import { getToolActionPhrase } from "@/app/(platform)/chat/helpers";
 export interface ChatMessageProps {
@@ -26,7 +26,7 @@ export function ChatMessage({
   onDismissCredentials,
   onSendMessage,
 }: ChatMessageProps) {
-  const { user } = useSupabase();
+  const { user } = useAuth();
   const {
     formattedTimestamp,
     isUser,
@@ -4,7 +4,7 @@ import { useEffect, useRef } from "react";
 import { useRouter, useSearchParams } from "next/navigation";
 import { toast } from "sonner";
 import { useChatSession } from "@/app/(platform)/chat/useChatSession";
-import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
+import { useAuth } from "@/lib/auth";
 import { useChatStream } from "@/app/(platform)/chat/useChatStream";
 
 export function useChatPage() {
@@ -14,7 +14,7 @@ export function useChatPage() {
     searchParams.get("session_id") || searchParams.get("session");
   const hasCreatedSessionRef = useRef(false);
   const hasClaimedSessionRef = useRef(false);
-  const { user } = useSupabase();
+  const { user } = useAuth();
   const { sendMessage: sendStreamMessage } = useChatStream();
 
   const {
@@ -1,7 +1,7 @@
 "use server";
 
+import { serverLogin } from "@/lib/auth/actions";
 import BackendAPI from "@/lib/autogpt-server-api";
-import { getServerSupabase } from "@/lib/supabase/server/getServerSupabase";
 import { loginFormSchema } from "@/types/auth";
 import * as Sentry from "@sentry/nextjs";
 import { shouldShowOnboarding } from "../../api/helpers";
@@ -17,27 +17,34 @@ export async function login(email: string, password: string) {
     };
   }
 
-  const supabase = await getServerSupabase();
-  if (!supabase) {
+  const result = await serverLogin({
+    email: parsed.data.email,
+    password: parsed.data.password,
+  });
+
+  if (result.error || !result.data) {
     return {
       success: false,
-      error: "Authentication service unavailable",
+      error: result.error?.message || "Login failed",
     };
   }
 
-  const { error } = await supabase.auth.signInWithPassword(parsed.data);
-  if (error) {
-    return {
-      success: false,
-      error: error.message,
-    };
+  // Note: API calls may fail here because the auth cookies we just set
+  // aren't available yet for the proxy route (it's a new HTTP request).
+  // Default to showing onboarding if we can't check, and let the
+  // onboarding flow handle user creation if needed.
+  let onboarding = true;
+  try {
+    const api = new BackendAPI();
+    await api.createUser();
+    onboarding = await shouldShowOnboarding();
+  } catch (error) {
+    console.debug(
+      "Could not complete post-login setup, defaulting to onboarding:",
+      error,
+    );
   }
 
-  const api = new BackendAPI();
-  await api.createUser();
-
-  const onboarding = await shouldShowOnboarding();
-
   return {
     success: true,
     onboarding,
@@ -1,5 +1,4 @@
 "use client";
-import { Form, FormField } from "@/components/__legacy__/ui/form";
 import { Button } from "@/components/atoms/Button/Button";
 import { Input } from "@/components/atoms/Input/Input";
 import { Link } from "@/components/atoms/Link/Link";
@@ -7,10 +6,11 @@ import { AuthCard } from "@/components/auth/AuthCard";
 import AuthFeedback from "@/components/auth/AuthFeedback";
 import { EmailNotAllowedModal } from "@/components/auth/EmailNotAllowedModal";
 import { GoogleOAuthButton } from "@/components/auth/GoogleOAuthButton";
+import { MobileWarningBanner } from "@/components/auth/MobileWarningBanner";
 import { environment } from "@/services/environment";
+import { Controller, FormProvider } from "react-hook-form";
 import { LoadingLogin } from "./components/LoadingLogin";
 import { useLoginPage } from "./useLoginPage";
-import { MobileWarningBanner } from "@/components/auth/MobileWarningBanner";
 import { useSearchParams } from "next/navigation";
 
 export default function LoginPage() {
@@ -30,7 +30,7 @@ export default function LoginPage() {
     isCloudEnv,
     isUserLoading,
     showNotAllowedModal,
-    isSupabaseAvailable,
+    isAuthAvailable,
     handleSubmit,
     handleProviderLogin,
     handleCloseNotAllowedModal,
@@ -40,20 +40,16 @@ export default function LoginPage() {
     return <LoadingLogin />;
   }
 
-  if (!isSupabaseAvailable) {
-    return (
-      <div>
-        User accounts are disabled because Supabase client is unavailable
-      </div>
-    );
+  if (!isAuthAvailable) {
+    return <div>User accounts are disabled because auth is unavailable</div>;
   }
 
   return (
     <div className="flex h-full min-h-[85vh] flex-col items-center justify-center py-10">
       <AuthCard title="Login to your account">
-        <Form {...form}>
+        <FormProvider {...form}>
           <form onSubmit={handleSubmit} className="flex w-full flex-col gap-1">
-            <FormField
+            <Controller
              control={form.control}
              name="email"
              render={({ field }) => (
@@ -69,7 +65,7 @@ export default function LoginPage() {
              />
            )}
          />
-            <FormField
+            <Controller
            control={form.control}
            name="password"
            render={({ field }) => (
@@ -113,7 +109,7 @@ export default function LoginPage() {
            isError={!!feedback}
            behaveAs={environment.getBehaveAs()}
          />
-        </Form>
+        </FormProvider>
        <AuthCard.BottomText
          text="Don't have an account?"
          link={{ text: "Sign up", href: signupHref }}
@@ -1,5 +1,5 @@
 import { useToast } from "@/components/molecules/Toast/use-toast";
-import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
+import { useAuth, broadcastLogin } from "@/lib/auth";
 import { environment } from "@/services/environment";
 import { loginFormSchema, LoginProvider } from "@/types/auth";
 import { zodResolver } from "@hookform/resolvers/zod";
@@ -10,7 +10,7 @@ import z from "zod";
 import { login as loginAction } from "./actions";
 
 export function useLoginPage() {
-  const { supabase, user, isUserLoading, isLoggedIn } = useSupabase();
+  const { user, isUserLoading, isLoggedIn, validateSession } = useAuth();
   const [feedback, setFeedback] = useState<string | null>(null);
   const router = useRouter();
   const searchParams = useSearchParams();
@@ -93,6 +93,10 @@ export function useLoginPage() {
       throw new Error(result.error || "Login failed");
     }
 
+    // Broadcast login to other tabs and validate session to update client state
+    broadcastLogin();
+    await validateSession();
+
     if (nextUrl) {
       router.replace(nextUrl);
     } else if (result.onboarding) {
@@ -122,7 +126,7 @@ export function useLoginPage() {
     isCloudEnv,
     isUserLoading,
     showNotAllowedModal,
-    isSupabaseAvailable: !!supabase,
+    isAuthAvailable: true, // Always available with native auth
     handleSubmit: form.handleSubmit(handleLogin),
     handleProviderLogin,
     handleCloseNotAllowedModal: () => setShowNotAllowedModal(false),
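For orientation, the diffs above swap useSupabase() for a useAuth() hook from @/lib/auth that exposes at least user, isUserLoading, isLoggedIn, and validateSession, plus a broadcastLogin helper. A small illustrative consumer, inferred only from those usages (the component name and rendering are hypothetical, not part of this change set):

// Hypothetical consumer; the hook contract is inferred from the usages shown in this diff.
"use client";

import { useAuth } from "@/lib/auth";

export function AuthStatus() {
  const { user, isUserLoading, isLoggedIn } = useAuth();

  if (isUserLoading) return <span>Checking session…</span>;
  if (!isLoggedIn || !user) return <span>Signed out</span>;
  return <span>Signed in</span>;
}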
Some files were not shown because too many files have changed in this diff.