Mirror of https://github.com/Significant-Gravitas/AutoGPT.git (synced 2026-01-12 00:28:31 -05:00)

Compare commits: 23 commits (native-aut ... hackathon-)
Commits:
0cbf7db58c
e503126170
7ee28197a3
818de26d24
cb08def96c
ac2daee5f8
266e0d79d4
01f443190e
bdba0033de
b87c64ce38
003affca43
290d0d9a9b
fba61c72ed
79d45a15d0
66f0d97ca2
5894a8fcdf
dff8efa35d
e26822998f
88731b1f76
c3e407ef09
08a60dcb9b
de78d062a9
217e3718d7
.github/copilot-instructions.md (vendored, 8 changes)
@@ -142,7 +142,7 @@ pnpm storybook # Start component development server
 ### Security & Middleware
 
 **Cache Protection**: Backend includes middleware preventing sensitive data caching in browsers/proxies
-**Authentication**: JWT-based with native authentication
+**Authentication**: JWT-based with Supabase integration
 **User ID Validation**: All data access requires user ID checks - verify this for any `data/*.py` changes
 
 ### Development Workflow
@@ -168,9 +168,9 @@
 
 - `frontend/src/app/layout.tsx` - Root application layout
 - `frontend/src/app/page.tsx` - Home page
-- `frontend/src/lib/auth/` - Authentication client
+- `frontend/src/lib/supabase/` - Authentication and database client
 
-**Protected Routes**: Update `frontend/middleware.ts` when adding protected routes
+**Protected Routes**: Update `frontend/lib/supabase/middleware.ts` when adding protected routes
 
 ### Agent Block System
 
@@ -194,7 +194,7 @@ Agents are built using a visual block-based system where each block performs a s
 
 1. **Backend**: `/backend/.env.default` → `/backend/.env` (user overrides)
 2. **Frontend**: `/frontend/.env.default` → `/frontend/.env` (user overrides)
-3. **Platform**: `/.env.default` (shared) → `/.env` (user overrides)
+3. **Platform**: `/.env.default` (Supabase/shared) → `/.env` (user overrides)
 4. Docker Compose `environment:` sections override file-based config
 5. Shell environment variables have highest precedence
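The layering above can be sketched with python-dotenv; this is an illustrative assumption (the repo's actual loader may differ), exploiting the fact that load_dotenv() with the default override=False never replaces a value that is already set:

import os
from dotenv import load_dotenv

# Load the higher-priority file first: override=False skips keys that
# already exist in os.environ, so earlier sources win.
load_dotenv("backend/.env")          # user overrides
load_dotenv("backend/.env.default")  # defaults fill the remaining gaps
# Anything exported in the shell was set before either call, so it wins overall.
print(os.environ.get("DATABASE_URL"))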
.github/workflows/claude-dependabot.yml (vendored, 6 changes)
@@ -144,7 +144,11 @@ jobs:
           "rabbitmq:management"
           "clamav/clamav-debian:latest"
           "busybox:latest"
-          "pgvector/pgvector:pg18"
+          "kong:2.8.1"
+          "supabase/gotrue:v2.170.0"
+          "supabase/postgres:15.8.1.049"
+          "supabase/postgres-meta:v0.86.1"
+          "supabase/studio:20250224-d10db0f"
         )
 
         # Check if any cached tar files exist (more reliable than cache-hit)
.github/workflows/claude.yml (vendored, 6 changes)
@@ -160,7 +160,11 @@ jobs:
           "rabbitmq:management"
           "clamav/clamav-debian:latest"
           "busybox:latest"
-          "pgvector/pgvector:pg18"
+          "kong:2.8.1"
+          "supabase/gotrue:v2.170.0"
+          "supabase/postgres:15.8.1.049"
+          "supabase/postgres-meta:v0.86.1"
+          "supabase/studio:20250224-d10db0f"
         )
 
         # Check if any cached tar files exist (more reliable than cache-hit)
.github/workflows/copilot-setup-steps.yml (vendored, 6 changes)
@@ -142,7 +142,11 @@ jobs:
           "rabbitmq:management"
           "clamav/clamav-debian:latest"
           "busybox:latest"
-          "pgvector/pgvector:pg18"
+          "kong:2.8.1"
+          "supabase/gotrue:v2.170.0"
+          "supabase/postgres:15.8.1.049"
+          "supabase/postgres-meta:v0.86.1"
+          "supabase/studio:20250224-d10db0f"
         )
 
         # Check if any cached tar files exist (more reliable than cache-hit)
.github/workflows/platform-backend-ci.yml (vendored, 44 changes)
@@ -2,13 +2,13 @@ name: AutoGPT Platform - Backend CI
 
 on:
   push:
-    branches: [master, dev, ci-test*, native-auth]
+    branches: [master, dev, ci-test*]
     paths:
       - ".github/workflows/platform-backend-ci.yml"
       - "autogpt_platform/backend/**"
       - "autogpt_platform/autogpt_libs/**"
   pull_request:
-    branches: [master, dev, release-*, native-auth]
+    branches: [master, dev, release-*]
     paths:
       - ".github/workflows/platform-backend-ci.yml"
       - "autogpt_platform/backend/**"
@@ -36,19 +36,6 @@ jobs:
     runs-on: ubuntu-latest
 
     services:
-      postgres:
-        image: pgvector/pgvector:pg18
-        ports:
-          - 5432:5432
-        env:
-          POSTGRES_USER: postgres
-          POSTGRES_PASSWORD: your-super-secret-and-long-postgres-password
-          POSTGRES_DB: postgres
-        options: >-
-          --health-cmd "pg_isready -U postgres"
-          --health-interval 5s
-          --health-timeout 5s
-          --health-retries 10
       redis:
         image: redis:latest
         ports:
@@ -91,6 +78,11 @@ jobs:
         with:
           python-version: ${{ matrix.python-version }}
 
+      - name: Setup Supabase
+        uses: supabase/setup-cli@v1
+        with:
+          version: 1.178.1
+
       - id: get_date
         name: Get date
        run: echo "date=$(date +'%Y-%m-%d')" >> $GITHUB_OUTPUT
@@ -144,6 +136,16 @@ jobs:
       - name: Generate Prisma Client
         run: poetry run prisma generate
 
+      - id: supabase
+        name: Start Supabase
+        working-directory: .
+        run: |
+          supabase init
+          supabase start --exclude postgres-meta,realtime,storage-api,imgproxy,inbucket,studio,edge-runtime,logflare,vector,supavisor
+          supabase status -o env | sed 's/="/=/; s/"$//' >> $GITHUB_OUTPUT
+          # outputs:
+          # DB_URL, API_URL, GRAPHQL_URL, ANON_KEY, SERVICE_ROLE_KEY, JWT_SECRET
+
       - name: Wait for ClamAV to be ready
         run: |
           echo "Waiting for ClamAV daemon to start..."
@@ -176,8 +178,8 @@ jobs:
       - name: Run Database Migrations
         run: poetry run prisma migrate dev --name updates
         env:
-          DATABASE_URL: postgresql://postgres:your-super-secret-and-long-postgres-password@localhost:5432/postgres
-          DIRECT_URL: postgresql://postgres:your-super-secret-and-long-postgres-password@localhost:5432/postgres
+          DATABASE_URL: ${{ steps.supabase.outputs.DB_URL }}
+          DIRECT_URL: ${{ steps.supabase.outputs.DB_URL }}
 
       - id: lint
         name: Run Linter
@@ -193,9 +195,11 @@ jobs:
         if: success() || (failure() && steps.lint.outcome == 'failure')
         env:
           LOG_LEVEL: ${{ runner.debug && 'DEBUG' || 'INFO' }}
-          DATABASE_URL: postgresql://postgres:your-super-secret-and-long-postgres-password@localhost:5432/postgres
-          DIRECT_URL: postgresql://postgres:your-super-secret-and-long-postgres-password@localhost:5432/postgres
-          JWT_SECRET: your-super-secret-jwt-token-with-at-least-32-characters-long
+          DATABASE_URL: ${{ steps.supabase.outputs.DB_URL }}
+          DIRECT_URL: ${{ steps.supabase.outputs.DB_URL }}
+          SUPABASE_URL: ${{ steps.supabase.outputs.API_URL }}
+          SUPABASE_SERVICE_ROLE_KEY: ${{ steps.supabase.outputs.SERVICE_ROLE_KEY }}
+          JWT_VERIFY_KEY: ${{ steps.supabase.outputs.JWT_SECRET }}
          REDIS_HOST: "localhost"
          REDIS_PORT: "6379"
          ENCRYPTION_KEY: "dvziYgz0KSK8FENhju0ZYi8-fRTfAdlz6YLhdB_jhNw=" # DO NOT USE IN PRODUCTION!!
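The `supabase status -o env | sed 's/="/=/; s/"$//'` line above strips the quoting so each KEY=value pair can go straight into $GITHUB_OUTPUT. A Python re-sketch of the same transform (the sample values are placeholders, not real CI output):

import re

raw = 'DB_URL="postgresql://postgres:postgres@127.0.0.1:54322/postgres"\nJWT_SECRET="super-secret-placeholder"'
for line in raw.splitlines():
    # KEY="value" -> KEY=value, mirroring the sed expression in the workflow
    print(re.sub(r'^([A-Z_]+)="(.*)"$', r"\1=\2", line))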
.github/workflows/platform-frontend-ci.yml (vendored, 5 changes)
@@ -2,12 +2,11 @@ name: AutoGPT Platform - Frontend CI
 
 on:
   push:
-    branches: [master, dev, native-auth]
+    branches: [master, dev]
     paths:
       - ".github/workflows/platform-frontend-ci.yml"
       - "autogpt_platform/frontend/**"
   pull_request:
-    branches: [master, dev, native-auth]
     paths:
       - ".github/workflows/platform-frontend-ci.yml"
       - "autogpt_platform/frontend/**"
@@ -148,7 +147,7 @@ jobs:
       - name: Enable corepack
         run: corepack enable
 
-      - name: Copy default platform .env
+      - name: Copy default supabase .env
         run: |
           cp ../.env.default ../.env
.github/workflows/platform-fullstack-ci.yml (vendored, 56 changes)
@@ -1,13 +1,12 @@
-name: AutoGPT Platform - Fullstack CI
+name: AutoGPT Platform - Frontend CI
 
 on:
   push:
-    branches: [master, dev, native-auth]
+    branches: [master, dev]
     paths:
       - ".github/workflows/platform-fullstack-ci.yml"
       - "autogpt_platform/**"
   pull_request:
-    branches: [master, dev, native-auth]
     paths:
       - ".github/workflows/platform-fullstack-ci.yml"
       - "autogpt_platform/**"
@@ -59,11 +58,14 @@ jobs:
   types:
     runs-on: ubuntu-latest
     needs: setup
     timeout-minutes: 10
+    strategy:
+      fail-fast: false
 
     steps:
       - name: Checkout repository
         uses: actions/checkout@v4
+        with:
+          submodules: recursive
 
       - name: Set up Node.js
         uses: actions/setup-node@v4
@@ -73,6 +75,18 @@ jobs:
       - name: Enable corepack
         run: corepack enable
 
+      - name: Copy default supabase .env
+        run: |
+          cp ../.env.default ../.env
+
+      - name: Copy backend .env
+        run: |
+          cp ../backend/.env.default ../backend/.env
+
+      - name: Run docker compose
+        run: |
+          docker compose -f ../docker-compose.yml --profile local --profile deps_backend up -d
+
       - name: Restore dependencies cache
         uses: actions/cache@v4
         with:
@@ -87,12 +101,36 @@ jobs:
       - name: Setup .env
         run: cp .env.default .env
 
+      - name: Wait for services to be ready
+        run: |
+          echo "Waiting for rest_server to be ready..."
+          timeout 60 sh -c 'until curl -f http://localhost:8006/health 2>/dev/null; do sleep 2; done' || echo "Rest server health check timeout, continuing..."
+          echo "Waiting for database to be ready..."
+          timeout 60 sh -c 'until docker compose -f ../docker-compose.yml exec -T db pg_isready -U postgres 2>/dev/null; do sleep 2; done' || echo "Database ready check timeout, continuing..."
+
       - name: Generate API queries
-        run: pnpm generate:api
+        run: pnpm generate:api:force
 
+      - name: Check for API schema changes
+        run: |
+          if ! git diff --exit-code src/app/api/openapi.json; then
+            echo "❌ API schema changes detected in src/app/api/openapi.json"
+            echo ""
+            echo "The openapi.json file has been modified after running 'pnpm generate:api-all'."
+            echo "This usually means changes have been made in the BE endpoints without updating the Frontend."
+            echo "The API schema is now out of sync with the Front-end queries."
+            echo ""
+            echo "To fix this:"
+            echo "1. Pull the backend 'docker compose pull && docker compose up -d --build --force-recreate'"
+            echo "2. Run 'pnpm generate:api' locally"
+            echo "3. Run 'pnpm types' locally"
+            echo "4. Fix any TypeScript errors that may have been introduced"
+            echo "5. Commit and push your changes"
+            echo ""
+            exit 1
+          else
+            echo "✅ No API schema changes detected"
+          fi
+
       - name: Run Typescript checks
         run: pnpm types
 
     env:
       CI: true
       PLAIN_OUTPUT: True
@@ -49,5 +49,5 @@ Use conventional commit messages for all commits (e.g. `feat(backend): add API`)
 - Keep out-of-scope changes under 20% of the PR.
 - Ensure PR descriptions are complete.
 - For changes touching `data/*.py`, validate user ID checks or explain why not needed.
-- If adding protected frontend routes, update `frontend/lib/auth/helpers.ts`.
+- If adding protected frontend routes, update `frontend/lib/supabase/middleware.ts`.
 - Use the linear ticket branch structure if given codex/open-1668-resume-dropped-runs
@@ -5,6 +5,12 @@
 
 POSTGRES_PASSWORD=your-super-secret-and-long-postgres-password
 JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
+ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
+SERVICE_ROLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
+DASHBOARD_USERNAME=supabase
+DASHBOARD_PASSWORD=this_password_is_insecure_and_should_be_updated
+SECRET_KEY_BASE=UpNVntn3cDxHJpq99YMc1T1AQgQpc8kfYTuRgBiYa15BLrx8etQoXz3gZv1/u2oq
+VAULT_ENC_KEY=your-encryption-key-32-chars-min
 
 
 ############
@@ -18,31 +24,100 @@ POSTGRES_PORT=5432
 
 
 ############
-# Auth - Native authentication configuration
+# Supavisor -- Database pooler
 ############
+POOLER_PROXY_PORT_TRANSACTION=6543
+POOLER_DEFAULT_POOL_SIZE=20
+POOLER_MAX_CLIENT_CONN=100
+POOLER_TENANT_ID=your-tenant-id
+
+
+############
+# API Proxy - Configuration for the Kong Reverse proxy.
+############
+
+KONG_HTTP_PORT=8000
+KONG_HTTPS_PORT=8443
+
+
+############
+# API - Configuration for PostgREST.
+############
+
+PGRST_DB_SCHEMAS=public,storage,graphql_public
+
+
+############
+# Auth - Configuration for the GoTrue authentication server.
+############
+
+## General
+SITE_URL=http://localhost:3000
+ADDITIONAL_REDIRECT_URLS=
+JWT_EXPIRY=3600
+DISABLE_SIGNUP=false
+API_EXTERNAL_URL=http://localhost:8000
 
-# JWT token configuration
-ACCESS_TOKEN_EXPIRE_MINUTES=15
-REFRESH_TOKEN_EXPIRE_DAYS=7
-JWT_ISSUER=autogpt-platform
+## Mailer Config
+MAILER_URLPATHS_CONFIRMATION="/auth/v1/verify"
+MAILER_URLPATHS_INVITE="/auth/v1/verify"
+MAILER_URLPATHS_RECOVERY="/auth/v1/verify"
+MAILER_URLPATHS_EMAIL_CHANGE="/auth/v1/verify"
 
-# Google OAuth (optional)
-GOOGLE_CLIENT_ID=
-GOOGLE_CLIENT_SECRET=
+## Email auth
+ENABLE_EMAIL_SIGNUP=true
+ENABLE_EMAIL_AUTOCONFIRM=false
+SMTP_ADMIN_EMAIL=admin@example.com
+SMTP_HOST=supabase-mail
+SMTP_PORT=2500
+SMTP_USER=fake_mail_user
+SMTP_PASS=fake_mail_password
+SMTP_SENDER_NAME=fake_sender
+ENABLE_ANONYMOUS_USERS=false
+
+## Phone auth
+ENABLE_PHONE_SIGNUP=true
+ENABLE_PHONE_AUTOCONFIRM=true
 
 
 ############
-# Email configuration (optional)
+# Studio - Configuration for the Dashboard
 ############
 
-SMTP_HOST=
-SMTP_PORT=587
-SMTP_USER=
-SMTP_PASS=
-SMTP_FROM_EMAIL=noreply@example.com
+STUDIO_DEFAULT_ORGANIZATION=Default Organization
+STUDIO_DEFAULT_PROJECT=Default Project
+
+STUDIO_PORT=3000
+# replace if you intend to use Studio outside of localhost
+SUPABASE_PUBLIC_URL=http://localhost:8000
+
+# Enable webp support
+IMGPROXY_ENABLE_WEBP_DETECTION=true
+
+# Add your OpenAI API key to enable SQL Editor Assistant
+OPENAI_API_KEY=
+
+
+############
+# Functions - Configuration for Functions
+############
+# NOTE: VERIFY_JWT applies to all functions. Per-function VERIFY_JWT is not supported yet.
+FUNCTIONS_VERIFY_JWT=false
+
+
+############
+# Logs - Configuration for Logflare
+# Please refer to https://supabase.com/docs/reference/self-hosting-analytics/introduction
+############
+
+LOGFLARE_LOGGER_BACKEND_API_KEY=your-super-secret-and-long-logflare-key
+
+# Change vector.toml sinks to reflect this change
+LOGFLARE_API_KEY=your-super-secret-and-long-logflare-key
+
+# Docker socket location - this value will differ depending on your OS
+DOCKER_SOCKET_LOCATION=/var/run/docker.sock
+
+# Google Cloud Project details
+GOOGLE_PROJECT_ID=GOOGLE_PROJECT_ID
+GOOGLE_PROJECT_NUMBER=GOOGLE_PROJECT_NUMBER
@@ -1,6 +1,6 @@
 .PHONY: start-core stop-core logs-core format lint migrate run-backend run-frontend load-store-agents
 
-# Run just PostgreSQL + Redis + RabbitMQ + ClamAV
+# Run just Supabase + Redis + RabbitMQ
 start-core:
 	docker compose up -d deps
 
@@ -49,7 +49,7 @@ load-store-agents:
 help:
 	@echo "Usage: make <target>"
 	@echo "Targets:"
-	@echo " start-core - Start just the core services (PostgreSQL, Redis, RabbitMQ, ClamAV) in background"
+	@echo " start-core - Start just the core services (Supabase, Redis, RabbitMQ) in background"
 	@echo " stop-core - Stop the core services"
 	@echo " reset-db - Reset the database by deleting the volume"
 	@echo " logs-core - Tail the logs for core services"
@@ -16,37 +16,17 @@ ALGO_RECOMMENDATION = (
     "We highly recommend using an asymmetric algorithm such as ES256, "
     "because when leaked, a shared secret would allow anyone to "
     "forge valid tokens and impersonate users. "
-    "More info: https://pyjwt.readthedocs.io/en/stable/algorithms.html"
+    "More info: https://supabase.com/docs/guides/auth/signing-keys#choosing-the-right-signing-algorithm"  # noqa
 )
 
 
 class Settings:
     def __init__(self):
         # JWT verification key (public key for asymmetric, shared secret for symmetric)
         self.JWT_VERIFY_KEY: str = os.getenv(
             "JWT_VERIFY_KEY", os.getenv("SUPABASE_JWT_SECRET", "")
         ).strip()
 
-        # JWT signing key (private key for asymmetric, shared secret for symmetric)
-        # Falls back to JWT_VERIFY_KEY for symmetric algorithms like HS256
-        self.JWT_SIGN_KEY: str = os.getenv("JWT_SIGN_KEY", self.JWT_VERIFY_KEY).strip()
-
         self.JWT_ALGORITHM: str = os.getenv("JWT_SIGN_ALGORITHM", "HS256").strip()
 
-        # Token expiration settings
-        self.ACCESS_TOKEN_EXPIRE_MINUTES: int = int(
-            os.getenv("ACCESS_TOKEN_EXPIRE_MINUTES", "15")
-        )
-        self.REFRESH_TOKEN_EXPIRE_DAYS: int = int(
-            os.getenv("REFRESH_TOKEN_EXPIRE_DAYS", "7")
-        )
-
-        # JWT issuer claim
-        self.JWT_ISSUER: str = os.getenv("JWT_ISSUER", "autogpt-platform").strip()
-
-        # JWT audience claim
-        self.JWT_AUDIENCE: str = os.getenv("JWT_AUDIENCE", "authenticated").strip()
-
         self.validate()
 
     def validate(self):
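The ALGO_RECOMMENDATION text survives the change for a reason: with a symmetric algorithm the verify key doubles as the signing key. A minimal sketch of the asymmetric alternative it recommends, assuming PyJWT with the "crypto" extra (as pinned in pyproject.toml later in this diff); nothing here is taken from the codebase itself:

import jwt
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ec

# Generate an ES256 (P-256) key pair; only the private key can sign.
private_key = ec.generate_private_key(ec.SECP256R1())
private_pem = private_key.private_bytes(
    serialization.Encoding.PEM,
    serialization.PrivateFormat.PKCS8,
    serialization.NoEncryption(),
)
public_pem = private_key.public_key().public_bytes(
    serialization.Encoding.PEM,
    serialization.PublicFormat.SubjectPublicKeyInfo,
)

token = jwt.encode({"sub": "user-123", "aud": "authenticated"}, private_pem, algorithm="ES256")
claims = jwt.decode(token, public_pem, algorithms=["ES256"], audience="authenticated")
# Leaking public_pem lets holders verify tokens but not forge them.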
@@ -1,29 +1,25 @@
 from fastapi import FastAPI
-from fastapi.openapi.utils import get_openapi
 
 from .jwt_utils import bearer_jwt_auth
 
 
 def add_auth_responses_to_openapi(app: FastAPI) -> None:
     """
-    Set up custom OpenAPI schema generation that adds 401 responses
+    Patch a FastAPI instance's `openapi()` method to add 401 responses
     to all authenticated endpoints.
 
     This is needed when using HTTPBearer with auto_error=False to get proper
     401 responses instead of 403, but FastAPI only automatically adds security
     responses when auto_error=True.
     """
+    # Wrap current method to allow stacking OpenAPI schema modifiers like this
+    wrapped_openapi = app.openapi
+
     def custom_openapi():
         if app.openapi_schema:
             return app.openapi_schema
 
-        openapi_schema = get_openapi(
-            title=app.title,
-            version=app.version,
-            description=app.description,
-            routes=app.routes,
-        )
+        openapi_schema = wrapped_openapi()
 
         # Add 401 response to all endpoints that have security requirements
         for path, methods in openapi_schema["paths"].items():
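The new "wrap and stack" shape matters because several schema modifiers may want to run: each one captures the app's current openapi() and calls it first. A self-contained sketch of the pattern with an illustrative modifier (the flag name is invented for the example):

from fastapi import FastAPI

def add_x_experimental_flag(app: FastAPI) -> None:
    wrapped_openapi = app.openapi  # whatever generator is installed right now

    def custom_openapi():
        if app.openapi_schema:  # FastAPI caches the generated schema
            return app.openapi_schema
        schema = wrapped_openapi()  # run the previous generator first
        schema["info"]["x-experimental"] = True
        app.openapi_schema = schema
        return schema

    app.openapi = custom_openapi

app = FastAPI(title="demo")
add_x_experimental_flag(app)
assert app.openapi()["info"]["x-experimental"] is True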
@@ -1,8 +1,4 @@
-import hashlib
 import logging
-import secrets
-import uuid
-from datetime import datetime, timedelta, timezone
 from typing import Any
 
 import jwt
@@ -20,57 +16,6 @@ bearer_jwt_auth = HTTPBearer(
 )
 
 
-def create_access_token(
-    user_id: str,
-    email: str,
-    role: str = "authenticated",
-    email_verified: bool = False,
-) -> str:
-    """
-    Generate a new JWT access token.
-
-    :param user_id: The user's unique identifier
-    :param email: The user's email address
-    :param role: The user's role (default: "authenticated")
-    :param email_verified: Whether the user's email is verified
-    :return: Encoded JWT token
-    """
-    settings = get_settings()
-    now = datetime.now(timezone.utc)
-
-    payload = {
-        "sub": user_id,
-        "email": email,
-        "role": role,
-        "email_verified": email_verified,
-        "aud": settings.JWT_AUDIENCE,
-        "iss": settings.JWT_ISSUER,
-        "iat": now,
-        "exp": now + timedelta(minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES),
-        "jti": str(uuid.uuid4()),  # Unique token ID
-    }
-
-    return jwt.encode(payload, settings.JWT_SIGN_KEY, algorithm=settings.JWT_ALGORITHM)
-
-
-def create_refresh_token() -> tuple[str, str]:
-    """
-    Generate a new refresh token.
-
-    Returns a tuple of (raw_token, hashed_token).
-    The raw token should be sent to the client.
-    The hashed token should be stored in the database.
-    """
-    raw_token = secrets.token_urlsafe(64)
-    hashed_token = hashlib.sha256(raw_token.encode()).hexdigest()
-    return raw_token, hashed_token
-
-
-def hash_token(token: str) -> str:
-    """Hash a token using SHA-256."""
-    return hashlib.sha256(token.encode()).hexdigest()
-
-
 async def get_jwt_payload(
     credentials: HTTPAuthorizationCredentials | None = Security(bearer_jwt_auth),
 ) -> dict[str, Any]:
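The removed create_refresh_token()/hash_token() pair implements a standard pattern: hand the client a high-entropy random token and store only its SHA-256 digest, so a database leak does not leak usable tokens. Re-sketched from the deleted code for reference:

import hashlib
import secrets

def create_refresh_token() -> tuple[str, str]:
    raw_token = secrets.token_urlsafe(64)                          # sent to the client
    hashed_token = hashlib.sha256(raw_token.encode()).hexdigest()  # stored in the DB
    return raw_token, hashed_token

raw, stored = create_refresh_token()
# On refresh, hash the presented token and compare with the stored digest:
assert hashlib.sha256(raw.encode()).hexdigest() == stored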
@@ -107,19 +52,11 @@ def parse_jwt_token(token: str) -> dict[str, Any]:
     """
     settings = get_settings()
     try:
-        # Build decode options
-        options = {
-            "verify_aud": True,
-            "verify_iss": bool(settings.JWT_ISSUER),
-        }
-
         payload = jwt.decode(
             token,
             settings.JWT_VERIFY_KEY,
             algorithms=[settings.JWT_ALGORITHM],
-            audience=settings.JWT_AUDIENCE,
-            issuer=settings.JWT_ISSUER if settings.JWT_ISSUER else None,
-            options=options,
+            audience="authenticated",
         )
         return payload
     except jwt.ExpiredSignatureError:
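End to end, the simplified parse_jwt_token now verifies the signature plus the fixed "authenticated" audience (Supabase's default audience claim). A runnable sketch with a placeholder HS256 secret:

import jwt

VERIFY_KEY = "your-super-secret-jwt-token-with-at-least-32-characters-long"

def parse(token: str) -> dict:
    return jwt.decode(
        token,
        VERIFY_KEY,
        algorithms=["HS256"],
        audience="authenticated",
    )

token = jwt.encode({"sub": "user-123", "aud": "authenticated"}, VERIFY_KEY, algorithm="HS256")
assert parse(token)["sub"] == "user-123"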
@@ -11,7 +11,6 @@ class User:
     email: str
     phone_number: str
     role: str
-    email_verified: bool = False
 
     @classmethod
     def from_payload(cls, payload):
@@ -19,6 +18,5 @@ class User:
             user_id=payload["sub"],
             email=payload.get("email", ""),
             phone_number=payload.get("phone", ""),
-            role=payload.get("role", "authenticated"),
-            email_verified=payload.get("email_verified", False),
+            role=payload["role"],
         )
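Note the behavioural shift in from_payload: payload["role"] raises KeyError when the claim is missing, where the old payload.get("role", "authenticated") silently defaulted. In miniature:

payload = {"sub": "user-123", "email": "a@example.com"}
print(payload.get("role", "authenticated"))  # old behaviour: falls back to "authenticated"
try:
    payload["role"]  # new behaviour
except KeyError:
    print("tokens without a role claim now fail loudly")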
autogpt_platform/autogpt_libs/poetry.lock (generated, 414 changes)
@@ -48,21 +48,6 @@ files = [
     {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"},
 ]
 
-[[package]]
-name = "authlib"
-version = "1.6.6"
-description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients."
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
-    {file = "authlib-1.6.6-py2.py3-none-any.whl", hash = "sha256:7d9e9bc535c13974313a87f53e8430eb6ea3d1cf6ae4f6efcd793f2e949143fd"},
-    {file = "authlib-1.6.6.tar.gz", hash = "sha256:45770e8e056d0f283451d9996fbb59b70d45722b45d854d58f32878d0a40c38e"},
-]
-
-[package.dependencies]
-cryptography = "*"
-
 [[package]]
 name = "backports-asyncio-runner"
 version = "1.2.0"
@@ -76,71 +61,6 @@ files = [
     {file = "backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162"},
 ]
 
-[[package]]
-name = "bcrypt"
-version = "4.3.0"
-description = "Modern password hashing for your software and your servers"
-optional = false
-python-versions = ">=3.8"
-groups = ["main"]
-files = [
-    {file = "bcrypt-4.3.0-cp313-cp313t-macosx_10_12_universal2.whl", hash = "sha256:f01e060f14b6b57bbb72fc5b4a83ac21c443c9a2ee708e04a10e9192f90a6281"},
-    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5eeac541cefd0bb887a371ef73c62c3cd78535e4887b310626036a7c0a817bb"},
-    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59e1aa0e2cd871b08ca146ed08445038f42ff75968c7ae50d2fdd7860ade2180"},
-    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:0042b2e342e9ae3d2ed22727c1262f76cc4f345683b5c1715f0250cf4277294f"},
-    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74a8d21a09f5e025a9a23e7c0fd2c7fe8e7503e4d356c0a2c1486ba010619f09"},
-    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:0142b2cb84a009f8452c8c5a33ace5e3dfec4159e7735f5afe9a4d50a8ea722d"},
-    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_aarch64.whl", hash = "sha256:12fa6ce40cde3f0b899729dbd7d5e8811cb892d31b6f7d0334a1f37748b789fd"},
-    {file = "bcrypt-4.3.0-cp313-cp313t-manylinux_2_34_x86_64.whl", hash = "sha256:5bd3cca1f2aa5dbcf39e2aa13dd094ea181f48959e1071265de49cc2b82525af"},
-    {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:335a420cfd63fc5bc27308e929bee231c15c85cc4c496610ffb17923abf7f231"},
-    {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:0e30e5e67aed0187a1764911af023043b4542e70a7461ad20e837e94d23e1d6c"},
-    {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b8d62290ebefd49ee0b3ce7500f5dbdcf13b81402c05f6dafab9a1e1b27212f"},
-    {file = "bcrypt-4.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2ef6630e0ec01376f59a006dc72918b1bf436c3b571b80fa1968d775fa02fe7d"},
-    {file = "bcrypt-4.3.0-cp313-cp313t-win32.whl", hash = "sha256:7a4be4cbf241afee43f1c3969b9103a41b40bcb3a3f467ab19f891d9bc4642e4"},
-    {file = "bcrypt-4.3.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5c1949bf259a388863ced887c7861da1df681cb2388645766c89fdfd9004c669"},
-    {file = "bcrypt-4.3.0-cp38-abi3-macosx_10_12_universal2.whl", hash = "sha256:f81b0ed2639568bf14749112298f9e4e2b28853dab50a8b357e31798686a036d"},
-    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:864f8f19adbe13b7de11ba15d85d4a428c7e2f344bac110f667676a0ff84924b"},
-    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e36506d001e93bffe59754397572f21bb5dc7c83f54454c990c74a468cd589e"},
-    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:842d08d75d9fe9fb94b18b071090220697f9f184d4547179b60734846461ed59"},
-    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7c03296b85cb87db865d91da79bf63d5609284fc0cab9472fdd8367bbd830753"},
-    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:62f26585e8b219cdc909b6a0069efc5e4267e25d4a3770a364ac58024f62a761"},
-    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:beeefe437218a65322fbd0069eb437e7c98137e08f22c4660ac2dc795c31f8bb"},
-    {file = "bcrypt-4.3.0-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:97eea7408db3a5bcce4a55d13245ab3fa566e23b4c67cd227062bb49e26c585d"},
-    {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:191354ebfe305e84f344c5964c7cd5f924a3bfc5d405c75ad07f232b6dffb49f"},
-    {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:41261d64150858eeb5ff43c753c4b216991e0ae16614a308a15d909503617732"},
-    {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:33752b1ba962ee793fa2b6321404bf20011fe45b9afd2a842139de3011898fef"},
-    {file = "bcrypt-4.3.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:50e6e80a4bfd23a25f5c05b90167c19030cf9f87930f7cb2eacb99f45d1c3304"},
-    {file = "bcrypt-4.3.0-cp38-abi3-win32.whl", hash = "sha256:67a561c4d9fb9465ec866177e7aebcad08fe23aaf6fbd692a6fab69088abfc51"},
-    {file = "bcrypt-4.3.0-cp38-abi3-win_amd64.whl", hash = "sha256:584027857bc2843772114717a7490a37f68da563b3620f78a849bcb54dc11e62"},
-    {file = "bcrypt-4.3.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:0d3efb1157edebfd9128e4e46e2ac1a64e0c1fe46fb023158a407c7892b0f8c3"},
-    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08bacc884fd302b611226c01014eca277d48f0a05187666bca23aac0dad6fe24"},
-    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6746e6fec103fcd509b96bacdfdaa2fbde9a553245dbada284435173a6f1aef"},
-    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:afe327968aaf13fc143a56a3360cb27d4ad0345e34da12c7290f1b00b8fe9a8b"},
-    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d9af79d322e735b1fc33404b5765108ae0ff232d4b54666d46730f8ac1a43676"},
-    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f1e3ffa1365e8702dc48c8b360fef8d7afeca482809c5e45e653af82ccd088c1"},
-    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3004df1b323d10021fda07a813fd33e0fd57bef0e9a480bb143877f6cba996fe"},
-    {file = "bcrypt-4.3.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:531457e5c839d8caea9b589a1bcfe3756b0547d7814e9ce3d437f17da75c32b0"},
-    {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:17a854d9a7a476a89dcef6c8bd119ad23e0f82557afbd2c442777a16408e614f"},
-    {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6fb1fd3ab08c0cbc6826a2e0447610c6f09e983a281b919ed721ad32236b8b23"},
-    {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e965a9c1e9a393b8005031ff52583cedc15b7884fce7deb8b0346388837d6cfe"},
-    {file = "bcrypt-4.3.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:79e70b8342a33b52b55d93b3a59223a844962bef479f6a0ea318ebbcadf71505"},
-    {file = "bcrypt-4.3.0-cp39-abi3-win32.whl", hash = "sha256:b4d4e57f0a63fd0b358eb765063ff661328f69a04494427265950c71b992a39a"},
-    {file = "bcrypt-4.3.0-cp39-abi3-win_amd64.whl", hash = "sha256:e53e074b120f2877a35cc6c736b8eb161377caae8925c17688bd46ba56daaa5b"},
-    {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c950d682f0952bafcceaf709761da0a32a942272fad381081b51096ffa46cea1"},
-    {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:107d53b5c67e0bbc3f03ebf5b030e0403d24dda980f8e244795335ba7b4a027d"},
-    {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:b693dbb82b3c27a1604a3dff5bfc5418a7e6a781bb795288141e5f80cf3a3492"},
-    {file = "bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:b6354d3760fcd31994a14c89659dee887f1351a06e5dac3c1142307172a79f90"},
-    {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a839320bf27d474e52ef8cb16449bb2ce0ba03ca9f44daba6d93fa1d8828e48a"},
-    {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:bdc6a24e754a555d7316fa4774e64c6c3997d27ed2d1964d55920c7c227bc4ce"},
-    {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:55a935b8e9a1d2def0626c4269db3fcd26728cbff1e84f0341465c31c4ee56d8"},
-    {file = "bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:57967b7a28d855313a963aaea51bf6df89f833db4320da458e5b3c5ab6d4c938"},
-    {file = "bcrypt-4.3.0.tar.gz", hash = "sha256:3a3fd2204178b6d2adcf09cb4f6426ffef54762577a7c9b54c159008cb288c18"},
-]
-
-[package.extras]
-tests = ["pytest (>=3.2.1,!=3.3.0)"]
-typecheck = ["mypy"]
-
 [[package]]
 name = "cachetools"
 version = "5.5.2"
@@ -539,6 +459,21 @@ ssh = ["bcrypt (>=3.1.5)"]
 test = ["certifi (>=2024)", "cryptography-vectors (==45.0.6)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
 test-randomorder = ["pytest-randomly"]
 
+[[package]]
+name = "deprecation"
+version = "2.1.0"
+description = "A library to handle automated deprecations"
+optional = false
+python-versions = "*"
+groups = ["main"]
+files = [
+    {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"},
+    {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"},
+]
+
+[package.dependencies]
+packaging = "*"
+
 [[package]]
 name = "exceptiongroup"
 version = "1.3.0"
@@ -760,6 +695,23 @@ protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4
 [package.extras]
 grpc = ["grpcio (>=1.44.0,<2.0.0)"]
 
+[[package]]
+name = "gotrue"
+version = "2.12.3"
+description = "Python Client Library for Supabase Auth"
+optional = false
+python-versions = "<4.0,>=3.9"
+groups = ["main"]
+files = [
+    {file = "gotrue-2.12.3-py3-none-any.whl", hash = "sha256:b1a3c6a5fe3f92e854a026c4c19de58706a96fd5fbdcc3d620b2802f6a46a26b"},
+    {file = "gotrue-2.12.3.tar.gz", hash = "sha256:f874cf9d0b2f0335bfbd0d6e29e3f7aff79998cd1c14d2ad814db8c06cee3852"},
+]
+
+[package.dependencies]
+httpx = {version = ">=0.26,<0.29", extras = ["http2"]}
+pydantic = ">=1.10,<3"
+pyjwt = ">=2.10.1,<3.0.0"
+
 [[package]]
 name = "grpc-google-iam-v1"
 version = "0.14.2"
@@ -870,6 +822,94 @@ files = [
     {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"},
 ]
 
+[[package]]
+name = "h2"
+version = "4.2.0"
+description = "Pure-Python HTTP/2 protocol implementation"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "h2-4.2.0-py3-none-any.whl", hash = "sha256:479a53ad425bb29af087f3458a61d30780bc818e4ebcf01f0b536ba916462ed0"},
+    {file = "h2-4.2.0.tar.gz", hash = "sha256:c8a52129695e88b1a0578d8d2cc6842bbd79128ac685463b887ee278126ad01f"},
+]
+
+[package.dependencies]
+hpack = ">=4.1,<5"
+hyperframe = ">=6.1,<7"
+
+[[package]]
+name = "hpack"
+version = "4.1.0"
+description = "Pure-Python HPACK header encoding"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "hpack-4.1.0-py3-none-any.whl", hash = "sha256:157ac792668d995c657d93111f46b4535ed114f0c9c8d672271bbec7eae1b496"},
+    {file = "hpack-4.1.0.tar.gz", hash = "sha256:ec5eca154f7056aa06f196a557655c5b009b382873ac8d1e66e79e87535f1dca"},
+]
+
+[[package]]
+name = "httpcore"
+version = "1.0.9"
+description = "A minimal low-level HTTP client."
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+    {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"},
+    {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"},
+]
+
+[package.dependencies]
+certifi = "*"
+h11 = ">=0.16"
+
+[package.extras]
+asyncio = ["anyio (>=4.0,<5.0)"]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (==1.*)"]
+trio = ["trio (>=0.22.0,<1.0)"]
+
+[[package]]
+name = "httpx"
+version = "0.28.1"
+description = "The next generation HTTP client."
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+    {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"},
+    {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"},
+]
+
+[package.dependencies]
+anyio = "*"
+certifi = "*"
+h2 = {version = ">=3,<5", optional = true, markers = "extra == \"http2\""}
+httpcore = "==1.*"
+idna = "*"
+
+[package.extras]
+brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""]
+cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (==1.*)"]
+zstd = ["zstandard (>=0.18.0)"]
+
+[[package]]
+name = "hyperframe"
+version = "6.1.0"
+description = "Pure-Python HTTP/2 framing"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "hyperframe-6.1.0-py3-none-any.whl", hash = "sha256:b03380493a519fce58ea5af42e4a42317bf9bd425596f7a0835ffce80f1a42e5"},
+    {file = "hyperframe-6.1.0.tar.gz", hash = "sha256:f630908a00854a7adeabd6382b43923a4c4cd4b821fcb527e6ab9e15382a3b08"},
+]
+
 [[package]]
 name = "idna"
 version = "3.10"
@@ -996,7 +1036,7 @@ version = "25.0"
 description = "Core utilities for Python packages"
 optional = false
 python-versions = ">=3.8"
-groups = ["dev"]
+groups = ["main", "dev"]
 files = [
     {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"},
     {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"},
@@ -1018,6 +1058,24 @@ files = [
 dev = ["pre-commit", "tox"]
 testing = ["coverage", "pytest", "pytest-benchmark"]
 
+[[package]]
+name = "postgrest"
+version = "1.1.1"
+description = "PostgREST client for Python. This library provides an ORM interface to PostgREST."
+optional = false
+python-versions = "<4.0,>=3.9"
+groups = ["main"]
+files = [
+    {file = "postgrest-1.1.1-py3-none-any.whl", hash = "sha256:98a6035ee1d14288484bfe36235942c5fb2d26af6d8120dfe3efbe007859251a"},
+    {file = "postgrest-1.1.1.tar.gz", hash = "sha256:f3bb3e8c4602775c75c844a31f565f5f3dd584df4d36d683f0b67d01a86be322"},
+]
+
+[package.dependencies]
+deprecation = ">=2.1.0,<3.0.0"
+httpx = {version = ">=0.26,<0.29", extras = ["http2"]}
+pydantic = ">=1.9,<3.0"
+strenum = {version = ">=0.4.9,<0.5.0", markers = "python_version < \"3.11\""}
+
 [[package]]
 name = "proto-plus"
 version = "1.26.1"
@@ -1404,6 +1462,21 @@ pytest = ">=6.2.5"
 [package.extras]
 dev = ["pre-commit", "pytest-asyncio", "tox"]
 
+[[package]]
+name = "python-dateutil"
+version = "2.9.0.post0"
+description = "Extensions to the standard Python datetime module"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+groups = ["main"]
+files = [
+    {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
+    {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
+]
+
+[package.dependencies]
+six = ">=1.5"
+
 [[package]]
 name = "python-dotenv"
 version = "1.1.1"
@@ -1419,6 +1492,22 @@ files = [
 [package.extras]
 cli = ["click (>=5.0)"]
 
+[[package]]
+name = "realtime"
+version = "2.5.3"
+description = ""
+optional = false
+python-versions = "<4.0,>=3.9"
+groups = ["main"]
+files = [
+    {file = "realtime-2.5.3-py3-none-any.whl", hash = "sha256:eb0994636946eff04c4c7f044f980c8c633c7eb632994f549f61053a474ac970"},
+    {file = "realtime-2.5.3.tar.gz", hash = "sha256:0587594f3bc1c84bf007ff625075b86db6528843e03250dc84f4f2808be3d99a"},
+]
+
+[package.dependencies]
+typing-extensions = ">=4.14.0,<5.0.0"
+websockets = ">=11,<16"
+
 [[package]]
 name = "redis"
 version = "6.2.0"
@@ -1517,6 +1606,18 @@ files = [
     {file = "semver-3.0.4.tar.gz", hash = "sha256:afc7d8c584a5ed0a11033af086e8af226a9c0b206f313e0301f8dd7b6b589602"},
 ]
 
+[[package]]
+name = "six"
+version = "1.17.0"
+description = "Python 2 and 3 compatibility utilities"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+groups = ["main"]
+files = [
+    {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"},
+    {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"},
+]
+
 [[package]]
 name = "sniffio"
 version = "1.3.1"
@@ -1548,6 +1649,76 @@ typing-extensions = {version = ">=4.10.0", markers = "python_version < \"3.13\""
 [package.extras]
 full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"]
 
+[[package]]
+name = "storage3"
+version = "0.12.0"
+description = "Supabase Storage client for Python."
+optional = false
+python-versions = "<4.0,>=3.9"
+groups = ["main"]
+files = [
+    {file = "storage3-0.12.0-py3-none-any.whl", hash = "sha256:1c4585693ca42243ded1512b58e54c697111e91a20916cd14783eebc37e7c87d"},
+    {file = "storage3-0.12.0.tar.gz", hash = "sha256:94243f20922d57738bf42e96b9f5582b4d166e8bf209eccf20b146909f3f71b0"},
+]
+
+[package.dependencies]
+deprecation = ">=2.1.0,<3.0.0"
+httpx = {version = ">=0.26,<0.29", extras = ["http2"]}
+python-dateutil = ">=2.8.2,<3.0.0"
+
+[[package]]
+name = "strenum"
+version = "0.4.15"
+description = "An Enum that inherits from str."
+optional = false
+python-versions = "*"
+groups = ["main"]
+files = [
+    {file = "StrEnum-0.4.15-py3-none-any.whl", hash = "sha256:a30cda4af7cc6b5bf52c8055bc4bf4b2b6b14a93b574626da33df53cf7740659"},
+    {file = "StrEnum-0.4.15.tar.gz", hash = "sha256:878fb5ab705442070e4dd1929bb5e2249511c0bcf2b0eeacf3bcd80875c82eff"},
+]
+
+[package.extras]
+docs = ["myst-parser[linkify]", "sphinx", "sphinx-rtd-theme"]
+release = ["twine"]
+test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"]
+
+[[package]]
+name = "supabase"
+version = "2.16.0"
+description = "Supabase client for Python."
+optional = false
+python-versions = "<4.0,>=3.9"
+groups = ["main"]
+files = [
+    {file = "supabase-2.16.0-py3-none-any.whl", hash = "sha256:99065caab3d90a56650bf39fbd0e49740995da3738ab28706c61bd7f2401db55"},
+    {file = "supabase-2.16.0.tar.gz", hash = "sha256:98f3810158012d4ec0e3083f2e5515f5e10b32bd71e7d458662140e963c1d164"},
+]
+
+[package.dependencies]
+gotrue = ">=2.11.0,<3.0.0"
+httpx = ">=0.26,<0.29"
+postgrest = ">0.19,<1.2"
+realtime = ">=2.4.0,<2.6.0"
+storage3 = ">=0.10,<0.13"
+supafunc = ">=0.9,<0.11"
+
+[[package]]
+name = "supafunc"
+version = "0.10.1"
+description = "Library for Supabase Functions"
+optional = false
+python-versions = "<4.0,>=3.9"
+groups = ["main"]
+files = [
+    {file = "supafunc-0.10.1-py3-none-any.whl", hash = "sha256:26df9bd25ff2ef56cb5bfb8962de98f43331f7f8ff69572bac3ed9c3a9672040"},
+    {file = "supafunc-0.10.1.tar.gz", hash = "sha256:a5b33c8baecb6b5297d25da29a2503e2ec67ee6986f3d44c137e651b8a59a17d"},
+]
+
+[package.dependencies]
+httpx = {version = ">=0.26,<0.29", extras = ["http2"]}
+strenum = ">=0.4.15,<0.5.0"
+
 [[package]]
 name = "tomli"
 version = "2.2.1"
@@ -1656,6 +1827,85 @@ typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""}
|
||||
[package.extras]
|
||||
standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"]
|
||||
|
||||
[[package]]
|
||||
name = "websockets"
|
||||
version = "15.0.1"
|
||||
description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b"},
|
||||
{file = "websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205"},
|
||||
{file = "websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5756779642579d902eed757b21b0164cd6fe338506a8083eb58af5c372e39d9a"},
|
||||
{file = "websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdfe3e2a29e4db3659dbd5bbf04560cea53dd9610273917799f1cde46aa725e"},
|
||||
{file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c2529b320eb9e35af0fa3016c187dffb84a3ecc572bcee7c3ce302bfeba52bf"},
|
||||
{file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac1e5c9054fe23226fb11e05a6e630837f074174c4c2f0fe442996112a6de4fb"},
|
||||
{file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5df592cd503496351d6dc14f7cdad49f268d8e618f80dce0cd5a36b93c3fc08d"},
|
||||
{file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a34631031a8f05657e8e90903e656959234f3a04552259458aac0b0f9ae6fd9"},
|
||||
{file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d00075aa65772e7ce9e990cab3ff1de702aa09be3940d1dc88d5abf1ab8a09c"},
|
||||
{file = "websockets-15.0.1-cp310-cp310-win32.whl", hash = "sha256:1234d4ef35db82f5446dca8e35a7da7964d02c127b095e172e54397fb6a6c256"},
|
||||
{file = "websockets-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:39c1fec2c11dc8d89bba6b2bf1556af381611a173ac2b511cf7231622058af41"},
|
||||
{file = "websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431"},
|
||||
{file = "websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57"},
|
||||
{file = "websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905"},
|
||||
{file = "websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562"},
|
||||
{file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792"},
|
||||
{file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413"},
|
||||
{file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8"},
|
||||
{file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3"},
|
||||
{file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf"},
|
||||
{file = "websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85"},
|
||||
{file = "websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065"},
|
||||
{file = "websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3"},
|
||||
{file = "websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665"},
|
||||
{file = "websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2"},
|
||||
{file = "websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215"},
|
||||
{file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5"},
|
||||
{file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65"},
|
||||
{file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe"},
|
||||
{file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4"},
|
||||
{file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597"},
|
||||
{file = "websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9"},
|
||||
{file = "websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7"},
|
||||
{file = "websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931"},
|
||||
{file = "websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675"},
|
||||
{file = "websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151"},
|
||||
{file = "websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22"},
|
||||
{file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f"},
|
||||
{file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8"},
|
||||
{file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375"},
|
||||
{file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d"},
|
||||
{file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4"},
|
||||
{file = "websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa"},
|
||||
{file = "websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561"},
|
||||
{file = "websockets-15.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f4c04ead5aed67c8a1a20491d54cdfba5884507a48dd798ecaf13c74c4489f5"},
|
||||
{file = "websockets-15.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abdc0c6c8c648b4805c5eacd131910d2a7f6455dfd3becab248ef108e89ab16a"},
|
||||
{file = "websockets-15.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a625e06551975f4b7ea7102bc43895b90742746797e2e14b70ed61c43a90f09b"},
|
||||
{file = "websockets-15.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d591f8de75824cbb7acad4e05d2d710484f15f29d4a915092675ad3456f11770"},
|
||||
{file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47819cea040f31d670cc8d324bb6435c6f133b8c7a19ec3d61634e62f8d8f9eb"},
|
||||
{file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac017dd64572e5c3bd01939121e4d16cf30e5d7e110a119399cf3133b63ad054"},
|
||||
{file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4a9fac8e469d04ce6c25bb2610dc535235bd4aa14996b4e6dbebf5e007eba5ee"},
|
||||
{file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363c6f671b761efcb30608d24925a382497c12c506b51661883c3e22337265ed"},
|
||||
{file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2034693ad3097d5355bfdacfffcbd3ef5694f9718ab7f29c29689a9eae841880"},
|
||||
{file = "websockets-15.0.1-cp39-cp39-win32.whl", hash = "sha256:3b1ac0d3e594bf121308112697cf4b32be538fb1444468fb0a6ae4feebc83411"},
|
||||
{file = "websockets-15.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7643a03db5c95c799b89b31c036d5f27eeb4d259c798e878d6937d71832b1e4"},
|
||||
{file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3"},
|
||||
{file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1"},
|
||||
{file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475"},
|
||||
{file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9"},
|
||||
{file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04"},
|
||||
{file = "websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122"},
|
||||
{file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7f493881579c90fc262d9cdbaa05a6b54b3811c2f300766748db79f098db9940"},
|
||||
{file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:47b099e1f4fbc95b701b6e85768e1fcdaf1630f3cbe4765fa216596f12310e2e"},
|
||||
{file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67f2b6de947f8c757db2db9c71527933ad0019737ec374a8a6be9a956786aaf9"},
|
||||
{file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d08eb4c2b7d6c41da6ca0600c077e93f5adcfd979cd777d747e9ee624556da4b"},
|
||||
{file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b826973a4a2ae47ba357e4e82fa44a463b8f168e1ca775ac64521442b19e87f"},
|
||||
{file = "websockets-15.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:21c1fa28a6a7e3cbdc171c694398b6df4744613ce9b36b1a498e816787e28123"},
|
||||
{file = "websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f"},
|
||||
{file = "websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee"},
|
||||
]
|
||||
|
||||
[[package]]
name = "zipp"
version = "3.23.0"
@@ -1679,4 +1929,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.10,<4.0"
content-hash = "de209c97aa0feb29d669a20e4422d51bdf3a0872ec37e85ce9b88ce726fcee7a"
content-hash = "0c40b63c3c921846cf05ccfb4e685d4959854b29c2c302245f9832e20aac6954"

@@ -18,8 +18,7 @@ pydantic = "^2.11.7"
pydantic-settings = "^2.10.1"
pyjwt = { version = "^2.10.1", extras = ["crypto"] }
redis = "^6.2.0"
bcrypt = "^4.1.0"
authlib = "^1.3.0"
supabase = "^2.16.0"
uvicorn = "^0.35.0"

[tool.poetry.group.dev.dependencies]

@@ -27,15 +27,10 @@ REDIS_PORT=6379
RABBITMQ_DEFAULT_USER=rabbitmq_user_default
RABBITMQ_DEFAULT_PASS=k0VMxyIJF9S35f3x2uaw5IWAl6Y536O7

# JWT Authentication
# Generate a secure random key: python -c "import secrets; print(secrets.token_urlsafe(32))"
JWT_SIGN_KEY=your-super-secret-jwt-token-with-at-least-32-characters-long
# Supabase Authentication
SUPABASE_URL=http://localhost:8000
SUPABASE_SERVICE_ROLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
JWT_VERIFY_KEY=your-super-secret-jwt-token-with-at-least-32-characters-long
JWT_SIGN_ALGORITHM=HS256
ACCESS_TOKEN_EXPIRE_MINUTES=15
REFRESH_TOKEN_EXPIRE_DAYS=7
JWT_ISSUER=autogpt-platform
JWT_AUDIENCE=authenticated

## ===== REQUIRED SECURITY KEYS ===== ##
# Generate using: from cryptography.fernet import Fernet;Fernet.generate_key().decode()

3
autogpt_platform/backend/.gitignore
vendored
@@ -18,6 +18,3 @@ load-tests/results/
load-tests/*.json
load-tests/*.log
load-tests/node_modules/*

# Migration backups (contain user data)
migration_backups/

@@ -108,7 +108,7 @@ import fastapi.testclient
import pytest
from pytest_snapshot.plugin import Snapshot

from backend.server.v2.myroute import router
from backend.api.features.myroute import router

app = fastapi.FastAPI()
app.include_router(router)
@@ -149,7 +149,7 @@ These provide the easiest way to set up authentication mocking in test modules:
import fastapi
import fastapi.testclient
import pytest
from backend.server.v2.myroute import router
from backend.api.features.myroute import router

app = fastapi.FastAPI()
app.include_router(router)

@@ -3,12 +3,12 @@ from typing import Dict, Set

from fastapi import WebSocket

from backend.api.model import NotificationPayload, WSMessage, WSMethod
from backend.data.execution import (
    ExecutionEventType,
    GraphExecutionEvent,
    NodeExecutionEvent,
)
from backend.server.model import NotificationPayload, WSMessage, WSMethod

_EVENT_TYPE_TO_METHOD_MAP: dict[ExecutionEventType, WSMethod] = {
    ExecutionEventType.GRAPH_EXEC_UPDATE: WSMethod.GRAPH_EXECUTION_EVENT,
@@ -4,13 +4,13 @@ from unittest.mock import AsyncMock
import pytest
from fastapi import WebSocket

from backend.api.conn_manager import ConnectionManager
from backend.api.model import NotificationPayload, WSMessage, WSMethod
from backend.data.execution import (
    ExecutionStatus,
    GraphExecutionEvent,
    NodeExecutionEvent,
)
from backend.server.conn_manager import ConnectionManager
from backend.server.model import NotificationPayload, WSMessage, WSMethod


@pytest.fixture
25
autogpt_platform/backend/backend/api/external/fastapi_app.py
vendored
Normal file
@@ -0,0 +1,25 @@
from fastapi import FastAPI

from backend.api.middleware.security import SecurityHeadersMiddleware
from backend.monitoring.instrumentation import instrument_fastapi

from .v1.routes import v1_router

external_api = FastAPI(
    title="AutoGPT External API",
    description="External API for AutoGPT integrations",
    docs_url="/docs",
    version="1.0",
)

external_api.add_middleware(SecurityHeadersMiddleware)
external_api.include_router(v1_router, prefix="/v1")

# Add Prometheus instrumentation
instrument_fastapi(
    external_api,
    service_name="external-api",
    expose_endpoint=True,
    endpoint="/metrics",
    include_in_schema=True,
)
@@ -16,6 +16,8 @@ from fastapi import APIRouter, Body, HTTPException, Path, Security, status
from prisma.enums import APIKeyPermission
from pydantic import BaseModel, Field, SecretStr

from backend.api.external.middleware import require_permission
from backend.api.features.integrations.models import get_all_provider_names
from backend.data.auth.base import APIAuthorizationInfo
from backend.data.model import (
    APIKeyCredentials,
@@ -28,8 +30,6 @@ from backend.data.model import (
from backend.integrations.creds_manager import IntegrationCredentialsManager
from backend.integrations.oauth import CREDENTIALS_BY_PROVIDER, HANDLERS_BY_NAME
from backend.integrations.providers import ProviderName
from backend.server.external.middleware import require_permission
from backend.server.integrations.models import get_all_provider_names
from backend.util.settings import Settings

if TYPE_CHECKING:
@@ -8,23 +8,29 @@ from prisma.enums import AgentExecutionStatus, APIKeyPermission
from pydantic import BaseModel, Field
from typing_extensions import TypedDict

import backend.api.features.store.cache as store_cache
import backend.api.features.store.model as store_model
import backend.data.block
import backend.server.v2.store.cache as store_cache
import backend.server.v2.store.model as store_model
from backend.api.external.middleware import require_permission
from backend.data import execution as execution_db
from backend.data import graph as graph_db
from backend.data import user as user_db
from backend.data.auth.base import APIAuthorizationInfo
from backend.data.block import BlockInput, CompletedBlockOutput
from backend.executor.utils import add_graph_execution
from backend.server.external.middleware import require_permission
from backend.util.settings import Settings

from .integrations import integrations_router
from .tools import tools_router

settings = Settings()
logger = logging.getLogger(__name__)

v1_router = APIRouter()

v1_router.include_router(integrations_router)
v1_router.include_router(tools_router)


class UserInfoResponse(BaseModel):
    id: str
@@ -14,11 +14,11 @@ from fastapi import APIRouter, Security
from prisma.enums import APIKeyPermission
from pydantic import BaseModel, Field

from backend.api.external.middleware import require_permission
from backend.api.features.chat.model import ChatSession
from backend.api.features.chat.tools import find_agent_tool, run_agent_tool
from backend.api.features.chat.tools.models import ToolResponseBase
from backend.data.auth.base import APIAuthorizationInfo
from backend.server.external.middleware import require_permission
from backend.server.v2.chat.model import ChatSession
from backend.server.v2.chat.tools import find_agent_tool, run_agent_tool
from backend.server.v2.chat.tools.models import ToolResponseBase

logger = logging.getLogger(__name__)

@@ -6,9 +6,10 @@ from fastapi import APIRouter, Body, Security
from prisma.enums import CreditTransactionType

from backend.data.credit import admin_get_user_history, get_user_credit_model
from backend.server.v2.admin.model import AddUserCreditsResponse, UserHistoryResponse
from backend.util.json import SafeJson

from .model import AddUserCreditsResponse, UserHistoryResponse

logger = logging.getLogger(__name__)

@@ -9,14 +9,15 @@ import pytest_mock
from autogpt_libs.auth.jwt_utils import get_jwt_payload
from pytest_snapshot.plugin import Snapshot

import backend.server.v2.admin.credit_admin_routes as credit_admin_routes
import backend.server.v2.admin.model as admin_model
from backend.data.model import UserTransaction
from backend.util.json import SafeJson
from backend.util.models import Pagination

from .credit_admin_routes import router as credit_admin_router
from .model import UserHistoryResponse

app = fastapi.FastAPI()
app.include_router(credit_admin_routes.router)
app.include_router(credit_admin_router)

client = fastapi.testclient.TestClient(app)

@@ -30,7 +31,7 @@ def setup_app_admin_auth(mock_jwt_admin):


def test_add_user_credits_success(
    mocker: pytest_mock.MockFixture,
    mocker: pytest_mock.MockerFixture,
    configured_snapshot: Snapshot,
    admin_user_id: str,
    target_user_id: str,
@@ -42,7 +43,7 @@ def test_add_user_credits_success(
        return_value=(1500, "transaction-123-uuid")
    )
    mocker.patch(
        "backend.server.v2.admin.credit_admin_routes.get_user_credit_model",
        "backend.api.features.admin.credit_admin_routes.get_user_credit_model",
        return_value=mock_credit_model,
    )

@@ -84,7 +85,7 @@ def test_add_user_credits_success(


def test_add_user_credits_negative_amount(
    mocker: pytest_mock.MockFixture,
    mocker: pytest_mock.MockerFixture,
    snapshot: Snapshot,
) -> None:
    """Test credit deduction by admin (negative amount)"""
@@ -94,7 +95,7 @@ def test_add_user_credits_negative_amount(
        return_value=(200, "transaction-456-uuid")
    )
    mocker.patch(
        "backend.server.v2.admin.credit_admin_routes.get_user_credit_model",
        "backend.api.features.admin.credit_admin_routes.get_user_credit_model",
        return_value=mock_credit_model,
    )

@@ -119,12 +120,12 @@ def test_add_user_credits_negative_amount(


def test_get_user_history_success(
    mocker: pytest_mock.MockFixture,
    mocker: pytest_mock.MockerFixture,
    snapshot: Snapshot,
) -> None:
    """Test successful retrieval of user credit history"""
    # Mock the admin_get_user_history function
    mock_history_response = admin_model.UserHistoryResponse(
    mock_history_response = UserHistoryResponse(
        history=[
            UserTransaction(
                user_id="user-1",
@@ -150,7 +151,7 @@ def test_get_user_history_success(
    )

    mocker.patch(
        "backend.server.v2.admin.credit_admin_routes.admin_get_user_history",
        "backend.api.features.admin.credit_admin_routes.admin_get_user_history",
        return_value=mock_history_response,
    )

@@ -170,12 +171,12 @@ def test_get_user_history_success(


def test_get_user_history_with_filters(
    mocker: pytest_mock.MockFixture,
    mocker: pytest_mock.MockerFixture,
    snapshot: Snapshot,
) -> None:
    """Test user credit history with search and filter parameters"""
    # Mock the admin_get_user_history function
    mock_history_response = admin_model.UserHistoryResponse(
    mock_history_response = UserHistoryResponse(
        history=[
            UserTransaction(
                user_id="user-3",
@@ -194,7 +195,7 @@ def test_get_user_history_with_filters(
    )

    mock_get_history = mocker.patch(
        "backend.server.v2.admin.credit_admin_routes.admin_get_user_history",
        "backend.api.features.admin.credit_admin_routes.admin_get_user_history",
        return_value=mock_history_response,
    )

@@ -230,12 +231,12 @@ def test_get_user_history_with_filters(


def test_get_user_history_empty_results(
    mocker: pytest_mock.MockFixture,
    mocker: pytest_mock.MockerFixture,
    snapshot: Snapshot,
) -> None:
    """Test user credit history with no results"""
    # Mock empty history response
    mock_history_response = admin_model.UserHistoryResponse(
    mock_history_response = UserHistoryResponse(
        history=[],
        pagination=Pagination(
            total_items=0,
@@ -246,7 +247,7 @@ def test_get_user_history_empty_results(
    )

    mocker.patch(
        "backend.server.v2.admin.credit_admin_routes.admin_get_user_history",
        "backend.api.features.admin.credit_admin_routes.admin_get_user_history",
        return_value=mock_history_response,
    )

@@ -7,9 +7,9 @@ import fastapi
import fastapi.responses
import prisma.enums

import backend.server.v2.store.cache as store_cache
import backend.server.v2.store.db
import backend.server.v2.store.model
import backend.api.features.store.cache as store_cache
import backend.api.features.store.db as store_db
import backend.api.features.store.model as store_model
import backend.util.json

logger = logging.getLogger(__name__)
@@ -24,7 +24,7 @@ router = fastapi.APIRouter(
@router.get(
    "/listings",
    summary="Get Admin Listings History",
    response_model=backend.server.v2.store.model.StoreListingsWithVersionsResponse,
    response_model=store_model.StoreListingsWithVersionsResponse,
)
async def get_admin_listings_with_versions(
    status: typing.Optional[prisma.enums.SubmissionStatus] = None,
@@ -48,7 +48,7 @@ async def get_admin_listings_with_versions(
        StoreListingsWithVersionsResponse with listings and their versions
    """
    try:
        listings = await backend.server.v2.store.db.get_admin_listings_with_versions(
        listings = await store_db.get_admin_listings_with_versions(
            status=status,
            search_query=search,
            page=page,
@@ -68,11 +68,11 @@ async def get_admin_listings_with_versions(
@router.post(
    "/submissions/{store_listing_version_id}/review",
    summary="Review Store Submission",
    response_model=backend.server.v2.store.model.StoreSubmission,
    response_model=store_model.StoreSubmission,
)
async def review_submission(
    store_listing_version_id: str,
    request: backend.server.v2.store.model.ReviewSubmissionRequest,
    request: store_model.ReviewSubmissionRequest,
    user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id),
):
    """
@@ -87,12 +87,10 @@ async def review_submission(
        StoreSubmission with updated review information
    """
    try:
        already_approved = (
            await backend.server.v2.store.db.check_submission_already_approved(
                store_listing_version_id=store_listing_version_id,
            )
        already_approved = await store_db.check_submission_already_approved(
            store_listing_version_id=store_listing_version_id,
        )
        submission = await backend.server.v2.store.db.review_store_submission(
        submission = await store_db.review_store_submission(
            store_listing_version_id=store_listing_version_id,
            is_approved=request.is_approved,
            external_comments=request.comments,
@@ -136,7 +134,7 @@ async def admin_download_agent_file(
    Raises:
        HTTPException: If the agent is not found or an unexpected error occurs.
    """
    graph_data = await backend.server.v2.store.db.get_agent_as_admin(
    graph_data = await store_db.get_agent_as_admin(
        user_id=user_id,
        store_listing_version_id=store_listing_version_id,
    )
@@ -6,10 +6,11 @@ from typing import Annotated
import fastapi
import pydantic
from autogpt_libs.auth import get_user_id
from autogpt_libs.auth.dependencies import requires_user

import backend.data.analytics

router = fastapi.APIRouter()
router = fastapi.APIRouter(dependencies=[fastapi.Security(requires_user)])
logger = logging.getLogger(__name__)

340
autogpt_platform/backend/backend/api/features/analytics_test.py
Normal file
@@ -0,0 +1,340 @@
"""Tests for analytics API endpoints."""

import json
from unittest.mock import AsyncMock, Mock

import fastapi
import fastapi.testclient
import pytest
import pytest_mock
from pytest_snapshot.plugin import Snapshot

from .analytics import router as analytics_router

app = fastapi.FastAPI()
app.include_router(analytics_router)

client = fastapi.testclient.TestClient(app)


@pytest.fixture(autouse=True)
def setup_app_auth(mock_jwt_user):
    """Setup auth overrides for all tests in this module."""
    from autogpt_libs.auth.jwt_utils import get_jwt_payload

    app.dependency_overrides[get_jwt_payload] = mock_jwt_user["get_jwt_payload"]
    yield
    app.dependency_overrides.clear()


# =============================================================================
# /log_raw_metric endpoint tests
# =============================================================================


def test_log_raw_metric_success(
    mocker: pytest_mock.MockFixture,
    configured_snapshot: Snapshot,
    test_user_id: str,
) -> None:
    """Test successful raw metric logging."""
    mock_result = Mock(id="metric-123-uuid")
    mock_log_metric = mocker.patch(
        "backend.data.analytics.log_raw_metric",
        new_callable=AsyncMock,
        return_value=mock_result,
    )

    request_data = {
        "metric_name": "page_load_time",
        "metric_value": 2.5,
        "data_string": "/dashboard",
    }

    response = client.post("/log_raw_metric", json=request_data)

    assert response.status_code == 200, f"Unexpected response: {response.text}"
    assert response.json() == "metric-123-uuid"

    mock_log_metric.assert_called_once_with(
        user_id=test_user_id,
        metric_name="page_load_time",
        metric_value=2.5,
        data_string="/dashboard",
    )

    configured_snapshot.assert_match(
        json.dumps({"metric_id": response.json()}, indent=2, sort_keys=True),
        "analytics_log_metric_success",
    )


@pytest.mark.parametrize(
    "metric_value,metric_name,data_string,test_id",
    [
        (100, "api_calls_count", "external_api", "integer_value"),
        (0, "error_count", "no_errors", "zero_value"),
        (-5.2, "temperature_delta", "cooling", "negative_value"),
        (1.23456789, "precision_test", "float_precision", "float_precision"),
        (999999999, "large_number", "max_value", "large_number"),
        (0.0000001, "tiny_number", "min_value", "tiny_number"),
    ],
)
def test_log_raw_metric_various_values(
    mocker: pytest_mock.MockFixture,
    configured_snapshot: Snapshot,
    metric_value: float,
    metric_name: str,
    data_string: str,
    test_id: str,
) -> None:
    """Test raw metric logging with various metric values."""
    mock_result = Mock(id=f"metric-{test_id}-uuid")
    mocker.patch(
        "backend.data.analytics.log_raw_metric",
        new_callable=AsyncMock,
        return_value=mock_result,
    )

    request_data = {
        "metric_name": metric_name,
        "metric_value": metric_value,
        "data_string": data_string,
    }

    response = client.post("/log_raw_metric", json=request_data)

    assert response.status_code == 200, f"Failed for {test_id}: {response.text}"

    configured_snapshot.assert_match(
        json.dumps(
            {"metric_id": response.json(), "test_case": test_id},
            indent=2,
            sort_keys=True,
        ),
        f"analytics_metric_{test_id}",
    )


@pytest.mark.parametrize(
    "invalid_data,expected_error",
    [
        ({}, "Field required"),
        ({"metric_name": "test"}, "Field required"),
        (
            {"metric_name": "test", "metric_value": "not_a_number", "data_string": "x"},
            "Input should be a valid number",
        ),
        (
            {"metric_name": "", "metric_value": 1.0, "data_string": "test"},
            "String should have at least 1 character",
        ),
        (
            {"metric_name": "test", "metric_value": 1.0, "data_string": ""},
            "String should have at least 1 character",
        ),
    ],
    ids=[
        "empty_request",
        "missing_metric_value_and_data_string",
        "invalid_metric_value_type",
        "empty_metric_name",
        "empty_data_string",
    ],
)
def test_log_raw_metric_validation_errors(
    invalid_data: dict,
    expected_error: str,
) -> None:
    """Test validation errors for invalid metric requests."""
    response = client.post("/log_raw_metric", json=invalid_data)

    assert response.status_code == 422
    error_detail = response.json()
    assert "detail" in error_detail, f"Missing 'detail' in error: {error_detail}"

    error_text = json.dumps(error_detail)
    assert (
        expected_error in error_text
    ), f"Expected '{expected_error}' in error response: {error_text}"


def test_log_raw_metric_service_error(
    mocker: pytest_mock.MockFixture,
    test_user_id: str,
) -> None:
    """Test error handling when analytics service fails."""
    mocker.patch(
        "backend.data.analytics.log_raw_metric",
        new_callable=AsyncMock,
        side_effect=Exception("Database connection failed"),
    )

    request_data = {
        "metric_name": "test_metric",
        "metric_value": 1.0,
        "data_string": "test",
    }

    response = client.post("/log_raw_metric", json=request_data)

    assert response.status_code == 500
    error_detail = response.json()["detail"]
    assert "Database connection failed" in error_detail["message"]
    assert "hint" in error_detail


# =============================================================================
# /log_raw_analytics endpoint tests
# =============================================================================


def test_log_raw_analytics_success(
    mocker: pytest_mock.MockFixture,
    configured_snapshot: Snapshot,
    test_user_id: str,
) -> None:
    """Test successful raw analytics logging."""
    mock_result = Mock(id="analytics-789-uuid")
    mock_log_analytics = mocker.patch(
        "backend.data.analytics.log_raw_analytics",
        new_callable=AsyncMock,
        return_value=mock_result,
    )

    request_data = {
        "type": "user_action",
        "data": {
            "action": "button_click",
            "button_id": "submit_form",
            "timestamp": "2023-01-01T00:00:00Z",
            "metadata": {"form_type": "registration", "fields_filled": 5},
        },
        "data_index": "button_click_submit_form",
    }

    response = client.post("/log_raw_analytics", json=request_data)

    assert response.status_code == 200, f"Unexpected response: {response.text}"
    assert response.json() == "analytics-789-uuid"

    mock_log_analytics.assert_called_once_with(
        test_user_id,
        "user_action",
        request_data["data"],
        "button_click_submit_form",
    )

    configured_snapshot.assert_match(
        json.dumps({"analytics_id": response.json()}, indent=2, sort_keys=True),
        "analytics_log_analytics_success",
    )


def test_log_raw_analytics_complex_data(
    mocker: pytest_mock.MockFixture,
    configured_snapshot: Snapshot,
) -> None:
    """Test raw analytics logging with complex nested data structures."""
    mock_result = Mock(id="analytics-complex-uuid")
    mocker.patch(
        "backend.data.analytics.log_raw_analytics",
        new_callable=AsyncMock,
        return_value=mock_result,
    )

    request_data = {
        "type": "agent_execution",
        "data": {
            "agent_id": "agent_123",
            "execution_id": "exec_456",
            "status": "completed",
            "duration_ms": 3500,
            "nodes_executed": 15,
            "blocks_used": [
                {"block_id": "llm_block", "count": 3},
                {"block_id": "http_block", "count": 5},
                {"block_id": "code_block", "count": 2},
            ],
            "errors": [],
            "metadata": {
                "trigger": "manual",
                "user_tier": "premium",
                "environment": "production",
            },
        },
        "data_index": "agent_123_exec_456",
    }

    response = client.post("/log_raw_analytics", json=request_data)

    assert response.status_code == 200

    configured_snapshot.assert_match(
        json.dumps(
            {"analytics_id": response.json(), "logged_data": request_data["data"]},
            indent=2,
            sort_keys=True,
        ),
        "analytics_log_analytics_complex_data",
    )


@pytest.mark.parametrize(
    "invalid_data,expected_error",
    [
        ({}, "Field required"),
        ({"type": "test"}, "Field required"),
        (
            {"type": "test", "data": "not_a_dict", "data_index": "test"},
            "Input should be a valid dictionary",
        ),
        ({"type": "test", "data": {"key": "value"}}, "Field required"),
    ],
    ids=[
        "empty_request",
        "missing_data_and_data_index",
        "invalid_data_type",
        "missing_data_index",
    ],
)
def test_log_raw_analytics_validation_errors(
    invalid_data: dict,
    expected_error: str,
) -> None:
    """Test validation errors for invalid analytics requests."""
    response = client.post("/log_raw_analytics", json=invalid_data)

    assert response.status_code == 422
    error_detail = response.json()
    assert "detail" in error_detail, f"Missing 'detail' in error: {error_detail}"

    error_text = json.dumps(error_detail)
    assert (
        expected_error in error_text
    ), f"Expected '{expected_error}' in error response: {error_text}"


def test_log_raw_analytics_service_error(
    mocker: pytest_mock.MockFixture,
    test_user_id: str,
) -> None:
    """Test error handling when analytics service fails."""
    mocker.patch(
        "backend.data.analytics.log_raw_analytics",
        new_callable=AsyncMock,
        side_effect=Exception("Analytics DB unreachable"),
    )

    request_data = {
        "type": "test_event",
        "data": {"key": "value"},
        "data_index": "test_index",
    }

    response = client.post("/log_raw_analytics", json=request_data)

    assert response.status_code == 500
    error_detail = response.json()["detail"]
    assert "Analytics DB unreachable" in error_detail["message"]
    assert "hint" in error_detail
@@ -6,17 +6,20 @@ from typing import Sequence

import prisma

import backend.api.features.library.db as library_db
import backend.api.features.library.model as library_model
import backend.api.features.store.db as store_db
import backend.api.features.store.model as store_model
import backend.data.block
import backend.server.v2.library.db as library_db
import backend.server.v2.library.model as library_model
import backend.server.v2.store.db as store_db
import backend.server.v2.store.model as store_model
from backend.blocks import load_all_blocks
from backend.blocks.llm import LlmModel
from backend.data.block import AnyBlockSchema, BlockCategory, BlockInfo, BlockSchema
from backend.data.db import query_raw_with_schema
from backend.integrations.providers import ProviderName
from backend.server.v2.builder.model import (
from backend.util.cache import cached
from backend.util.models import Pagination

from .model import (
    BlockCategoryResponse,
    BlockResponse,
    BlockType,
@@ -26,8 +29,6 @@ from backend.server.v2.builder.model import (
    ProviderResponse,
    SearchEntry,
)
from backend.util.cache import cached
from backend.util.models import Pagination

logger = logging.getLogger(__name__)
llm_models = [name.name.lower().replace("_", " ") for name in LlmModel]
@@ -2,8 +2,8 @@ from typing import Literal

from pydantic import BaseModel

import backend.server.v2.library.model as library_model
import backend.server.v2.store.model as store_model
import backend.api.features.library.model as library_model
import backend.api.features.store.model as store_model
from backend.data.block import BlockInfo
from backend.integrations.providers import ProviderName
from backend.util.models import Pagination
@@ -4,11 +4,12 @@ from typing import Annotated, Sequence
import fastapi
from autogpt_libs.auth.dependencies import get_user_id, requires_user

import backend.server.v2.builder.db as builder_db
import backend.server.v2.builder.model as builder_model
from backend.integrations.providers import ProviderName
from backend.util.models import Pagination

from . import db as builder_db
from . import model as builder_model

logger = logging.getLogger(__name__)

router = fastapi.APIRouter(
@@ -19,9 +19,10 @@ from openai.types.chat.chat_completion_message_tool_call_param import (
from pydantic import BaseModel

from backend.data.redis_client import get_redis_async
from backend.server.v2.chat.config import ChatConfig
from backend.util.exceptions import RedisError

from .config import ChatConfig

logger = logging.getLogger(__name__)
config = ChatConfig()

@@ -1,6 +1,6 @@
import pytest

from backend.server.v2.chat.model import (
from .model import (
    ChatMessage,
    ChatSession,
    Usage,
@@ -9,10 +9,11 @@ from fastapi import APIRouter, Depends, Query, Security
from fastapi.responses import StreamingResponse
from pydantic import BaseModel

import backend.server.v2.chat.service as chat_service
from backend.server.v2.chat.config import ChatConfig
from backend.util.exceptions import NotFoundError

from . import service as chat_service
from .config import ChatConfig

config = ChatConfig()

@@ -7,15 +7,17 @@ import orjson
from openai import AsyncOpenAI
from openai.types.chat import ChatCompletionChunk, ChatCompletionToolParam

import backend.server.v2.chat.config
from backend.server.v2.chat.model import (
from backend.util.exceptions import NotFoundError

from .config import ChatConfig
from .model import (
    ChatMessage,
    ChatSession,
    Usage,
    get_chat_session,
    upsert_chat_session,
)
from backend.server.v2.chat.response_model import (
from .response_model import (
    StreamBaseResponse,
    StreamEnd,
    StreamError,
@@ -26,12 +28,11 @@ from backend.server.v2.chat.response_model import (
    StreamToolExecutionResult,
    StreamUsage,
)
from backend.server.v2.chat.tools import execute_tool, tools
from backend.util.exceptions import NotFoundError
from .tools import execute_tool, tools

logger = logging.getLogger(__name__)

config = backend.server.v2.chat.config.ChatConfig()
config = ChatConfig()
client = AsyncOpenAI(api_key=config.api_key, base_url=config.base_url)

@@ -3,8 +3,8 @@ from os import getenv

import pytest

import backend.server.v2.chat.service as chat_service
from backend.server.v2.chat.response_model import (
from . import service as chat_service
from .response_model import (
    StreamEnd,
    StreamError,
    StreamTextChunk,
@@ -2,14 +2,14 @@ from typing import TYPE_CHECKING, Any

from openai.types.chat import ChatCompletionToolParam

from backend.server.v2.chat.model import ChatSession
from backend.api.features.chat.model import ChatSession

from .base import BaseTool
from .find_agent import FindAgentTool
from .run_agent import RunAgentTool

if TYPE_CHECKING:
    from backend.server.v2.chat.response_model import StreamToolExecutionResult
    from backend.api.features.chat.response_model import StreamToolExecutionResult

# Initialize tool instances
find_agent_tool = FindAgentTool()
@@ -1,12 +1,12 @@
import uuid
from datetime import UTC, datetime
from os import getenv
from typing import cast

import pytest
from prisma.types import ProfileCreateInput
from pydantic import SecretStr

from backend.api.features.chat.model import ChatSession
from backend.api.features.store import db as store_db
from backend.blocks.firecrawl.scrape import FirecrawlScrapeBlock
from backend.blocks.io import AgentInputBlock, AgentOutputBlock
from backend.blocks.llm import AITextGeneratorBlock
@@ -15,8 +15,6 @@ from backend.data.graph import Graph, Link, Node, create_graph
from backend.data.model import APIKeyCredentials
from backend.data.user import get_or_create_user
from backend.integrations.credentials_store import IntegrationCredentialsStore
from backend.server.v2.chat.model import ChatSession
from backend.server.v2.store import db as store_db


def make_session(user_id: str | None = None):
@@ -51,16 +49,13 @@ async def setup_test_data():
    # 1b. Create a profile with username for the user (required for store agent lookup)
    username = user.email.split("@")[0]
    await prisma.profile.create(
        data=cast(
            ProfileCreateInput,
            {
                "userId": user.id,
                "username": username,
                "name": f"Test User {username}",
                "description": "Test user profile",
                "links": [],  # Required field - empty array for test profiles
            },
        )
        data={
            "userId": user.id,
            "username": username,
            "name": f"Test User {username}",
            "description": "Test user profile",
            "links": [],  # Required field - empty array for test profiles
        }
    )

    # 2. Create a test graph with agent input -> agent output
@@ -177,16 +172,13 @@ async def setup_llm_test_data():
    # 1b. Create a profile with username for the user (required for store agent lookup)
    username = user.email.split("@")[0]
    await prisma.profile.create(
        data=cast(
            ProfileCreateInput,
            {
                "userId": user.id,
                "username": username,
                "name": f"Test User {username}",
                "description": "Test user profile for LLM tests",
                "links": [],  # Required field - empty array for test profiles
            },
        )
        data={
            "userId": user.id,
            "username": username,
            "name": f"Test User {username}",
            "description": "Test user profile for LLM tests",
            "links": [],  # Required field - empty array for test profiles
        }
    )

    # 2. Create test OpenAI credentials for the user
@@ -340,16 +332,13 @@ async def setup_firecrawl_test_data():
    # 1b. Create a profile with username for the user (required for store agent lookup)
    username = user.email.split("@")[0]
    await prisma.profile.create(
        data=cast(
            ProfileCreateInput,
            {
                "userId": user.id,
                "username": username,
                "name": f"Test User {username}",
                "description": "Test user profile for Firecrawl tests",
                "links": [],  # Required field - empty array for test profiles
            },
        )
        data={
            "userId": user.id,
            "username": username,
            "name": f"Test User {username}",
            "description": "Test user profile for Firecrawl tests",
            "links": [],  # Required field - empty array for test profiles
        }
    )

    # NOTE: We deliberately do NOT create Firecrawl credentials for this user
@@ -5,8 +5,8 @@ from typing import Any

from openai.types.chat import ChatCompletionToolParam

from backend.server.v2.chat.model import ChatSession
from backend.server.v2.chat.response_model import StreamToolExecutionResult
from backend.api.features.chat.model import ChatSession
from backend.api.features.chat.response_model import StreamToolExecutionResult

from .models import ErrorResponse, NeedLoginResponse, ToolResponseBase

@@ -3,17 +3,18 @@
import logging
from typing import Any

from backend.server.v2.chat.model import ChatSession
from backend.server.v2.chat.tools.base import BaseTool
from backend.server.v2.chat.tools.models import (
from backend.api.features.chat.model import ChatSession
from backend.api.features.store import db as store_db
from backend.util.exceptions import DatabaseError, NotFoundError

from .base import BaseTool
from .models import (
    AgentCarouselResponse,
    AgentInfo,
    ErrorResponse,
    NoResultsResponse,
    ToolResponseBase,
)
from backend.server.v2.store import db as store_db
from backend.util.exceptions import DatabaseError, NotFoundError

logger = logging.getLogger(__name__)

@@ -5,14 +5,21 @@ from typing import Any

from pydantic import BaseModel, Field, field_validator

from backend.api.features.chat.config import ChatConfig
from backend.api.features.chat.model import ChatSession
from backend.data.graph import GraphModel
from backend.data.model import CredentialsMetaInput
from backend.data.user import get_user_by_id
from backend.executor import utils as execution_utils
from backend.server.v2.chat.config import ChatConfig
from backend.server.v2.chat.model import ChatSession
from backend.server.v2.chat.tools.base import BaseTool
from backend.server.v2.chat.tools.models import (
from backend.util.clients import get_scheduler_client
from backend.util.exceptions import DatabaseError, NotFoundError
from backend.util.timezone_utils import (
    convert_utc_time_to_user_timezone,
    get_user_timezone_or_utc,
)

from .base import BaseTool
from .models import (
    AgentDetails,
    AgentDetailsResponse,
    ErrorResponse,
@@ -23,19 +30,13 @@ from backend.server.v2.chat.tools.models import (
    ToolResponseBase,
    UserReadiness,
)
from backend.server.v2.chat.tools.utils import (
from .utils import (
    check_user_has_required_credentials,
    extract_credentials_from_schema,
    fetch_graph_from_store_slug,
    get_or_create_library_agent,
    match_user_credentials_to_graph,
)
from backend.util.clients import get_scheduler_client
from backend.util.exceptions import DatabaseError, NotFoundError
from backend.util.timezone_utils import (
    convert_utc_time_to_user_timezone,
    get_user_timezone_or_utc,
)

logger = logging.getLogger(__name__)
config = ChatConfig()
@@ -3,13 +3,13 @@ import uuid
import orjson
import pytest

from backend.server.v2.chat.tools._test_data import (
from ._test_data import (
    make_session,
    setup_firecrawl_test_data,
    setup_llm_test_data,
    setup_test_data,
)
from backend.server.v2.chat.tools.run_agent import RunAgentTool
from .run_agent import RunAgentTool

# This is so the formatter doesn't remove the fixture imports
setup_llm_test_data = setup_llm_test_data
@@ -3,13 +3,13 @@
import logging
from typing import Any

from backend.api.features.library import db as library_db
from backend.api.features.library import model as library_model
from backend.api.features.store import db as store_db
from backend.data import graph as graph_db
from backend.data.graph import GraphModel
from backend.data.model import CredentialsMetaInput
from backend.integrations.creds_manager import IntegrationCredentialsManager
from backend.server.v2.library import db as library_db
from backend.server.v2.library import model as library_model
from backend.server.v2.store import db as store_db
from backend.util.exceptions import NotFoundError

logger = logging.getLogger(__name__)
@@ -7,9 +7,10 @@ import pytest_mock
from prisma.enums import ReviewStatus
from pytest_snapshot.plugin import Snapshot

from backend.server.rest_api import handle_internal_http_error
from backend.server.v2.executions.review.model import PendingHumanReviewModel
from backend.server.v2.executions.review.routes import router
from backend.api.rest_api import handle_internal_http_error

from .model import PendingHumanReviewModel
from .routes import router

# Using a fixed timestamp for reproducible tests
FIXED_NOW = datetime.datetime(2023, 1, 1, 0, 0, 0, tzinfo=datetime.timezone.utc)
@@ -54,13 +55,13 @@ def sample_pending_review(test_user_id: str) -> PendingHumanReviewModel:


def test_get_pending_reviews_empty(
    mocker: pytest_mock.MockFixture,
    mocker: pytest_mock.MockerFixture,
    snapshot: Snapshot,
    test_user_id: str,
) -> None:
    """Test getting pending reviews when none exist"""
    mock_get_reviews = mocker.patch(
        "backend.server.v2.executions.review.routes.get_pending_reviews_for_user"
        "backend.api.features.executions.review.routes.get_pending_reviews_for_user"
    )
    mock_get_reviews.return_value = []

@@ -72,14 +73,14 @@ def test_get_pending_reviews_empty(


def test_get_pending_reviews_with_data(
    mocker: pytest_mock.MockFixture,
    mocker: pytest_mock.MockerFixture,
    sample_pending_review: PendingHumanReviewModel,
    snapshot: Snapshot,
    test_user_id: str,
) -> None:
    """Test getting pending reviews with data"""
    mock_get_reviews = mocker.patch(
        "backend.server.v2.executions.review.routes.get_pending_reviews_for_user"
        "backend.api.features.executions.review.routes.get_pending_reviews_for_user"
    )
    mock_get_reviews.return_value = [sample_pending_review]

@@ -94,14 +95,14 @@ def test_get_pending_reviews_with_data(


def test_get_pending_reviews_for_execution_success(
    mocker: pytest_mock.MockFixture,
    mocker: pytest_mock.MockerFixture,
    sample_pending_review: PendingHumanReviewModel,
    snapshot: Snapshot,
    test_user_id: str,
) -> None:
    """Test getting pending reviews for specific execution"""
    mock_get_graph_execution = mocker.patch(
        "backend.server.v2.executions.review.routes.get_graph_execution_meta"
        "backend.api.features.executions.review.routes.get_graph_execution_meta"
    )
    mock_get_graph_execution.return_value = {
        "id": "test_graph_exec_456",
@@ -109,7 +110,7 @@ def test_get_pending_reviews_for_execution_success(
    }

    mock_get_reviews = mocker.patch(
        "backend.server.v2.executions.review.routes.get_pending_reviews_for_execution"
        "backend.api.features.executions.review.routes.get_pending_reviews_for_execution"
    )
    mock_get_reviews.return_value = [sample_pending_review]

@@ -121,24 +122,23 @@ def test_get_pending_reviews_for_execution_success(
    assert data[0]["graph_exec_id"] == "test_graph_exec_456"


def test_get_pending_reviews_for_execution_access_denied(
    mocker: pytest_mock.MockFixture,
    test_user_id: str,
def test_get_pending_reviews_for_execution_not_available(
    mocker: pytest_mock.MockerFixture,
) -> None:
    """Test access denied when user doesn't own the execution"""
    mock_get_graph_execution = mocker.patch(
        "backend.server.v2.executions.review.routes.get_graph_execution_meta"
        "backend.api.features.executions.review.routes.get_graph_execution_meta"
    )
    mock_get_graph_execution.return_value = None

    response = client.get("/api/review/execution/test_graph_exec_456")

    assert response.status_code == 403
    assert "Access denied" in response.json()["detail"]
    assert response.status_code == 404
    assert "not found" in response.json()["detail"]


def test_process_review_action_approve_success(
    mocker: pytest_mock.MockFixture,
    mocker: pytest_mock.MockerFixture,
    sample_pending_review: PendingHumanReviewModel,
    test_user_id: str,
) -> None:
@@ -146,12 +146,12 @@ def test_process_review_action_approve_success(
    # Mock the route functions

    mock_get_reviews_for_execution = mocker.patch(
        "backend.server.v2.executions.review.routes.get_pending_reviews_for_execution"
        "backend.api.features.executions.review.routes.get_pending_reviews_for_execution"
    )
    mock_get_reviews_for_execution.return_value = [sample_pending_review]

    mock_process_all_reviews = mocker.patch(
        "backend.server.v2.executions.review.routes.process_all_reviews_for_execution"
        "backend.api.features.executions.review.routes.process_all_reviews_for_execution"
    )
    # Create approved review for return
    approved_review = PendingHumanReviewModel(
@@ -174,11 +174,11 @@ def test_process_review_action_approve_success(
    mock_process_all_reviews.return_value = {"test_node_123": approved_review}

    mock_has_pending = mocker.patch(
        "backend.server.v2.executions.review.routes.has_pending_reviews_for_graph_exec"
        "backend.api.features.executions.review.routes.has_pending_reviews_for_graph_exec"
    )
    mock_has_pending.return_value = False

    mocker.patch("backend.server.v2.executions.review.routes.add_graph_execution")
    mocker.patch("backend.api.features.executions.review.routes.add_graph_execution")

    request_data = {
        "reviews": [
@@ -202,7 +202,7 @@ def test_process_review_action_approve_success(


def test_process_review_action_reject_success(
    mocker: pytest_mock.MockFixture,
    mocker: pytest_mock.MockerFixture,
    sample_pending_review: PendingHumanReviewModel,
    test_user_id: str,
) -> None:
@@ -210,12 +210,12 @@ def test_process_review_action_reject_success(
    # Mock the route functions

    mock_get_reviews_for_execution = mocker.patch(
        "backend.server.v2.executions.review.routes.get_pending_reviews_for_execution"
        "backend.api.features.executions.review.routes.get_pending_reviews_for_execution"
    )
    mock_get_reviews_for_execution.return_value = [sample_pending_review]

    mock_process_all_reviews = mocker.patch(
        "backend.server.v2.executions.review.routes.process_all_reviews_for_execution"
        "backend.api.features.executions.review.routes.process_all_reviews_for_execution"
    )
    rejected_review = PendingHumanReviewModel(
        node_exec_id="test_node_123",
@@ -237,7 +237,7 @@ def test_process_review_action_reject_success(
    mock_process_all_reviews.return_value = {"test_node_123": rejected_review}

    mock_has_pending = mocker.patch(
        "backend.server.v2.executions.review.routes.has_pending_reviews_for_graph_exec"
        "backend.api.features.executions.review.routes.has_pending_reviews_for_graph_exec"
    )
    mock_has_pending.return_value = False

@@ -262,7 +262,7 @@ def test_process_review_action_reject_success(


def test_process_review_action_mixed_success(
    mocker: pytest_mock.MockFixture,
    mocker: pytest_mock.MockerFixture,
    sample_pending_review: PendingHumanReviewModel,
    test_user_id: str,
) -> None:
@@ -289,12 +289,12 @@ def test_process_review_action_mixed_success(
    # Mock the route functions

    mock_get_reviews_for_execution = mocker.patch(
        "backend.server.v2.executions.review.routes.get_pending_reviews_for_execution"
        "backend.api.features.executions.review.routes.get_pending_reviews_for_execution"
    )
    mock_get_reviews_for_execution.return_value = [sample_pending_review, second_review]

    mock_process_all_reviews = mocker.patch(
        "backend.server.v2.executions.review.routes.process_all_reviews_for_execution"
        "backend.api.features.executions.review.routes.process_all_reviews_for_execution"
    )
    # Create approved version of first review
    approved_review = PendingHumanReviewModel(
@@ -338,7 +338,7 @@ def test_process_review_action_mixed_success(
    }

    mock_has_pending = mocker.patch(
        "backend.server.v2.executions.review.routes.has_pending_reviews_for_graph_exec"
        "backend.api.features.executions.review.routes.has_pending_reviews_for_graph_exec"
    )
    mock_has_pending.return_value = False

@@ -369,7 +369,7 @@ def test_process_review_action_mixed_success(


def test_process_review_action_empty_request(
    mocker: pytest_mock.MockFixture,
    mocker: pytest_mock.MockerFixture,
    test_user_id: str,
) -> None:
    """Test error when no reviews provided"""
@@ -386,19 +386,19 @@ def test_process_review_action_empty_request(


def test_process_review_action_review_not_found(
    mocker: pytest_mock.MockFixture,
    mocker: pytest_mock.MockerFixture,
    test_user_id: str,
) -> None:
    """Test error when review is not found"""
    # Mock the functions that extract graph execution ID from the request
    mock_get_reviews_for_execution = mocker.patch(
        "backend.server.v2.executions.review.routes.get_pending_reviews_for_execution"
        "backend.api.features.executions.review.routes.get_pending_reviews_for_execution"
    )
    mock_get_reviews_for_execution.return_value = []  # No reviews found

    # Mock process_all_reviews to simulate not finding reviews
    mock_process_all_reviews = mocker.patch(
        "backend.server.v2.executions.review.routes.process_all_reviews_for_execution"
        "backend.api.features.executions.review.routes.process_all_reviews_for_execution"
    )
    # This should raise a ValueError with "Reviews not found" message based on the data/human_review.py logic
    mock_process_all_reviews.side_effect = ValueError(
@@ -422,20 +422,20 @@ def test_process_review_action_review_not_found(


def test_process_review_action_partial_failure(
    mocker: pytest_mock.MockFixture,
    mocker: pytest_mock.MockerFixture,
    sample_pending_review: PendingHumanReviewModel,
    test_user_id: str,
) -> None:
    """Test handling of partial failures in review processing"""
    # Mock the route functions
    mock_get_reviews_for_execution = mocker.patch(
        "backend.server.v2.executions.review.routes.get_pending_reviews_for_execution"
        "backend.api.features.executions.review.routes.get_pending_reviews_for_execution"
    )
    mock_get_reviews_for_execution.return_value = [sample_pending_review]

    # Mock partial failure in processing
    mock_process_all_reviews = mocker.patch(
        "backend.server.v2.executions.review.routes.process_all_reviews_for_execution"
        "backend.api.features.executions.review.routes.process_all_reviews_for_execution"
    )
    mock_process_all_reviews.side_effect = ValueError("Some reviews failed validation")

@@ -456,20 +456,20 @@ def test_process_review_action_partial_failure(


def test_process_review_action_invalid_node_exec_id(
    mocker: pytest_mock.MockFixture,
    mocker: pytest_mock.MockerFixture,
    sample_pending_review: PendingHumanReviewModel,
    test_user_id: str,
) -> None:
    """Test failure when trying to process review with invalid node execution ID"""
    # Mock the route functions
    mock_get_reviews_for_execution = mocker.patch(
        "backend.server.v2.executions.review.routes.get_pending_reviews_for_execution"
        "backend.api.features.executions.review.routes.get_pending_reviews_for_execution"
    )
    mock_get_reviews_for_execution.return_value = [sample_pending_review]

    # Mock validation failure - this should return 400, not 500
    mock_process_all_reviews = mocker.patch(
        "backend.server.v2.executions.review.routes.process_all_reviews_for_execution"
        "backend.api.features.executions.review.routes.process_all_reviews_for_execution"
    )
    mock_process_all_reviews.side_effect = ValueError(
        "Invalid node execution ID format"
@@ -13,11 +13,8 @@ from backend.data.human_review import (
    process_all_reviews_for_execution,
)
from backend.executor.utils import add_graph_execution
from backend.server.v2.executions.review.model import (
    PendingHumanReviewModel,
    ReviewRequest,
    ReviewResponse,
)

from .model import PendingHumanReviewModel, ReviewRequest, ReviewResponse

logger = logging.getLogger(__name__)

@@ -70,8 +67,7 @@ async def list_pending_reviews(
    response_model=List[PendingHumanReviewModel],
    responses={
        200: {"description": "List of pending reviews for the execution"},
        400: {"description": "Invalid graph execution ID"},
        403: {"description": "Access denied to graph execution"},
        404: {"description": "Graph execution not found"},
        500: {"description": "Server error", "content": {"application/json": {}}},
    },
)
@@ -94,7 +90,7 @@ async def list_pending_reviews_for_execution(

    Raises:
        HTTPException:
            - 403: If user doesn't own the graph execution
            - 404: If the graph execution doesn't exist or isn't owned by this user
            - 500: If authentication fails or database error occurs

    Note:
@@ -108,8 +104,8 @@ async def list_pending_reviews_for_execution(
    )
    if not graph_exec:
        raise HTTPException(
            status_code=status.HTTP_403_FORBIDDEN,
            detail="Access denied to graph execution",
            status_code=status.HTTP_404_NOT_FOUND,
            detail=f"Graph execution #{graph_exec_id} not found",
        )

    return await get_pending_reviews_for_execution(graph_exec_id, user_id)
@@ -17,6 +17,8 @@ from fastapi import (
from pydantic import BaseModel, Field, SecretStr
from starlette.status import HTTP_500_INTERNAL_SERVER_ERROR, HTTP_502_BAD_GATEWAY

from backend.api.features.library.db import set_preset_webhook, update_preset
from backend.api.features.library.model import LibraryAgentPreset
from backend.data.graph import NodeModel, get_graph, set_node_webhook
from backend.data.integrations import (
WebhookEvent,
@@ -45,13 +47,6 @@ from backend.integrations.creds_manager import IntegrationCredentialsManager
from backend.integrations.oauth import CREDENTIALS_BY_PROVIDER, HANDLERS_BY_NAME
from backend.integrations.providers import ProviderName
from backend.integrations.webhooks import get_webhook_manager
from backend.server.integrations.models import (
ProviderConstants,
ProviderNamesResponse,
get_all_provider_names,
)
from backend.server.v2.library.db import set_preset_webhook, update_preset
from backend.server.v2.library.model import LibraryAgentPreset
from backend.util.exceptions import (
GraphNotInLibraryError,
MissingConfigError,
@@ -60,6 +55,8 @@ from backend.util.exceptions import (
)
from backend.util.settings import Settings

from .models import ProviderConstants, ProviderNamesResponse, get_all_provider_names

if TYPE_CHECKING:
from backend.integrations.oauth import BaseOAuthHandler

@@ -1,20 +1,17 @@
import asyncio
import logging
from typing import Literal, Optional, cast
from typing import Literal, Optional

import fastapi
import prisma.errors
import prisma.fields
import prisma.models
import prisma.types
from prisma.types import LibraryAgentCreateInput

import backend.api.features.store.exceptions as store_exceptions
import backend.api.features.store.image_gen as store_image_gen
import backend.api.features.store.media as store_media
import backend.data.graph as graph_db
import backend.data.integrations as integrations_db
import backend.server.v2.library.model as library_model
import backend.server.v2.store.exceptions as store_exceptions
import backend.server.v2.store.image_gen as store_image_gen
import backend.server.v2.store.media as store_media
from backend.data.block import BlockInput
from backend.data.db import transaction
from backend.data.execution import get_graph_execution
@@ -29,6 +26,8 @@ from backend.util.json import SafeJson
from backend.util.models import Pagination
from backend.util.settings import Config

from . import model as library_model

logger = logging.getLogger(__name__)
config = Config()
integration_creds_manager = IntegrationCredentialsManager()
@@ -539,6 +538,7 @@ async def update_library_agent(
library_agent_id: str,
user_id: str,
auto_update_version: Optional[bool] = None,
graph_version: Optional[int] = None,
is_favorite: Optional[bool] = None,
is_archived: Optional[bool] = None,
is_deleted: Optional[Literal[False]] = None,
@@ -551,6 +551,7 @@ async def update_library_agent(
library_agent_id: The ID of the LibraryAgent to update.
user_id: The owner of this LibraryAgent.
auto_update_version: Whether the agent should auto-update to active version.
graph_version: Specific graph version to update to.
is_favorite: Whether this agent is marked as a favorite.
is_archived: Whether this agent is archived.
settings: User-specific settings for this library agent.
@@ -564,8 +565,8 @@ async def update_library_agent(
"""
logger.debug(
f"Updating library agent {library_agent_id} for user {user_id} with "
f"auto_update_version={auto_update_version}, is_favorite={is_favorite}, "
f"is_archived={is_archived}, settings={settings}"
f"auto_update_version={auto_update_version}, graph_version={graph_version}, "
f"is_favorite={is_favorite}, is_archived={is_archived}, settings={settings}"
)
update_fields: prisma.types.LibraryAgentUpdateManyMutationInput = {}
if auto_update_version is not None:
@@ -582,10 +583,23 @@ async def update_library_agent(
update_fields["isDeleted"] = is_deleted
if settings is not None:
update_fields["settings"] = SafeJson(settings.model_dump())
if not update_fields:
raise ValueError("No values were passed to update")

try:
# If graph_version is provided, update to that specific version
if graph_version is not None:
# Get the current agent to find its graph_id
agent = await get_library_agent(id=library_agent_id, user_id=user_id)
# Update to the specified version using existing function
return await update_agent_version_in_library(
user_id=user_id,
agent_graph_id=agent.graph_id,
agent_graph_version=graph_version,
)

# Otherwise, just update the simple fields
if not update_fields:
raise ValueError("No values were passed to update")

n_updated = await prisma.models.LibraryAgent.prisma().update_many(
where={"id": library_agent_id, "userId": user_id},
data=update_fields,
@@ -803,21 +817,18 @@ async def add_store_agent_to_library(

# Create LibraryAgent entry
added_agent = await prisma.models.LibraryAgent.prisma().create(
data=cast(
LibraryAgentCreateInput,
{
"User": {"connect": {"id": user_id}},
"AgentGraph": {
"connect": {
"graphVersionId": {"id": graph.id, "version": graph.version}
}
},
"isCreatedByUser": False,
"settings": SafeJson(
_initialize_graph_settings(graph_model).model_dump()
),
data={
"User": {"connect": {"id": user_id}},
"AgentGraph": {
"connect": {
"graphVersionId": {"id": graph.id, "version": graph.version}
}
},
),
"isCreatedByUser": False,
"settings": SafeJson(
_initialize_graph_settings(graph_model).model_dump()
),
},
include=library_agent_include(
user_id, include_nodes=False, include_executions=False
),
@@ -1,16 +1,15 @@
from datetime import datetime

import prisma.enums
import prisma.errors
import prisma.models
import prisma.types
import pytest

import backend.server.v2.library.db as db
import backend.server.v2.store.exceptions
import backend.api.features.store.exceptions
from backend.data.db import connect
from backend.data.includes import library_agent_include

from . import db


@pytest.mark.asyncio
async def test_get_library_agents(mocker):
@@ -88,7 +87,7 @@ async def test_add_agent_to_library(mocker):
await connect()

# Mock the transaction context
mock_transaction = mocker.patch("backend.server.v2.library.db.transaction")
mock_transaction = mocker.patch("backend.api.features.library.db.transaction")
mock_transaction.return_value.__aenter__ = mocker.AsyncMock(return_value=None)
mock_transaction.return_value.__aexit__ = mocker.AsyncMock(return_value=None)
# Mock data
@@ -151,7 +150,7 @@ async def test_add_agent_to_library(mocker):
)

# Mock graph_db.get_graph function that's called to check for HITL blocks
mock_graph_db = mocker.patch("backend.server.v2.library.db.graph_db")
mock_graph_db = mocker.patch("backend.api.features.library.db.graph_db")
mock_graph_model = mocker.Mock()
mock_graph_model.nodes = (
[]
@@ -159,7 +158,9 @@ async def test_add_agent_to_library(mocker):
mock_graph_db.get_graph = mocker.AsyncMock(return_value=mock_graph_model)

# Mock the model conversion
mock_from_db = mocker.patch("backend.server.v2.library.model.LibraryAgent.from_db")
mock_from_db = mocker.patch(
"backend.api.features.library.model.LibraryAgent.from_db"
)
mock_from_db.return_value = mocker.Mock()

# Call function
@@ -217,7 +218,7 @@ async def test_add_agent_to_library_not_found(mocker):
)

# Call function and verify exception
with pytest.raises(backend.server.v2.store.exceptions.AgentNotFoundError):
with pytest.raises(backend.api.features.store.exceptions.AgentNotFoundError):
await db.add_store_agent_to_library("version123", "test-user")

# Verify mock called correctly
@@ -385,6 +385,9 @@ class LibraryAgentUpdateRequest(pydantic.BaseModel):
auto_update_version: Optional[bool] = pydantic.Field(
default=None, description="Auto-update the agent version"
)
graph_version: Optional[int] = pydantic.Field(
default=None, description="Specific graph version to update to"
)
is_favorite: Optional[bool] = pydantic.Field(
default=None, description="Mark the agent as a favorite"
)
@@ -3,7 +3,7 @@ import datetime
import prisma.models
import pytest

import backend.server.v2.library.model as library_model
from . import model as library_model


@pytest.mark.asyncio
@@ -6,12 +6,13 @@ from fastapi import APIRouter, Body, HTTPException, Query, Security, status
from fastapi.responses import Response
from prisma.enums import OnboardingStep

import backend.server.v2.library.db as library_db
import backend.server.v2.library.model as library_model
import backend.server.v2.store.exceptions as store_exceptions
import backend.api.features.store.exceptions as store_exceptions
from backend.data.onboarding import complete_onboarding_step
from backend.util.exceptions import DatabaseError, NotFoundError

from .. import db as library_db
from .. import model as library_model

logger = logging.getLogger(__name__)

router = APIRouter(
@@ -284,6 +285,7 @@ async def update_library_agent(
library_agent_id=library_agent_id,
user_id=user_id,
auto_update_version=payload.auto_update_version,
graph_version=payload.graph_version,
is_favorite=payload.is_favorite,
is_archived=payload.is_archived,
settings=payload.settings,
@@ -4,8 +4,6 @@ from typing import Any, Optional
import autogpt_libs.auth as autogpt_auth_lib
from fastapi import APIRouter, Body, HTTPException, Query, Security, status

import backend.server.v2.library.db as db
import backend.server.v2.library.model as models
from backend.data.execution import GraphExecutionMeta
from backend.data.graph import get_graph
from backend.data.integrations import get_webhook
@@ -17,6 +15,9 @@ from backend.integrations.webhooks import get_webhook_manager
from backend.integrations.webhooks.utils import setup_webhook_for_block
from backend.util.exceptions import NotFoundError

from .. import db
from .. import model as models

logger = logging.getLogger(__name__)

credentials_manager = IntegrationCredentialsManager()
@@ -7,10 +7,11 @@ import pytest
import pytest_mock
from pytest_snapshot.plugin import Snapshot

import backend.server.v2.library.model as library_model
from backend.server.v2.library.routes import router as library_router
from backend.util.models import Pagination

from . import model as library_model
from .routes import router as library_router

app = fastapi.FastAPI()
app.include_router(library_router)

@@ -86,7 +87,7 @@ async def test_get_library_agents_success(
total_items=2, total_pages=1, current_page=1, page_size=50
),
)
mock_db_call = mocker.patch("backend.server.v2.library.db.list_library_agents")
mock_db_call = mocker.patch("backend.api.features.library.db.list_library_agents")
mock_db_call.return_value = mocked_value

response = client.get("/agents?search_term=test")
@@ -112,7 +113,7 @@ async def test_get_library_agents_success(


def test_get_library_agents_error(mocker: pytest_mock.MockFixture, test_user_id: str):
mock_db_call = mocker.patch("backend.server.v2.library.db.list_library_agents")
mock_db_call = mocker.patch("backend.api.features.library.db.list_library_agents")
mock_db_call.side_effect = Exception("Test error")

response = client.get("/agents?search_term=test")
@@ -161,7 +162,7 @@ async def test_get_favorite_library_agents_success(
),
)
mock_db_call = mocker.patch(
"backend.server.v2.library.db.list_favorite_library_agents"
"backend.api.features.library.db.list_favorite_library_agents"
)
mock_db_call.return_value = mocked_value

@@ -184,7 +185,7 @@ def test_get_favorite_library_agents_error(
mocker: pytest_mock.MockFixture, test_user_id: str
):
mock_db_call = mocker.patch(
"backend.server.v2.library.db.list_favorite_library_agents"
"backend.api.features.library.db.list_favorite_library_agents"
)
mock_db_call.side_effect = Exception("Test error")

@@ -223,11 +224,11 @@ def test_add_agent_to_library_success(
)

mock_db_call = mocker.patch(
"backend.server.v2.library.db.add_store_agent_to_library"
"backend.api.features.library.db.add_store_agent_to_library"
)
mock_db_call.return_value = mock_library_agent
mock_complete_onboarding = mocker.patch(
"backend.server.v2.library.routes.agents.complete_onboarding_step",
"backend.api.features.library.routes.agents.complete_onboarding_step",
new_callable=AsyncMock,
)

@@ -249,7 +250,7 @@ def test_add_agent_to_library_success(

def test_add_agent_to_library_error(mocker: pytest_mock.MockFixture, test_user_id: str):
mock_db_call = mocker.patch(
"backend.server.v2.library.db.add_store_agent_to_library"
"backend.api.features.library.db.add_store_agent_to_library"
)
mock_db_call.side_effect = Exception("Test error")

@@ -5,11 +5,11 @@ Implements OAuth 2.0 Authorization Code flow with PKCE support.

Flow:
1. User clicks "Login with AutoGPT" in 3rd party app
2. App redirects user to /oauth/authorize with client_id, redirect_uri, scope, state
2. App redirects user to /auth/authorize with client_id, redirect_uri, scope, state
3. User sees consent screen (if not already logged in, redirects to login first)
4. User approves → backend creates authorization code
5. User redirected back to app with code
6. App exchanges code for access/refresh tokens at /oauth/token
6. App exchanges code for access/refresh tokens at /api/oauth/token
7. App uses access token to call external API endpoints
"""

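For reference, the S256 verifier/challenge pair exchanged in steps 2 and 6 of this flow can be derived with the same `base64`/`hashlib`/`secrets` modules this test module imports. A minimal sketch, assuming a 32-byte verifier and a helper name of our own choosing (neither is taken from this diff):

```python
import base64
import hashlib
import secrets


def make_pkce_pair() -> tuple[str, str]:
    # code_verifier: high-entropy URL-safe string (RFC 7636, section 4.1)
    code_verifier = (
        base64.urlsafe_b64encode(secrets.token_bytes(32)).rstrip(b"=").decode()
    )
    # code_challenge for S256: BASE64URL(SHA256(ascii(code_verifier))), unpadded
    digest = hashlib.sha256(code_verifier.encode("ascii")).digest()
    code_challenge = base64.urlsafe_b64encode(digest).rstrip(b"=").decode()
    return code_verifier, code_challenge
```

The client sends `code_challenge` (with `code_challenge_method=S256`) to `/auth/authorize`, then proves possession by sending the raw `code_verifier` to `/api/oauth/token`, so the secret never rides along on the browser redirect.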
@@ -16,7 +16,7 @@ import base64
import hashlib
import secrets
import uuid
from typing import AsyncGenerator, cast
from typing import AsyncGenerator

import httpx
import pytest
@@ -27,15 +27,8 @@ from prisma.models import OAuthApplication as PrismaOAuthApplication
from prisma.models import OAuthAuthorizationCode as PrismaOAuthAuthorizationCode
from prisma.models import OAuthRefreshToken as PrismaOAuthRefreshToken
from prisma.models import User as PrismaUser
from prisma.types import (
OAuthAccessTokenCreateInput,
OAuthApplicationCreateInput,
OAuthAuthorizationCodeCreateInput,
OAuthRefreshTokenCreateInput,
UserCreateInput,
)

from backend.server.rest_api import app
from backend.api.rest_api import app

keysmith = APIKeySmith()

@@ -55,14 +48,11 @@ def test_user_id() -> str:
async def test_user(server, test_user_id: str):
"""Create a test user in the database."""
await PrismaUser.prisma().create(
data=cast(
UserCreateInput,
{
"id": test_user_id,
"email": f"oauth-test-{test_user_id}@example.com",
"name": "OAuth Test User",
},
)
data={
"id": test_user_id,
"email": f"oauth-test-{test_user_id}@example.com",
"name": "OAuth Test User",
}
)

yield test_user_id
@@ -87,25 +77,22 @@ async def test_oauth_app(test_user: str):
client_secret_hash, client_secret_salt = keysmith.hash_key(client_secret_plaintext)

await PrismaOAuthApplication.prisma().create(
data=cast(
OAuthApplicationCreateInput,
{
"id": app_id,
"name": "Test OAuth App",
"description": "Test application for integration tests",
"clientId": client_id,
"clientSecret": client_secret_hash,
"clientSecretSalt": client_secret_salt,
"redirectUris": [
"https://example.com/callback",
"http://localhost:3000/callback",
],
"grantTypes": ["authorization_code", "refresh_token"],
"scopes": [APIKeyPermission.EXECUTE_GRAPH, APIKeyPermission.READ_GRAPH],
"ownerId": test_user,
"isActive": True,
},
)
data={
"id": app_id,
"name": "Test OAuth App",
"description": "Test application for integration tests",
"clientId": client_id,
"clientSecret": client_secret_hash,
"clientSecretSalt": client_secret_salt,
"redirectUris": [
"https://example.com/callback",
"http://localhost:3000/callback",
],
"grantTypes": ["authorization_code", "refresh_token"],
"scopes": [APIKeyPermission.EXECUTE_GRAPH, APIKeyPermission.READ_GRAPH],
"ownerId": test_user,
"isActive": True,
}
)

yield {
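`APIKeySmith.hash_key` is called here but not defined in this diff, so only its shape is visible: it returns a (hash, salt) pair that the fixture stores in `clientSecret`/`clientSecretSalt`. A hedged sketch of that salted hash-then-verify pattern (function names and PBKDF2 parameters are illustrative assumptions, not the library's actual scheme):

```python
import hashlib
import secrets


def hash_secret(plaintext: str) -> tuple[str, str]:
    # Persist only the derived hash plus a per-secret random salt.
    salt = secrets.token_hex(16)
    digest = hashlib.pbkdf2_hmac(
        "sha256", plaintext.encode(), bytes.fromhex(salt), 100_000
    )
    return digest.hex(), salt


def verify_secret(plaintext: str, stored_hash: str, salt: str) -> bool:
    digest = hashlib.pbkdf2_hmac(
        "sha256", plaintext.encode(), bytes.fromhex(salt), 100_000
    )
    # Constant-time comparison avoids timing side channels.
    return secrets.compare_digest(digest.hex(), stored_hash)
```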
@@ -309,22 +296,19 @@ async def inactive_oauth_app(test_user: str):
client_secret_hash, client_secret_salt = keysmith.hash_key(client_secret_plaintext)

await PrismaOAuthApplication.prisma().create(
data=cast(
OAuthApplicationCreateInput,
{
"id": app_id,
"name": "Inactive OAuth App",
"description": "Inactive test application",
"clientId": client_id,
"clientSecret": client_secret_hash,
"clientSecretSalt": client_secret_salt,
"redirectUris": ["https://example.com/callback"],
"grantTypes": ["authorization_code", "refresh_token"],
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
"ownerId": test_user,
"isActive": False,  # Inactive!
},
)
data={
"id": app_id,
"name": "Inactive OAuth App",
"description": "Inactive test application",
"clientId": client_id,
"clientSecret": client_secret_hash,
"clientSecretSalt": client_secret_salt,
"redirectUris": ["https://example.com/callback"],
"grantTypes": ["authorization_code", "refresh_token"],
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
"ownerId": test_user,
"isActive": False,  # Inactive!
}
)

yield {
@@ -715,17 +699,14 @@ async def test_token_authorization_code_expired(
now = datetime.now(timezone.utc)

await PrismaOAuthAuthorizationCode.prisma().create(
data=cast(
OAuthAuthorizationCodeCreateInput,
{
"code": expired_code,
"applicationId": test_oauth_app["id"],
"userId": test_user,
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
"redirectUri": test_oauth_app["redirect_uri"],
"expiresAt": now - timedelta(hours=1),  # Already expired
},
)
data={
"code": expired_code,
"applicationId": test_oauth_app["id"],
"userId": test_user,
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
"redirectUri": test_oauth_app["redirect_uri"],
"expiresAt": now - timedelta(hours=1),  # Already expired
}
)

response = await client.post(
@@ -961,16 +942,13 @@ async def test_token_refresh_expired(
now = datetime.now(timezone.utc)

await PrismaOAuthRefreshToken.prisma().create(
data=cast(
OAuthRefreshTokenCreateInput,
{
"token": expired_token_hash,
"applicationId": test_oauth_app["id"],
"userId": test_user,
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
"expiresAt": now - timedelta(days=1),  # Already expired
},
)
data={
"token": expired_token_hash,
"applicationId": test_oauth_app["id"],
"userId": test_user,
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
"expiresAt": now - timedelta(days=1),  # Already expired
}
)

response = await client.post(
@@ -1002,17 +980,14 @@ async def test_token_refresh_revoked(
now = datetime.now(timezone.utc)

await PrismaOAuthRefreshToken.prisma().create(
data=cast(
OAuthRefreshTokenCreateInput,
{
"token": revoked_token_hash,
"applicationId": test_oauth_app["id"],
"userId": test_user,
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
"expiresAt": now + timedelta(days=30),  # Not expired
"revokedAt": now - timedelta(hours=1),  # But revoked
},
)
data={
"token": revoked_token_hash,
"applicationId": test_oauth_app["id"],
"userId": test_user,
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
"expiresAt": now + timedelta(days=30),  # Not expired
"revokedAt": now - timedelta(hours=1),  # But revoked
}
)

response = await client.post(
@@ -1038,22 +1013,19 @@ async def other_oauth_app(test_user: str):
client_secret_hash, client_secret_salt = keysmith.hash_key(client_secret_plaintext)

await PrismaOAuthApplication.prisma().create(
data=cast(
OAuthApplicationCreateInput,
{
"id": app_id,
"name": "Other OAuth App",
"description": "Second test application",
"clientId": client_id,
"clientSecret": client_secret_hash,
"clientSecretSalt": client_secret_salt,
"redirectUris": ["https://other.example.com/callback"],
"grantTypes": ["authorization_code", "refresh_token"],
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
"ownerId": test_user,
"isActive": True,
},
)
data={
"id": app_id,
"name": "Other OAuth App",
"description": "Second test application",
"clientId": client_id,
"clientSecret": client_secret_hash,
"clientSecretSalt": client_secret_salt,
"redirectUris": ["https://other.example.com/callback"],
"grantTypes": ["authorization_code", "refresh_token"],
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
"ownerId": test_user,
"isActive": True,
}
)

yield {
@@ -1080,16 +1052,13 @@ async def test_token_refresh_wrong_application(
now = datetime.now(timezone.utc)

await PrismaOAuthRefreshToken.prisma().create(
data=cast(
OAuthRefreshTokenCreateInput,
{
"token": token_hash,
"applicationId": test_oauth_app["id"],  # Belongs to test_oauth_app
"userId": test_user,
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
"expiresAt": now + timedelta(days=30),
},
)
data={
"token": token_hash,
"applicationId": test_oauth_app["id"],  # Belongs to test_oauth_app
"userId": test_user,
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
"expiresAt": now + timedelta(days=30),
}
)

# Try to use it with `other_oauth_app`
@@ -1298,22 +1267,19 @@ async def test_validate_access_token_fails_when_app_disabled(
client_secret_hash, client_secret_salt = keysmith.hash_key(client_secret_plaintext)

await PrismaOAuthApplication.prisma().create(
data=cast(
OAuthApplicationCreateInput,
{
"id": app_id,
"name": "App To Be Disabled",
"description": "Test app for disabled validation",
"clientId": client_id,
"clientSecret": client_secret_hash,
"clientSecretSalt": client_secret_salt,
"redirectUris": ["https://example.com/callback"],
"grantTypes": ["authorization_code"],
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
"ownerId": test_user,
"isActive": True,
},
)
data={
"id": app_id,
"name": "App To Be Disabled",
"description": "Test app for disabled validation",
"clientId": client_id,
"clientSecret": client_secret_hash,
"clientSecretSalt": client_secret_salt,
"redirectUris": ["https://example.com/callback"],
"grantTypes": ["authorization_code"],
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
"ownerId": test_user,
"isActive": True,
}
)

# Create an access token directly in the database
@@ -1322,16 +1288,13 @@ async def test_validate_access_token_fails_when_app_disabled(
now = datetime.now(timezone.utc)

await PrismaOAuthAccessToken.prisma().create(
data=cast(
OAuthAccessTokenCreateInput,
{
"token": token_hash,
"applicationId": app_id,
"userId": test_user,
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
"expiresAt": now + timedelta(hours=1),
},
)
data={
"token": token_hash,
"applicationId": app_id,
"userId": test_user,
"scopes": [APIKeyPermission.EXECUTE_GRAPH],
"expiresAt": now + timedelta(hours=1),
}
)

# Token should be valid while app is active
@@ -1598,22 +1561,19 @@ async def test_revoke_token_from_different_app_fails_silently(
)

await PrismaOAuthApplication.prisma().create(
data=cast(
OAuthApplicationCreateInput,
{
"id": app2_id,
"name": "Second Test OAuth App",
"description": "Second test application for cross-app revocation test",
"clientId": app2_client_id,
"clientSecret": app2_client_secret_hash,
"clientSecretSalt": app2_client_secret_salt,
"redirectUris": ["https://other-app.com/callback"],
"grantTypes": ["authorization_code", "refresh_token"],
"scopes": [APIKeyPermission.EXECUTE_GRAPH, APIKeyPermission.READ_GRAPH],
"ownerId": test_user,
"isActive": True,
},
)
data={
"id": app2_id,
"name": "Second Test OAuth App",
"description": "Second test application for cross-app revocation test",
"clientId": app2_client_id,
"clientSecret": app2_client_secret_hash,
"clientSecretSalt": app2_client_secret_salt,
"redirectUris": ["https://other-app.com/callback"],
"grantTypes": ["authorization_code", "refresh_token"],
"scopes": [APIKeyPermission.EXECUTE_GRAPH, APIKeyPermission.READ_GRAPH],
"ownerId": test_user,
"isActive": True,
}
)

# App 2 tries to revoke App 1's access token
@@ -6,9 +6,9 @@ import pytest
import pytest_mock
from pytest_snapshot.plugin import Snapshot

import backend.server.v2.otto.models as otto_models
import backend.server.v2.otto.routes as otto_routes
from backend.server.v2.otto.service import OttoService
from . import models as otto_models
from . import routes as otto_routes
from .service import OttoService

app = fastapi.FastAPI()
app.include_router(otto_routes.router)
@@ -4,12 +4,15 @@ from typing import Annotated
from fastapi import APIRouter, Body, HTTPException, Query, Security
from fastapi.responses import JSONResponse

from backend.api.utils.api_key_auth import APIKeyAuthenticator
from backend.data.user import (
get_user_by_email,
set_user_email_verification,
unsubscribe_user_by_token,
)
from backend.server.routers.postmark.models import (
from backend.util.settings import Settings

from .models import (
PostmarkBounceEnum,
PostmarkBounceWebhook,
PostmarkClickWebhook,
@@ -19,8 +22,6 @@ from backend.server.routers.postmark.models import (
PostmarkSubscriptionChangeWebhook,
PostmarkWebhook,
)
from backend.server.utils.api_key_auth import APIKeyAuthenticator
from backend.util.settings import Settings

logger = logging.getLogger(__name__)
settings = Settings()
@@ -56,7 +57,7 @@ async def postmark_webhook_handler(
webhook: Annotated[
PostmarkWebhook,
Body(discriminator="RecordType"),
],
]
):
logger.info(f"Received webhook from Postmark: {webhook}")
match webhook:
@@ -1,8 +1,9 @@
from typing import Literal

import backend.server.v2.store.db
from backend.util.cache import cached

from . import db as store_db

##############################################
############### Caches #######################
##############################################
@@ -29,7 +30,7 @@ async def _get_cached_store_agents(
page_size: int,
):
"""Cached helper to get store agents."""
return await backend.server.v2.store.db.get_store_agents(
return await store_db.get_store_agents(
featured=featured,
creators=[creator] if creator else None,
sorted_by=sorted_by,
@@ -42,10 +43,12 @@

# Cache individual agent details for 5 minutes
@cached(maxsize=200, ttl_seconds=300, shared_cache=True)
async def _get_cached_agent_details(username: str, agent_name: str):
async def _get_cached_agent_details(
username: str, agent_name: str, include_changelog: bool = False
):
"""Cached helper to get agent details."""
return await backend.server.v2.store.db.get_store_agent_details(
username=username, agent_name=agent_name
return await store_db.get_store_agent_details(
username=username, agent_name=agent_name, include_changelog=include_changelog
)


@@ -59,7 +62,7 @@ async def _get_cached_store_creators(
page_size: int,
):
"""Cached helper to get store creators."""
return await backend.server.v2.store.db.get_store_creators(
return await store_db.get_store_creators(
featured=featured,
search_query=search_query,
sorted_by=sorted_by,
@@ -72,6 +75,4 @@
@cached(maxsize=100, ttl_seconds=300, shared_cache=True)
async def _get_cached_creator_details(username: str):
"""Cached helper to get creator details."""
return await backend.server.v2.store.db.get_store_creator_details(
username=username.lower()
)
return await store_db.get_store_creator_details(username=username.lower())
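The helpers above all follow one pattern: a route-facing function delegates to a `@cached`-wrapped coroutine so repeated store lookups within the TTL never hit the database. `backend.util.cache.cached` itself is not part of this diff; a minimal sketch of a decorator compatible with these call sites (in-memory only, naive FIFO eviction, `shared_cache` accepted but ignored — all assumptions of this sketch) might look like:

```python
import functools
import time


def cached(maxsize: int, ttl_seconds: int, shared_cache: bool = False):
    """Minimal async TTL memoizer; the real implementation may differ."""

    def decorator(fn):
        store: dict = {}  # key -> (expiry deadline, value)

        @functools.wraps(fn)
        async def wrapper(*args, **kwargs):
            key = (args, tuple(sorted(kwargs.items())))
            hit = store.get(key)
            if hit is not None and hit[0] > time.monotonic():
                return hit[1]
            value = await fn(*args, **kwargs)
            if len(store) >= maxsize:
                store.pop(next(iter(store)))  # evict the oldest entry
            store[key] = (time.monotonic() + ttl_seconds, value)
            return value

        return wrapper

    return decorator
```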
@@ -2,17 +2,14 @@ import asyncio
import logging
import typing
from datetime import datetime, timezone
from typing import Literal, cast
from typing import Literal

import fastapi
import prisma.enums
import prisma.errors
import prisma.models
import prisma.types
from prisma.types import SearchTermsCreateInput, StoreListingVersionCreateInput

import backend.server.v2.store.exceptions
import backend.server.v2.store.model
from backend.data.db import query_raw_with_schema, transaction
from backend.data.graph import (
GraphMeta,
@@ -31,6 +28,9 @@ from backend.notifications.notifications import queue_notification_async
from backend.util.exceptions import DatabaseError
from backend.util.settings import Settings

from . import exceptions as store_exceptions
from . import model as store_model

logger = logging.getLogger(__name__)
settings = Settings()

@@ -48,7 +48,7 @@ async def get_store_agents(
category: str | None = None,
page: int = 1,
page_size: int = 20,
) -> backend.server.v2.store.model.StoreAgentsResponse:
) -> store_model.StoreAgentsResponse:
"""
Get PUBLIC store agents from the StoreAgent view
"""
@@ -149,10 +149,10 @@ async def get_store_agents(
total_pages = (total + page_size - 1) // page_size

# Convert raw results to StoreAgent models
store_agents: list[backend.server.v2.store.model.StoreAgent] = []
store_agents: list[store_model.StoreAgent] = []
for agent in agents:
try:
store_agent = backend.server.v2.store.model.StoreAgent(
store_agent = store_model.StoreAgent(
slug=agent["slug"],
agent_name=agent["agent_name"],
agent_image=(
@@ -198,11 +198,11 @@ async def get_store_agents(
total = await prisma.models.StoreAgent.prisma().count(where=where_clause)
total_pages = (total + page_size - 1) // page_size

store_agents: list[backend.server.v2.store.model.StoreAgent] = []
store_agents: list[store_model.StoreAgent] = []
for agent in agents:
try:
# Create the StoreAgent object safely
store_agent = backend.server.v2.store.model.StoreAgent(
store_agent = store_model.StoreAgent(
slug=agent.slug,
agent_name=agent.agent_name,
agent_image=agent.agent_image[0] if agent.agent_image else "",
@@ -224,9 +224,9 @@ async def get_store_agents(
continue

logger.debug(f"Found {len(store_agents)} agents")
return backend.server.v2.store.model.StoreAgentsResponse(
return store_model.StoreAgentsResponse(
agents=store_agents,
pagination=backend.server.v2.store.model.Pagination(
pagination=store_model.Pagination(
current_page=page,
total_items=total,
total_pages=total_pages,
@@ -249,10 +249,7 @@ async def log_search_term(search_query: str):
date = datetime.now(timezone.utc).replace(hour=0, minute=0, second=0, microsecond=0)
try:
await prisma.models.SearchTerms.prisma().create(
data=cast(
SearchTermsCreateInput,
{"searchTerm": search_query, "createdDate": date},
)
data={"searchTerm": search_query, "createdDate": date}
)
except Exception as e:
# Fail silently here so that logging search terms doesn't break the app
@@ -260,8 +257,8 @@ async def log_search_term(search_query: str):


async def get_store_agent_details(
username: str, agent_name: str
) -> backend.server.v2.store.model.StoreAgentDetails:
username: str, agent_name: str, include_changelog: bool = False
) -> store_model.StoreAgentDetails:
"""Get PUBLIC store agent details from the StoreAgent view"""
logger.debug(f"Getting store agent details for {username}/{agent_name}")

@@ -272,7 +269,7 @@ async def get_store_agent_details(

if not agent:
logger.warning(f"Agent not found: {username}/{agent_name}")
raise backend.server.v2.store.exceptions.AgentNotFoundError(
raise store_exceptions.AgentNotFoundError(
f"Agent {username}/{agent_name} not found"
)

@@ -325,8 +322,29 @@ async def get_store_agent_details(
else:
recommended_schedule_cron = None

# Fetch changelog data if requested
changelog_data = None
if include_changelog and store_listing:
changelog_versions = (
await prisma.models.StoreListingVersion.prisma().find_many(
where={
"storeListingId": store_listing.id,
"submissionStatus": prisma.enums.SubmissionStatus.APPROVED,
},
order=[{"version": "desc"}],
)
)
changelog_data = [
store_model.ChangelogEntry(
version=str(version.version),
changes_summary=version.changesSummary or "No changes recorded",
date=version.createdAt,
)
for version in changelog_versions
]

logger.debug(f"Found agent details for {username}/{agent_name}")
return backend.server.v2.store.model.StoreAgentDetails(
return store_model.StoreAgentDetails(
store_listing_version_id=agent.storeListingVersionId,
slug=agent.slug,
agent_name=agent.agent_name,
@@ -341,12 +359,15 @@ async def get_store_agent_details(
runs=agent.runs,
rating=agent.rating,
versions=agent.versions,
agentGraphVersions=agent.agentGraphVersions,
agentGraphId=agent.agentGraphId,
last_updated=agent.updated_at,
active_version_id=active_version_id,
has_approved_version=has_approved_version,
recommended_schedule_cron=recommended_schedule_cron,
changelog=changelog_data,
)
except backend.server.v2.store.exceptions.AgentNotFoundError:
except store_exceptions.AgentNotFoundError:
raise
except Exception as e:
logger.error(f"Error getting store agent details: {e}")
@@ -382,7 +403,7 @@ async def get_available_graph(store_listing_version_id: str) -> GraphMeta:

async def get_store_agent_by_version_id(
store_listing_version_id: str,
) -> backend.server.v2.store.model.StoreAgentDetails:
) -> store_model.StoreAgentDetails:
logger.debug(f"Getting store agent details for {store_listing_version_id}")

try:
@@ -392,12 +413,12 @@ async def get_store_agent_by_version_id(

if not agent:
logger.warning(f"Agent not found: {store_listing_version_id}")
raise backend.server.v2.store.exceptions.AgentNotFoundError(
raise store_exceptions.AgentNotFoundError(
f"Agent {store_listing_version_id} not found"
)

logger.debug(f"Found agent details for {store_listing_version_id}")
return backend.server.v2.store.model.StoreAgentDetails(
return store_model.StoreAgentDetails(
store_listing_version_id=agent.storeListingVersionId,
slug=agent.slug,
agent_name=agent.agent_name,
@@ -412,9 +433,11 @@ async def get_store_agent_by_version_id(
runs=agent.runs,
rating=agent.rating,
versions=agent.versions,
agentGraphVersions=agent.agentGraphVersions,
agentGraphId=agent.agentGraphId,
last_updated=agent.updated_at,
)
except backend.server.v2.store.exceptions.AgentNotFoundError:
except store_exceptions.AgentNotFoundError:
raise
except Exception as e:
logger.error(f"Error getting store agent details: {e}")
@@ -427,7 +450,7 @@ async def get_store_creators(
sorted_by: Literal["agent_rating", "agent_runs", "num_agents"] | None = None,
page: int = 1,
page_size: int = 20,
) -> backend.server.v2.store.model.CreatorsResponse:
) -> store_model.CreatorsResponse:
"""Get PUBLIC store creators from the Creator view"""
logger.debug(
f"Getting store creators. featured={featured}, search={search_query}, sorted_by={sorted_by}, page={page}"
@@ -502,7 +525,7 @@ async def get_store_creators(

# Convert to response model
creator_models = [
backend.server.v2.store.model.Creator(
store_model.Creator(
username=creator.username,
name=creator.name,
description=creator.description,
@@ -516,9 +539,9 @@ async def get_store_creators(
]

logger.debug(f"Found {len(creator_models)} creators")
return backend.server.v2.store.model.CreatorsResponse(
return store_model.CreatorsResponse(
creators=creator_models,
pagination=backend.server.v2.store.model.Pagination(
pagination=store_model.Pagination(
current_page=page,
total_items=total,
total_pages=total_pages,
@@ -532,7 +555,7 @@ async def get_store_creators(

async def get_store_creator_details(
username: str,
) -> backend.server.v2.store.model.CreatorDetails:
) -> store_model.CreatorDetails:
logger.debug(f"Getting store creator details for {username}")

try:
@@ -543,12 +566,10 @@ async def get_store_creator_details(

if not creator:
logger.warning(f"Creator not found: {username}")
raise backend.server.v2.store.exceptions.CreatorNotFoundError(
f"Creator {username} not found"
)
raise store_exceptions.CreatorNotFoundError(f"Creator {username} not found")

logger.debug(f"Found creator details for {username}")
return backend.server.v2.store.model.CreatorDetails(
return store_model.CreatorDetails(
name=creator.name,
username=creator.username,
description=creator.description,
@@ -558,7 +579,7 @@ async def get_store_creator_details(
agent_runs=creator.agent_runs,
top_categories=creator.top_categories,
)
except backend.server.v2.store.exceptions.CreatorNotFoundError:
except store_exceptions.CreatorNotFoundError:
raise
except Exception as e:
logger.error(f"Error getting store creator details: {e}")
@@ -567,7 +588,7 @@ async def get_store_creator_details(

async def get_store_submissions(
user_id: str, page: int = 1, page_size: int = 20
) -> backend.server.v2.store.model.StoreSubmissionsResponse:
) -> store_model.StoreSubmissionsResponse:
"""Get store submissions for the authenticated user -- not an admin"""
logger.debug(f"Getting store submissions for user {user_id}, page={page}")

@@ -592,7 +613,7 @@ async def get_store_submissions(
# Convert to response models
submission_models = []
for sub in submissions:
submission_model = backend.server.v2.store.model.StoreSubmission(
submission_model = store_model.StoreSubmission(
agent_id=sub.agent_id,
agent_version=sub.agent_version,
name=sub.name,
@@ -617,9 +638,9 @@ async def get_store_submissions(
submission_models.append(submission_model)

logger.debug(f"Found {len(submission_models)} submissions")
return backend.server.v2.store.model.StoreSubmissionsResponse(
return store_model.StoreSubmissionsResponse(
submissions=submission_models,
pagination=backend.server.v2.store.model.Pagination(
pagination=store_model.Pagination(
current_page=page,
total_items=total,
total_pages=total_pages,
@@ -630,9 +651,9 @@ async def get_store_submissions(
except Exception as e:
logger.error(f"Error fetching store submissions: {e}")
# Return empty response rather than exposing internal errors
return backend.server.v2.store.model.StoreSubmissionsResponse(
return store_model.StoreSubmissionsResponse(
submissions=[],
pagination=backend.server.v2.store.model.Pagination(
pagination=store_model.Pagination(
current_page=page,
total_items=0,
total_pages=0,
@@ -665,7 +686,7 @@ async def delete_store_submission(

if not submission:
logger.warning(f"Submission not found for user {user_id}: {submission_id}")
raise backend.server.v2.store.exceptions.SubmissionNotFoundError(
raise store_exceptions.SubmissionNotFoundError(
f"Submission not found for this user. User ID: {user_id}, Submission ID: {submission_id}"
)

@@ -697,7 +718,7 @@ async def create_store_submission(
categories: list[str] = [],
changes_summary: str | None = "Initial Submission",
recommended_schedule_cron: str | None = None,
) -> backend.server.v2.store.model.StoreSubmission:
) -> store_model.StoreSubmission:
"""
Create the first (and only) store listing and thus submission as a normal user

@@ -738,7 +759,7 @@ async def create_store_submission(
logger.warning(
f"Agent not found for user {user_id}: {agent_id} v{agent_version}"
)
raise backend.server.v2.store.exceptions.AgentNotFoundError(
raise store_exceptions.AgentNotFoundError(
f"Agent not found for this user. User ID: {user_id}, Agent ID: {agent_id}, Version: {agent_version}"
)

@@ -811,7 +832,7 @@ async def create_store_submission(

logger.debug(f"Created store listing for agent {agent_id}")
# Return submission details
return backend.server.v2.store.model.StoreSubmission(
return store_model.StoreSubmission(
agent_id=agent_id,
agent_version=agent_version,
name=name,
@@ -834,7 +855,7 @@ async def create_store_submission(
logger.debug(
f"Slug '{slug}' is already in use by another agent (agent_id: {agent_id}) for user {user_id}"
)
raise backend.server.v2.store.exceptions.SlugAlreadyInUseError(
raise store_exceptions.SlugAlreadyInUseError(
f"The URL slug '{slug}' is already in use by another one of your agents. Please choose a different slug."
) from exc
else:
@@ -843,8 +864,8 @@ async def create_store_submission(
f"Unique constraint violated (not slug): {error_str}"
) from exc
except (
backend.server.v2.store.exceptions.AgentNotFoundError,
backend.server.v2.store.exceptions.ListingExistsError,
store_exceptions.AgentNotFoundError,
store_exceptions.ListingExistsError,
):
raise
except prisma.errors.PrismaError as e:
@@ -865,7 +886,7 @@ async def edit_store_submission(
changes_summary: str | None = "Update submission",
recommended_schedule_cron: str | None = None,
instructions: str | None = None,
) -> backend.server.v2.store.model.StoreSubmission:
) -> store_model.StoreSubmission:
"""
Edit an existing store listing submission.

@@ -907,7 +928,7 @@ async def edit_store_submission(
)

if not current_version:
raise backend.server.v2.store.exceptions.SubmissionNotFoundError(
raise store_exceptions.SubmissionNotFoundError(
f"Store listing version not found: {store_listing_version_id}"
)

@@ -916,7 +937,7 @@ async def edit_store_submission(
not current_version.StoreListing
or current_version.StoreListing.owningUserId != user_id
):
raise backend.server.v2.store.exceptions.UnauthorizedError(
raise store_exceptions.UnauthorizedError(
f"User {user_id} does not own submission {store_listing_version_id}"
)

@@ -925,7 +946,7 @@ async def edit_store_submission(

# Check if we can edit this submission
if current_version.submissionStatus == prisma.enums.SubmissionStatus.REJECTED:
raise backend.server.v2.store.exceptions.InvalidOperationError(
raise store_exceptions.InvalidOperationError(
"Cannot edit a rejected submission"
)

@@ -974,7 +995,7 @@ async def edit_store_submission(

if not updated_version:
raise DatabaseError("Failed to update store listing version")
return backend.server.v2.store.model.StoreSubmission(
return store_model.StoreSubmission(
agent_id=current_version.agentGraphId,
agent_version=current_version.agentGraphVersion,
name=name,
@@ -995,16 +1016,16 @@ async def edit_store_submission(
)

else:
raise backend.server.v2.store.exceptions.InvalidOperationError(
raise store_exceptions.InvalidOperationError(
f"Cannot edit submission with status: {current_version.submissionStatus}"
)

except (
backend.server.v2.store.exceptions.SubmissionNotFoundError,
backend.server.v2.store.exceptions.UnauthorizedError,
backend.server.v2.store.exceptions.AgentNotFoundError,
backend.server.v2.store.exceptions.ListingExistsError,
backend.server.v2.store.exceptions.InvalidOperationError,
store_exceptions.SubmissionNotFoundError,
store_exceptions.UnauthorizedError,
store_exceptions.AgentNotFoundError,
store_exceptions.ListingExistsError,
store_exceptions.InvalidOperationError,
):
raise
except prisma.errors.PrismaError as e:
@@ -1027,7 +1048,7 @@ async def create_store_version(
categories: list[str] = [],
changes_summary: str | None = "Initial submission",
recommended_schedule_cron: str | None = None,
) -> backend.server.v2.store.model.StoreSubmission:
) -> store_model.StoreSubmission:
"""
Create a new version for an existing store listing

@@ -1060,7 +1081,7 @@ async def create_store_version(
)

if not listing:
raise backend.server.v2.store.exceptions.ListingNotFoundError(
raise store_exceptions.ListingNotFoundError(
f"Store listing not found. User ID: {user_id}, Listing ID: {store_listing_id}"
)

@@ -1072,7 +1093,7 @@ async def create_store_version(
)

if not agent:
raise backend.server.v2.store.exceptions.AgentNotFoundError(
raise store_exceptions.AgentNotFoundError(
f"Agent not found for this user. User ID: {user_id}, Agent ID: {agent_id}, Version: {agent_version}"
)

@@ -1107,7 +1128,7 @@ async def create_store_version(
f"Created new version for listing {store_listing_id} of agent {agent_id}"
)
# Return submission details
return backend.server.v2.store.model.StoreSubmission(
return store_model.StoreSubmission(
agent_id=agent_id,
agent_version=agent_version,
name=name,
@@ -1134,7 +1155,7 @@ async def create_store_review(
store_listing_version_id: str,
score: int,
comments: str | None = None,
) -> backend.server.v2.store.model.StoreReview:
) -> store_model.StoreReview:
"""Create a review for a store listing as a user to detail their experience"""
try:
data = prisma.types.StoreListingReviewUpsertInput(
@@ -1159,7 +1180,7 @@ async def create_store_review(
data=data,
)

return backend.server.v2.store.model.StoreReview(
return store_model.StoreReview(
score=review.score,
comments=review.comments,
)
@@ -1171,7 +1192,7 @@ async def create_store_review(

async def get_user_profile(
user_id: str,
) -> backend.server.v2.store.model.ProfileDetails | None:
) -> store_model.ProfileDetails | None:
logger.debug(f"Getting user profile for {user_id}")

try:
@@ -1181,7 +1202,7 @@ async def get_user_profile(

if not profile:
return None
return backend.server.v2.store.model.ProfileDetails(
return store_model.ProfileDetails(
name=profile.name,
username=profile.username,
description=profile.description,
@@ -1194,8 +1215,8 @@ async def get_user_profile(


async def update_profile(
user_id: str, profile: backend.server.v2.store.model.Profile
) -> backend.server.v2.store.model.CreatorDetails:
user_id: str, profile: store_model.Profile
) -> store_model.CreatorDetails:
"""
Update the store profile for a user or create a new one if it doesn't exist.
Args:
@@ -1218,7 +1239,7 @@ async def update_profile(
where={"userId": user_id}
)
if not existing_profile:
raise backend.server.v2.store.exceptions.ProfileNotFoundError(
raise store_exceptions.ProfileNotFoundError(
f"Profile not found for user {user_id}. This should not be possible."
)

@@ -1254,7 +1275,7 @@ async def update_profile(
logger.error(f"Failed to update profile for user {user_id}")
raise DatabaseError("Failed to update profile")

return backend.server.v2.store.model.CreatorDetails(
return store_model.CreatorDetails(
name=updated_profile.name,
username=updated_profile.username,
description=updated_profile.description,
@@ -1274,7 +1295,7 @@ async def get_my_agents(
user_id: str,
page: int = 1,
page_size: int = 20,
) -> backend.server.v2.store.model.MyAgentsResponse:
) -> store_model.MyAgentsResponse:
"""Get the agents for the authenticated user"""
logger.debug(f"Getting my agents for user {user_id}, page={page}")

@@ -1311,7 +1332,7 @@ async def get_my_agents(
total_pages = (total + page_size - 1) // page_size

my_agents = [
backend.server.v2.store.model.MyAgent(
store_model.MyAgent(
agent_id=graph.id,
agent_version=graph.version,
agent_name=graph.name or "",
@@ -1324,9 +1345,9 @@ async def get_my_agents(
if (graph := library_agent.AgentGraph)
]

return backend.server.v2.store.model.MyAgentsResponse(
return store_model.MyAgentsResponse(
agents=my_agents,
pagination=backend.server.v2.store.model.Pagination(
pagination=store_model.Pagination(
current_page=page,
total_items=total,
total_pages=total_pages,
@@ -1435,14 +1456,11 @@ async def _approve_sub_agent(
# Create new version if no matching version found
next_version = max((v.version for v in listing.Versions or []), default=0) + 1
await prisma.models.StoreListingVersion.prisma(tx).create(
data=cast(
StoreListingVersionCreateInput,
{
**_create_sub_agent_version_data(sub_graph, heading, main_agent_name),
"version": next_version,
"storeListingId": listing.id,
},
)
data={
**_create_sub_agent_version_data(sub_graph, heading, main_agent_name),
"version": next_version,
"storeListingId": listing.id,
}
)
await prisma.models.StoreListing.prisma(tx).update(
where={"id": listing.id}, data={"hasApprovedVersion": True}
@@ -1476,7 +1494,7 @@ async def review_store_submission(
external_comments: str,
internal_comments: str,
reviewer_id: str,
) -> backend.server.v2.store.model.StoreSubmission:
) -> store_model.StoreSubmission:
"""Review a store listing submission as an admin."""
try:
store_listing_version = (
@@ -1689,7 +1707,7 @@ async def review_store_submission(
pass

# Convert to Pydantic model for consistency
return backend.server.v2.store.model.StoreSubmission(
return store_model.StoreSubmission(
agent_id=submission.agentGraphId,
agent_version=submission.agentGraphVersion,
name=submission.name,
@@ -1724,7 +1742,7 @@ async def get_admin_listings_with_versions(
search_query: str | None = None,
page: int = 1,
page_size: int = 20,
) -> backend.server.v2.store.model.StoreListingsWithVersionsResponse:
) -> store_model.StoreListingsWithVersionsResponse:
"""
Get store listings for admins with all their versions.

@@ -1823,10 +1841,10 @@ async def get_admin_listings_with_versions(
# Convert to response models
listings_with_versions = []
for listing in listings:
versions: list[backend.server.v2.store.model.StoreSubmission] = []
versions: list[store_model.StoreSubmission] = []
# If we have versions, turn them into StoreSubmission models
for version in listing.Versions or []:
version_model = backend.server.v2.store.model.StoreSubmission(
version_model = store_model.StoreSubmission(
agent_id=version.agentGraphId,
agent_version=version.agentGraphVersion,
name=version.name,
@@ -1854,26 +1872,24 @@ async def get_admin_listings_with_versions(

creator_email = listing.OwningUser.email if listing.OwningUser else None

listing_with_versions = (
backend.server.v2.store.model.StoreListingWithVersions(
listing_id=listing.id,
slug=listing.slug,
agent_id=listing.agentGraphId,
agent_version=listing.agentGraphVersion,
active_version_id=listing.activeVersionId,
has_approved_version=listing.hasApprovedVersion,
creator_email=creator_email,
latest_version=latest_version,
versions=versions,
)
listing_with_versions = store_model.StoreListingWithVersions(
listing_id=listing.id,
slug=listing.slug,
agent_id=listing.agentGraphId,
agent_version=listing.agentGraphVersion,
active_version_id=listing.activeVersionId,
has_approved_version=listing.hasApprovedVersion,
creator_email=creator_email,
latest_version=latest_version,
versions=versions,
)

listings_with_versions.append(listing_with_versions)

logger.debug(f"Found {len(listings_with_versions)} listings for admin")
return backend.server.v2.store.model.StoreListingsWithVersionsResponse(
return store_model.StoreListingsWithVersionsResponse(
listings=listings_with_versions,
pagination=backend.server.v2.store.model.Pagination(
pagination=store_model.Pagination(
current_page=page,
total_items=total,
total_pages=total_pages,
@@ -1883,9 +1899,9 @@ async def get_admin_listings_with_versions(
except Exception as e:
logger.error(f"Error fetching admin store listings: {e}")
# Return empty response rather than exposing internal errors
return backend.server.v2.store.model.StoreListingsWithVersionsResponse(
return store_model.StoreListingsWithVersionsResponse(
listings=[],
pagination=backend.server.v2.store.model.Pagination(
pagination=store_model.Pagination(
current_page=page,
total_items=0,
total_pages=0,
@@ -6,8 +6,8 @@ import prisma.models
import pytest
from prisma import Prisma

import backend.server.v2.store.db as db
from backend.server.v2.store.model import Profile
from . import db
from .model import Profile


@pytest.fixture(autouse=True)
@@ -40,6 +40,8 @@ async def test_get_store_agents(mocker):
runs=10,
rating=4.5,
versions=["1.0"],
agentGraphVersions=["1"],
agentGraphId="test-graph-id",
updated_at=datetime.now(),
is_available=False,
useForOnboarding=False,
@@ -83,6 +85,8 @@ async def test_get_store_agent_details(mocker):
runs=10,
rating=4.5,
versions=["1.0"],
agentGraphVersions=["1"],
agentGraphId="test-graph-id",
updated_at=datetime.now(),
is_available=False,
useForOnboarding=False,
@@ -105,6 +109,8 @@ async def test_get_store_agent_details(mocker):
runs=15,
rating=4.8,
versions=["1.0", "2.0"],
agentGraphVersions=["1", "2"],
agentGraphId="test-graph-id-active",
updated_at=datetime.now(),
is_available=True,
useForOnboarding=False,
@@ -5,11 +5,12 @@ import uuid
import fastapi
from gcloud.aio import storage as async_storage

import backend.server.v2.store.exceptions
from backend.util.exceptions import MissingConfigError
from backend.util.settings import Settings
from backend.util.virus_scanner import scan_content_safe

from . import exceptions as store_exceptions

logger = logging.getLogger(__name__)

ALLOWED_IMAGE_TYPES = {"image/jpeg", "image/png", "image/gif", "image/webp"}
@@ -68,61 +69,55 @@ async def upload_media(
await file.seek(0) # Reset file pointer
except Exception as e:
logger.error(f"Error reading file content: {str(e)}")
raise backend.server.v2.store.exceptions.FileReadError(
"Failed to read file content"
) from e
raise store_exceptions.FileReadError("Failed to read file content") from e

# Validate file signature/magic bytes
if file.content_type in ALLOWED_IMAGE_TYPES:
# Check image file signatures
if content.startswith(b"\xff\xd8\xff"): # JPEG
if file.content_type != "image/jpeg":
raise backend.server.v2.store.exceptions.InvalidFileTypeError(
raise store_exceptions.InvalidFileTypeError(
"File signature does not match content type"
)
elif content.startswith(b"\x89PNG\r\n\x1a\n"): # PNG
if file.content_type != "image/png":
raise backend.server.v2.store.exceptions.InvalidFileTypeError(
raise store_exceptions.InvalidFileTypeError(
"File signature does not match content type"
)
elif content.startswith(b"GIF87a") or content.startswith(b"GIF89a"): # GIF
if file.content_type != "image/gif":
raise backend.server.v2.store.exceptions.InvalidFileTypeError(
raise store_exceptions.InvalidFileTypeError(
"File signature does not match content type"
)
elif content.startswith(b"RIFF") and content[8:12] == b"WEBP": # WebP
if file.content_type != "image/webp":
raise backend.server.v2.store.exceptions.InvalidFileTypeError(
raise store_exceptions.InvalidFileTypeError(
"File signature does not match content type"
)
else:
raise backend.server.v2.store.exceptions.InvalidFileTypeError(
"Invalid image file signature"
)
raise store_exceptions.InvalidFileTypeError("Invalid image file signature")

elif file.content_type in ALLOWED_VIDEO_TYPES:
# Check video file signatures
if content.startswith(b"\x00\x00\x00") and (content[4:8] == b"ftyp"): # MP4
if file.content_type != "video/mp4":
raise backend.server.v2.store.exceptions.InvalidFileTypeError(
raise store_exceptions.InvalidFileTypeError(
"File signature does not match content type"
)
elif content.startswith(b"\x1a\x45\xdf\xa3"): # WebM
if file.content_type != "video/webm":
raise backend.server.v2.store.exceptions.InvalidFileTypeError(
raise store_exceptions.InvalidFileTypeError(
"File signature does not match content type"
)
else:
raise backend.server.v2.store.exceptions.InvalidFileTypeError(
"Invalid video file signature"
)
raise store_exceptions.InvalidFileTypeError("Invalid video file signature")

settings = Settings()

# Check required settings first before doing any file processing
if not settings.config.media_gcs_bucket_name:
logger.error("Missing GCS bucket name setting")
raise backend.server.v2.store.exceptions.StorageConfigError(
raise store_exceptions.StorageConfigError(
"Missing storage bucket configuration"
)

@@ -137,7 +132,7 @@ async def upload_media(
and content_type not in ALLOWED_VIDEO_TYPES
):
logger.warning(f"Invalid file type attempted: {content_type}")
raise backend.server.v2.store.exceptions.InvalidFileTypeError(
raise store_exceptions.InvalidFileTypeError(
f"File type not supported. Must be jpeg, png, gif, webp, mp4 or webm. Content type: {content_type}"
)

@@ -150,16 +145,14 @@ async def upload_media(
file_size += len(chunk)
if file_size > MAX_FILE_SIZE:
logger.warning(f"File size too large: {file_size} bytes")
raise backend.server.v2.store.exceptions.FileSizeTooLargeError(
raise store_exceptions.FileSizeTooLargeError(
"File too large. Maximum size is 50MB"
)
except backend.server.v2.store.exceptions.FileSizeTooLargeError:
except store_exceptions.FileSizeTooLargeError:
raise
except Exception as e:
logger.error(f"Error reading file chunks: {str(e)}")
raise backend.server.v2.store.exceptions.FileReadError(
"Failed to read uploaded file"
) from e
raise store_exceptions.FileReadError("Failed to read uploaded file") from e

# Reset file pointer
await file.seek(0)
@@ -198,14 +191,14 @@ async def upload_media(

except Exception as e:
logger.error(f"GCS storage error: {str(e)}")
raise backend.server.v2.store.exceptions.StorageUploadError(
raise store_exceptions.StorageUploadError(
"Failed to upload file to storage"
) from e

except backend.server.v2.store.exceptions.MediaUploadError:
except store_exceptions.MediaUploadError:
raise
except Exception as e:
logger.exception("Unexpected error in upload_media")
raise backend.server.v2.store.exceptions.MediaUploadError(
raise store_exceptions.MediaUploadError(
"Unexpected error during media upload"
) from e
@@ -6,17 +6,18 @@ import fastapi
import pytest
import starlette.datastructures

import backend.server.v2.store.exceptions
import backend.server.v2.store.media
from backend.util.settings import Settings

from . import exceptions as store_exceptions
from . import media as store_media


@pytest.fixture
def mock_settings(monkeypatch):
settings = Settings()
settings.config.media_gcs_bucket_name = "test-bucket"
settings.config.google_application_credentials = "test-credentials"
monkeypatch.setattr("backend.server.v2.store.media.Settings", lambda: settings)
monkeypatch.setattr("backend.api.features.store.media.Settings", lambda: settings)
return settings


@@ -32,12 +33,13 @@ def mock_storage_client(mocker):

# Mock the constructor to return our mock client
mocker.patch(
"backend.server.v2.store.media.async_storage.Storage", return_value=mock_client
"backend.api.features.store.media.async_storage.Storage",
return_value=mock_client,
)

# Mock virus scanner to avoid actual scanning
mocker.patch(
"backend.server.v2.store.media.scan_content_safe", new_callable=AsyncMock
"backend.api.features.store.media.scan_content_safe", new_callable=AsyncMock
)

return mock_client
@@ -45,7 +47,7 @@ def mock_storage_client(mocker):

async def test_upload_media_success(mock_settings, mock_storage_client):
# Create test JPEG data with valid signature
test_data = b"\xff\xd8\xff" + b"test data"
test_data = b"\xFF\xD8\xFF" + b"test data"

test_file = fastapi.UploadFile(
filename="laptop.jpeg",
@@ -53,7 +55,7 @@ async def test_upload_media_success(mock_settings, mock_storage_client):
headers=starlette.datastructures.Headers({"content-type": "image/jpeg"}),
)

result = await backend.server.v2.store.media.upload_media("test-user", test_file)
result = await store_media.upload_media("test-user", test_file)

assert result.startswith(
"https://storage.googleapis.com/test-bucket/users/test-user/images/"
@@ -69,8 +71,8 @@ async def test_upload_media_invalid_type(mock_settings, mock_storage_client):
headers=starlette.datastructures.Headers({"content-type": "text/plain"}),
)

with pytest.raises(backend.server.v2.store.exceptions.InvalidFileTypeError):
await backend.server.v2.store.media.upload_media("test-user", test_file)
with pytest.raises(store_exceptions.InvalidFileTypeError):
await store_media.upload_media("test-user", test_file)

mock_storage_client.upload.assert_not_called()

@@ -79,16 +81,16 @@ async def test_upload_media_missing_credentials(monkeypatch):
settings = Settings()
settings.config.media_gcs_bucket_name = ""
settings.config.google_application_credentials = ""
monkeypatch.setattr("backend.server.v2.store.media.Settings", lambda: settings)
monkeypatch.setattr("backend.api.features.store.media.Settings", lambda: settings)

test_file = fastapi.UploadFile(
filename="laptop.jpeg",
file=io.BytesIO(b"\xff\xd8\xff" + b"test data"), # Valid JPEG signature
file=io.BytesIO(b"\xFF\xD8\xFF" + b"test data"), # Valid JPEG signature
headers=starlette.datastructures.Headers({"content-type": "image/jpeg"}),
)

with pytest.raises(backend.server.v2.store.exceptions.StorageConfigError):
await backend.server.v2.store.media.upload_media("test-user", test_file)
with pytest.raises(store_exceptions.StorageConfigError):
await store_media.upload_media("test-user", test_file)


async def test_upload_media_video_type(mock_settings, mock_storage_client):
@@ -98,7 +100,7 @@ async def test_upload_media_video_type(mock_settings, mock_storage_client):
headers=starlette.datastructures.Headers({"content-type": "video/mp4"}),
)

result = await backend.server.v2.store.media.upload_media("test-user", test_file)
result = await store_media.upload_media("test-user", test_file)

assert result.startswith(
"https://storage.googleapis.com/test-bucket/users/test-user/videos/"
@@ -108,7 +110,7 @@ async def test_upload_media_video_type(mock_settings, mock_storage_client):


async def test_upload_media_file_too_large(mock_settings, mock_storage_client):
large_data = b"\xff\xd8\xff" + b"x" * (
large_data = b"\xFF\xD8\xFF" + b"x" * (
50 * 1024 * 1024 + 1
) # 50MB + 1 byte with valid JPEG signature
test_file = fastapi.UploadFile(
@@ -117,8 +119,8 @@ async def test_upload_media_file_too_large(mock_settings, mock_storage_client):
headers=starlette.datastructures.Headers({"content-type": "image/jpeg"}),
)

with pytest.raises(backend.server.v2.store.exceptions.FileSizeTooLargeError):
await backend.server.v2.store.media.upload_media("test-user", test_file)
with pytest.raises(store_exceptions.FileSizeTooLargeError):
await store_media.upload_media("test-user", test_file)


async def test_upload_media_file_read_error(mock_settings, mock_storage_client):
@@ -129,8 +131,8 @@ async def test_upload_media_file_read_error(mock_settings, mock_storage_client):
)
test_file.read = unittest.mock.AsyncMock(side_effect=Exception("Read error"))

with pytest.raises(backend.server.v2.store.exceptions.FileReadError):
await backend.server.v2.store.media.upload_media("test-user", test_file)
with pytest.raises(store_exceptions.FileReadError):
await store_media.upload_media("test-user", test_file)


async def test_upload_media_png_success(mock_settings, mock_storage_client):
@@ -140,7 +142,7 @@ async def test_upload_media_png_success(mock_settings, mock_storage_client):
headers=starlette.datastructures.Headers({"content-type": "image/png"}),
)

result = await backend.server.v2.store.media.upload_media("test-user", test_file)
result = await store_media.upload_media("test-user", test_file)
assert result.startswith(
"https://storage.googleapis.com/test-bucket/users/test-user/images/"
)
@@ -154,7 +156,7 @@ async def test_upload_media_gif_success(mock_settings, mock_storage_client):
headers=starlette.datastructures.Headers({"content-type": "image/gif"}),
)

result = await backend.server.v2.store.media.upload_media("test-user", test_file)
result = await store_media.upload_media("test-user", test_file)
assert result.startswith(
"https://storage.googleapis.com/test-bucket/users/test-user/images/"
)
@@ -168,7 +170,7 @@ async def test_upload_media_webp_success(mock_settings, mock_storage_client):
headers=starlette.datastructures.Headers({"content-type": "image/webp"}),
)

result = await backend.server.v2.store.media.upload_media("test-user", test_file)
result = await store_media.upload_media("test-user", test_file)
assert result.startswith(
"https://storage.googleapis.com/test-bucket/users/test-user/images/"
)
@@ -182,7 +184,7 @@ async def test_upload_media_webm_success(mock_settings, mock_storage_client):
headers=starlette.datastructures.Headers({"content-type": "video/webm"}),
)

result = await backend.server.v2.store.media.upload_media("test-user", test_file)
result = await store_media.upload_media("test-user", test_file)
assert result.startswith(
"https://storage.googleapis.com/test-bucket/users/test-user/videos/"
)
@@ -196,8 +198,8 @@ async def test_upload_media_mismatched_signature(mock_settings, mock_storage_cli
headers=starlette.datastructures.Headers({"content-type": "image/jpeg"}),
)

with pytest.raises(backend.server.v2.store.exceptions.InvalidFileTypeError):
await backend.server.v2.store.media.upload_media("test-user", test_file)
with pytest.raises(store_exceptions.InvalidFileTypeError):
await store_media.upload_media("test-user", test_file)


async def test_upload_media_invalid_signature(mock_settings, mock_storage_client):
@@ -207,5 +209,5 @@ async def test_upload_media_invalid_signature(mock_settings, mock_storage_client
headers=starlette.datastructures.Headers({"content-type": "image/jpeg"}),
)

with pytest.raises(backend.server.v2.store.exceptions.InvalidFileTypeError):
await backend.server.v2.store.media.upload_media("test-user", test_file)
with pytest.raises(store_exceptions.InvalidFileTypeError):
await store_media.upload_media("test-user", test_file)
@@ -7,6 +7,12 @@ import pydantic
from backend.util.models import Pagination


class ChangelogEntry(pydantic.BaseModel):
version: str
changes_summary: str
date: datetime.datetime


class MyAgent(pydantic.BaseModel):
agent_id: str
agent_version: int
@@ -55,12 +61,17 @@ class StoreAgentDetails(pydantic.BaseModel):
runs: int
rating: float
versions: list[str]
agentGraphVersions: list[str]
agentGraphId: str
last_updated: datetime.datetime
recommended_schedule_cron: str | None = None

active_version_id: str | None = None
has_approved_version: bool = False

# Optional changelog data when include_changelog=True
changelog: list[ChangelogEntry] | None = None


class Creator(pydantic.BaseModel):
name: str
@@ -2,11 +2,11 @@ import datetime

import prisma.enums

import backend.server.v2.store.model
from . import model as store_model


def test_pagination():
pagination = backend.server.v2.store.model.Pagination(
pagination = store_model.Pagination(
total_items=100, total_pages=5, current_page=2, page_size=20
)
assert pagination.total_items == 100
@@ -16,7 +16,7 @@ def test_pagination():


def test_store_agent():
agent = backend.server.v2.store.model.StoreAgent(
agent = store_model.StoreAgent(
slug="test-agent",
agent_name="Test Agent",
agent_image="test.jpg",
@@ -34,9 +34,9 @@ def test_store_agent():


def test_store_agents_response():
response = backend.server.v2.store.model.StoreAgentsResponse(
response = store_model.StoreAgentsResponse(
agents=[
backend.server.v2.store.model.StoreAgent(
store_model.StoreAgent(
slug="test-agent",
agent_name="Test Agent",
agent_image="test.jpg",
@@ -48,7 +48,7 @@ def test_store_agents_response():
rating=4.5,
)
],
pagination=backend.server.v2.store.model.Pagination(
pagination=store_model.Pagination(
total_items=1, total_pages=1, current_page=1, page_size=20
),
)
@@ -57,7 +57,7 @@ def test_store_agents_response():


def test_store_agent_details():
details = backend.server.v2.store.model.StoreAgentDetails(
details = store_model.StoreAgentDetails(
store_listing_version_id="version123",
slug="test-agent",
agent_name="Test Agent",
@@ -72,6 +72,8 @@ def test_store_agent_details():
runs=50,
rating=4.5,
versions=["1.0", "2.0"],
agentGraphVersions=["1", "2"],
agentGraphId="test-graph-id",
last_updated=datetime.datetime.now(),
)
assert details.slug == "test-agent"
@@ -81,7 +83,7 @@ def test_store_agent_details():


def test_creator():
creator = backend.server.v2.store.model.Creator(
creator = store_model.Creator(
agent_rating=4.8,
agent_runs=1000,
name="Test Creator",
@@ -96,9 +98,9 @@ def test_creator():


def test_creators_response():
response = backend.server.v2.store.model.CreatorsResponse(
response = store_model.CreatorsResponse(
creators=[
backend.server.v2.store.model.Creator(
store_model.Creator(
agent_rating=4.8,
agent_runs=1000,
name="Test Creator",
@@ -109,7 +111,7 @@ def test_creators_response():
is_featured=False,
)
],
pagination=backend.server.v2.store.model.Pagination(
pagination=store_model.Pagination(
total_items=1, total_pages=1, current_page=1, page_size=20
),
)
@@ -118,7 +120,7 @@ def test_creators_response():


def test_creator_details():
details = backend.server.v2.store.model.CreatorDetails(
details = store_model.CreatorDetails(
name="Test Creator",
username="creator1",
description="Test description",
@@ -135,7 +137,7 @@ def test_creator_details():


def test_store_submission():
submission = backend.server.v2.store.model.StoreSubmission(
submission = store_model.StoreSubmission(
agent_id="agent123",
agent_version=1,
sub_heading="Test subheading",
@@ -154,9 +156,9 @@ def test_store_submission():


def test_store_submissions_response():
response = backend.server.v2.store.model.StoreSubmissionsResponse(
response = store_model.StoreSubmissionsResponse(
submissions=[
backend.server.v2.store.model.StoreSubmission(
store_model.StoreSubmission(
agent_id="agent123",
agent_version=1,
sub_heading="Test subheading",
@@ -170,7 +172,7 @@ def test_store_submissions_response():
rating=4.5,
)
],
pagination=backend.server.v2.store.model.Pagination(
pagination=store_model.Pagination(
total_items=1, total_pages=1, current_page=1, page_size=20
),
)
@@ -179,7 +181,7 @@ def test_store_submissions_response():


def test_store_submission_request():
request = backend.server.v2.store.model.StoreSubmissionRequest(
request = store_model.StoreSubmissionRequest(
agent_id="agent123",
agent_version=1,
slug="test-agent",
@@ -9,14 +9,14 @@ import fastapi
import fastapi.responses

import backend.data.graph
import backend.server.v2.store.cache as store_cache
import backend.server.v2.store.db
import backend.server.v2.store.exceptions
import backend.server.v2.store.image_gen
import backend.server.v2.store.media
import backend.server.v2.store.model
import backend.util.json

from . import cache as store_cache
from . import db as store_db
from . import image_gen as store_image_gen
from . import media as store_media
from . import model as store_model

logger = logging.getLogger(__name__)

router = fastapi.APIRouter()
@@ -32,7 +32,7 @@ router = fastapi.APIRouter()
summary="Get user profile",
tags=["store", "private"],
dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)],
response_model=backend.server.v2.store.model.ProfileDetails,
response_model=store_model.ProfileDetails,
)
async def get_profile(
user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id),
@@ -41,7 +41,7 @@ async def get_profile(
Get the profile details for the authenticated user.
Cached for 1 hour per user.
"""
profile = await backend.server.v2.store.db.get_user_profile(user_id)
profile = await store_db.get_user_profile(user_id)
if profile is None:
return fastapi.responses.JSONResponse(
status_code=404,
@@ -55,10 +55,10 @@ async def get_profile(
summary="Update user profile",
tags=["store", "private"],
dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)],
response_model=backend.server.v2.store.model.CreatorDetails,
response_model=store_model.CreatorDetails,
)
async def update_or_create_profile(
profile: backend.server.v2.store.model.Profile,
profile: store_model.Profile,
user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id),
):
"""
@@ -74,9 +74,7 @@ async def update_or_create_profile(
Raises:
HTTPException: If there is an error updating the profile
"""
updated_profile = await backend.server.v2.store.db.update_profile(
user_id=user_id, profile=profile
)
updated_profile = await store_db.update_profile(user_id=user_id, profile=profile)
return updated_profile


@@ -89,7 +87,7 @@ async def update_or_create_profile(
"/agents",
summary="List store agents",
tags=["store", "public"],
response_model=backend.server.v2.store.model.StoreAgentsResponse,
response_model=store_model.StoreAgentsResponse,
)
async def get_agents(
featured: bool = False,
@@ -152,9 +150,13 @@ async def get_agents(
"/agents/{username}/{agent_name}",
summary="Get specific agent",
tags=["store", "public"],
response_model=backend.server.v2.store.model.StoreAgentDetails,
response_model=store_model.StoreAgentDetails,
)
async def get_agent(username: str, agent_name: str):
async def get_agent(
username: str,
agent_name: str,
include_changelog: bool = fastapi.Query(default=False),
):
"""
This is only used on the AgentDetails Page.

@@ -164,7 +166,7 @@ async def get_agent(username: str, agent_name: str):
# URL decode the agent name since it comes from the URL path
agent_name = urllib.parse.unquote(agent_name).lower()
agent = await store_cache._get_cached_agent_details(
username=username, agent_name=agent_name
username=username, agent_name=agent_name, include_changelog=include_changelog
)
return agent

@@ -175,13 +177,13 @@ async def get_agent(username: str, agent_name: str):
tags=["store"],
dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)],
)
async def get_graph_meta_by_store_listing_version_id(store_listing_version_id: str):
async def get_graph_meta_by_store_listing_version_id(
store_listing_version_id: str,
) -> backend.data.graph.GraphMeta:
"""
Get Agent Graph from Store Listing Version ID.
"""
graph = await backend.server.v2.store.db.get_available_graph(
store_listing_version_id
)
graph = await store_db.get_available_graph(store_listing_version_id)
return graph


@@ -190,15 +192,13 @@ async def get_graph_meta_by_store_listing_version_id(store_listing_version_id: s
summary="Get agent by version",
tags=["store"],
dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)],
response_model=backend.server.v2.store.model.StoreAgentDetails,
response_model=store_model.StoreAgentDetails,
)
async def get_store_agent(store_listing_version_id: str):
"""
Get Store Agent Details from Store Listing Version ID.
"""
agent = await backend.server.v2.store.db.get_store_agent_by_version_id(
store_listing_version_id
)
agent = await store_db.get_store_agent_by_version_id(store_listing_version_id)

return agent

@@ -208,12 +208,12 @@ async def get_store_agent(store_listing_version_id: str):
summary="Create agent review",
tags=["store"],
dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)],
response_model=backend.server.v2.store.model.StoreReview,
response_model=store_model.StoreReview,
)
async def create_review(
username: str,
agent_name: str,
review: backend.server.v2.store.model.StoreReviewCreate,
review: store_model.StoreReviewCreate,
user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id),
):
"""
@@ -231,7 +231,7 @@ async def create_review(
username = urllib.parse.unquote(username).lower()
agent_name = urllib.parse.unquote(agent_name).lower()
# Create the review
created_review = await backend.server.v2.store.db.create_store_review(
created_review = await store_db.create_store_review(
user_id=user_id,
store_listing_version_id=review.store_listing_version_id,
score=review.score,
@@ -250,7 +250,7 @@ async def create_review(
"/creators",
summary="List store creators",
tags=["store", "public"],
response_model=backend.server.v2.store.model.CreatorsResponse,
response_model=store_model.CreatorsResponse,
)
async def get_creators(
featured: bool = False,
@@ -295,7 +295,7 @@ async def get_creators(
"/creator/{username}",
summary="Get creator details",
tags=["store", "public"],
response_model=backend.server.v2.store.model.CreatorDetails,
response_model=store_model.CreatorDetails,
)
async def get_creator(
username: str,
@@ -319,7 +319,7 @@ async def get_creator(
summary="Get my agents",
tags=["store", "private"],
dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)],
response_model=backend.server.v2.store.model.MyAgentsResponse,
response_model=store_model.MyAgentsResponse,
)
async def get_my_agents(
user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id),
@@ -329,9 +329,7 @@ async def get_my_agents(
"""
Get user's own agents.
"""
agents = await backend.server.v2.store.db.get_my_agents(
user_id, page=page, page_size=page_size
)
agents = await store_db.get_my_agents(user_id, page=page, page_size=page_size)
return agents


@@ -356,7 +354,7 @@ async def delete_submission(
Returns:
bool: True if the submission was successfully deleted, False otherwise
"""
result = await backend.server.v2.store.db.delete_store_submission(
result = await store_db.delete_store_submission(
user_id=user_id,
submission_id=submission_id,
)
@@ -369,7 +367,7 @@ async def delete_submission(
summary="List my submissions",
tags=["store", "private"],
dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)],
response_model=backend.server.v2.store.model.StoreSubmissionsResponse,
response_model=store_model.StoreSubmissionsResponse,
)
async def get_submissions(
user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id),
@@ -399,7 +397,7 @@ async def get_submissions(
raise fastapi.HTTPException(
status_code=422, detail="Page size must be greater than 0"
)
listings = await backend.server.v2.store.db.get_store_submissions(
listings = await store_db.get_store_submissions(
user_id=user_id,
page=page,
page_size=page_size,
@@ -412,10 +410,10 @@ async def get_submissions(
summary="Create store submission",
tags=["store", "private"],
dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)],
response_model=backend.server.v2.store.model.StoreSubmission,
response_model=store_model.StoreSubmission,
)
async def create_submission(
submission_request: backend.server.v2.store.model.StoreSubmissionRequest,
submission_request: store_model.StoreSubmissionRequest,
user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id),
):
"""
@@ -431,7 +429,7 @@ async def create_submission(
Raises:
HTTPException: If there is an error creating the submission
"""
result = await backend.server.v2.store.db.create_store_submission(
result = await store_db.create_store_submission(
user_id=user_id,
agent_id=submission_request.agent_id,
agent_version=submission_request.agent_version,
@@ -456,11 +454,11 @@ async def create_submission(
summary="Edit store submission",
tags=["store", "private"],
dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)],
response_model=backend.server.v2.store.model.StoreSubmission,
response_model=store_model.StoreSubmission,
)
async def edit_submission(
store_listing_version_id: str,
submission_request: backend.server.v2.store.model.StoreSubmissionEditRequest,
submission_request: store_model.StoreSubmissionEditRequest,
user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id),
):
"""
@@ -477,7 +475,7 @@ async def edit_submission(
Raises:
HTTPException: If there is an error editing the submission
"""
result = await backend.server.v2.store.db.edit_store_submission(
result = await store_db.edit_store_submission(
user_id=user_id,
store_listing_version_id=store_listing_version_id,
name=submission_request.name,
@@ -518,9 +516,7 @@ async def upload_submission_media(
Raises:
HTTPException: If there is an error uploading the media
"""
media_url = await backend.server.v2.store.media.upload_media(
user_id=user_id, file=file
)
media_url = await store_media.upload_media(user_id=user_id, file=file)
return media_url


@@ -555,14 +551,12 @@ async def generate_image(
# Use .jpeg here since we are generating JPEG images
filename = f"agent_{agent_id}.jpeg"

existing_url = await backend.server.v2.store.media.check_media_exists(
user_id, filename
)
existing_url = await store_media.check_media_exists(user_id, filename)
if existing_url:
logger.info(f"Using existing image for agent {agent_id}")
return fastapi.responses.JSONResponse(content={"image_url": existing_url})
# Generate agent image as JPEG
image = await backend.server.v2.store.image_gen.generate_agent_image(agent=agent)
image = await store_image_gen.generate_agent_image(agent=agent)

# Create UploadFile with the correct filename and content_type
image_file = fastapi.UploadFile(
@@ -570,7 +564,7 @@ async def generate_image(
filename=filename,
)

image_url = await backend.server.v2.store.media.upload_media(
image_url = await store_media.upload_media(
user_id=user_id, file=image_file, use_file_name=True
)

@@ -599,7 +593,7 @@ async def download_agent_file(
Raises:
HTTPException: If the agent is not found or an unexpected error occurs.
"""
graph_data = await backend.server.v2.store.db.get_agent(store_listing_version_id)
graph_data = await store_db.get_agent(store_listing_version_id)
file_name = f"agent_{graph_data.id}_v{graph_data.version or 'latest'}.json"

# Sending graph as a stream (similar to marketplace v1)
@@ -8,15 +8,15 @@ import pytest
import pytest_mock
from pytest_snapshot.plugin import Snapshot

import backend.server.v2.store.model
import backend.server.v2.store.routes
from . import model as store_model
from . import routes as store_routes

# Using a fixed timestamp for reproducible tests
# 2023 date is intentionally used to ensure tests work regardless of current year
FIXED_NOW = datetime.datetime(2023, 1, 1, 0, 0, 0)

app = fastapi.FastAPI()
app.include_router(backend.server.v2.store.routes.router)
app.include_router(store_routes.router)

client = fastapi.testclient.TestClient(app)

@@ -35,23 +35,21 @@ def test_get_agents_defaults(
mocker: pytest_mock.MockFixture,
snapshot: Snapshot,
) -> None:
mocked_value = backend.server.v2.store.model.StoreAgentsResponse(
mocked_value = store_model.StoreAgentsResponse(
agents=[],
pagination=backend.server.v2.store.model.Pagination(
pagination=store_model.Pagination(
current_page=0,
total_items=0,
total_pages=0,
page_size=10,
),
)
mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agents")
mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents")
mock_db_call.return_value = mocked_value
response = client.get("/agents")
assert response.status_code == 200

data = backend.server.v2.store.model.StoreAgentsResponse.model_validate(
response.json()
)
data = store_model.StoreAgentsResponse.model_validate(response.json())
assert data.pagination.total_pages == 0
assert data.agents == []

@@ -72,9 +70,9 @@ def test_get_agents_featured(
mocker: pytest_mock.MockFixture,
snapshot: Snapshot,
) -> None:
mocked_value = backend.server.v2.store.model.StoreAgentsResponse(
mocked_value = store_model.StoreAgentsResponse(
agents=[
backend.server.v2.store.model.StoreAgent(
store_model.StoreAgent(
slug="featured-agent",
agent_name="Featured Agent",
agent_image="featured.jpg",
@@ -86,20 +84,18 @@ def test_get_agents_featured(
rating=4.5,
)
],
pagination=backend.server.v2.store.model.Pagination(
pagination=store_model.Pagination(
current_page=1,
total_items=1,
total_pages=1,
page_size=20,
),
)
mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agents")
mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents")
mock_db_call.return_value = mocked_value
response = client.get("/agents?featured=true")
assert response.status_code == 200
data = backend.server.v2.store.model.StoreAgentsResponse.model_validate(
response.json()
)
data = store_model.StoreAgentsResponse.model_validate(response.json())
assert len(data.agents) == 1
assert data.agents[0].slug == "featured-agent"
snapshot.snapshot_dir = "snapshots"
@@ -119,9 +115,9 @@ def test_get_agents_by_creator(
mocker: pytest_mock.MockFixture,
snapshot: Snapshot,
) -> None:
mocked_value = backend.server.v2.store.model.StoreAgentsResponse(
mocked_value = store_model.StoreAgentsResponse(
agents=[
backend.server.v2.store.model.StoreAgent(
store_model.StoreAgent(
slug="creator-agent",
agent_name="Creator Agent",
agent_image="agent.jpg",
@@ -133,20 +129,18 @@ def test_get_agents_by_creator(
rating=4.0,
)
],
pagination=backend.server.v2.store.model.Pagination(
pagination=store_model.Pagination(
current_page=1,
total_items=1,
total_pages=1,
page_size=20,
),
)
mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agents")
mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents")
mock_db_call.return_value = mocked_value
response = client.get("/agents?creator=specific-creator")
assert response.status_code == 200
data = backend.server.v2.store.model.StoreAgentsResponse.model_validate(
response.json()
)
data = store_model.StoreAgentsResponse.model_validate(response.json())
assert len(data.agents) == 1
assert data.agents[0].creator == "specific-creator"
snapshot.snapshot_dir = "snapshots"
@@ -166,9 +160,9 @@ def test_get_agents_sorted(
mocker: pytest_mock.MockFixture,
snapshot: Snapshot,
) -> None:
mocked_value = backend.server.v2.store.model.StoreAgentsResponse(
mocked_value = store_model.StoreAgentsResponse(
agents=[
backend.server.v2.store.model.StoreAgent(
store_model.StoreAgent(
slug="top-agent",
agent_name="Top Agent",
agent_image="top.jpg",
@@ -180,20 +174,18 @@ def test_get_agents_sorted(
rating=5.0,
)
],
pagination=backend.server.v2.store.model.Pagination(
pagination=store_model.Pagination(
current_page=1,
total_items=1,
total_pages=1,
page_size=20,
),
)
mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agents")
mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents")
mock_db_call.return_value = mocked_value
response = client.get("/agents?sorted_by=runs")
assert response.status_code == 200
data = backend.server.v2.store.model.StoreAgentsResponse.model_validate(
response.json()
)
data = store_model.StoreAgentsResponse.model_validate(response.json())
assert len(data.agents) == 1
assert data.agents[0].runs == 1000
snapshot.snapshot_dir = "snapshots"
@@ -213,9 +205,9 @@ def test_get_agents_search(
mocker: pytest_mock.MockFixture,
snapshot: Snapshot,
) -> None:
mocked_value = backend.server.v2.store.model.StoreAgentsResponse(
mocked_value = store_model.StoreAgentsResponse(
agents=[
backend.server.v2.store.model.StoreAgent(
store_model.StoreAgent(
slug="search-agent",
agent_name="Search Agent",
agent_image="search.jpg",
@@ -227,20 +219,18 @@ def test_get_agents_search(
rating=4.2,
)
],
pagination=backend.server.v2.store.model.Pagination(
pagination=store_model.Pagination(
current_page=1,
total_items=1,
total_pages=1,
page_size=20,
),
)
mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agents")
mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents")
mock_db_call.return_value = mocked_value
response = client.get("/agents?search_query=specific")
assert response.status_code == 200
data = backend.server.v2.store.model.StoreAgentsResponse.model_validate(
response.json()
)
data = store_model.StoreAgentsResponse.model_validate(response.json())
assert len(data.agents) == 1
assert "specific" in data.agents[0].description.lower()
snapshot.snapshot_dir = "snapshots"
@@ -260,9 +250,9 @@ def test_get_agents_category(
mocker: pytest_mock.MockFixture,
snapshot: Snapshot,
) -> None:
mocked_value = backend.server.v2.store.model.StoreAgentsResponse(
mocked_value = store_model.StoreAgentsResponse(
agents=[
backend.server.v2.store.model.StoreAgent(
store_model.StoreAgent(
slug="category-agent",
agent_name="Category Agent",
agent_image="category.jpg",
@@ -274,20 +264,18 @@ def test_get_agents_category(
rating=4.1,
)
],
pagination=backend.server.v2.store.model.Pagination(
pagination=store_model.Pagination(
current_page=1,
total_items=1,
total_pages=1,
page_size=20,
),
)
mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agents")
mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents")
mock_db_call.return_value = mocked_value
response = client.get("/agents?category=test-category")
assert response.status_code == 200
data = backend.server.v2.store.model.StoreAgentsResponse.model_validate(
response.json()
)
data = store_model.StoreAgentsResponse.model_validate(response.json())
assert len(data.agents) == 1
snapshot.snapshot_dir = "snapshots"
snapshot.assert_match(json.dumps(response.json(), indent=2), "agts_category")
@@ -306,9 +294,9 @@ def test_get_agents_pagination(
mocker: pytest_mock.MockFixture,
snapshot: Snapshot,
) -> None:
mocked_value = backend.server.v2.store.model.StoreAgentsResponse(
mocked_value = store_model.StoreAgentsResponse(
agents=[
backend.server.v2.store.model.StoreAgent(
store_model.StoreAgent(
slug=f"agent-{i}",
agent_name=f"Agent {i}",
agent_image=f"agent{i}.jpg",
@@ -321,20 +309,18 @@ def test_get_agents_pagination(
)
for i in range(5)
],
pagination=backend.server.v2.store.model.Pagination(
pagination=store_model.Pagination(
current_page=2,
total_items=15,
total_pages=3,
page_size=5,
),
)
mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agents")
mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents")
mock_db_call.return_value = mocked_value
response = client.get("/agents?page=2&page_size=5")
assert response.status_code == 200
data = backend.server.v2.store.model.StoreAgentsResponse.model_validate(
response.json()
)
data = store_model.StoreAgentsResponse.model_validate(response.json())
assert len(data.agents) == 5
assert data.pagination.current_page == 2
assert data.pagination.page_size == 5
@@ -365,7 +351,7 @@ def test_get_agents_malformed_request(mocker: pytest_mock.MockFixture):
assert response.status_code == 422

# Verify no DB calls were made
mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agents")
mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agents")
mock_db_call.assert_not_called()


@@ -373,7 +359,7 @@ def test_get_agent_details(
mocker: pytest_mock.MockFixture,
snapshot: Snapshot,
) -> None:
mocked_value = backend.server.v2.store.model.StoreAgentDetails(
mocked_value = store_model.StoreAgentDetails(
store_listing_version_id="test-version-id",
slug="test-agent",
agent_name="Test Agent",
@@ -388,46 +374,46 @@ def test_get_agent_details(
runs=100,
rating=4.5,
versions=["1.0.0", "1.1.0"],
agentGraphVersions=["1", "2"],
agentGraphId="test-graph-id",
last_updated=FIXED_NOW,
)
mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_agent_details")
mock_db_call = mocker.patch("backend.api.features.store.db.get_store_agent_details")
mock_db_call.return_value = mocked_value

response = client.get("/agents/creator1/test-agent")
assert response.status_code == 200

data = backend.server.v2.store.model.StoreAgentDetails.model_validate(
response.json()
)
data = store_model.StoreAgentDetails.model_validate(response.json())
assert data.agent_name == "Test Agent"
assert data.creator == "creator1"
snapshot.snapshot_dir = "snapshots"
snapshot.assert_match(json.dumps(response.json(), indent=2), "agt_details")
mock_db_call.assert_called_once_with(username="creator1", agent_name="test-agent")
mock_db_call.assert_called_once_with(
username="creator1", agent_name="test-agent", include_changelog=False
)


def test_get_creators_defaults(
mocker: pytest_mock.MockFixture,
snapshot: Snapshot,
) -> None:
mocked_value = backend.server.v2.store.model.CreatorsResponse(
mocked_value = store_model.CreatorsResponse(
creators=[],
pagination=backend.server.v2.store.model.Pagination(
pagination=store_model.Pagination(
current_page=0,
total_items=0,
total_pages=0,
page_size=10,
),
)
mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_creators")
mock_db_call = mocker.patch("backend.api.features.store.db.get_store_creators")
mock_db_call.return_value = mocked_value

response = client.get("/creators")
assert response.status_code == 200

data = backend.server.v2.store.model.CreatorsResponse.model_validate(
response.json()
)
data = store_model.CreatorsResponse.model_validate(response.json())
assert data.pagination.total_pages == 0
assert data.creators == []
snapshot.snapshot_dir = "snapshots"
@@ -441,9 +427,9 @@ def test_get_creators_pagination(
mocker: pytest_mock.MockFixture,
snapshot: Snapshot,
) -> None:
mocked_value = backend.server.v2.store.model.CreatorsResponse(
mocked_value = store_model.CreatorsResponse(
creators=[
backend.server.v2.store.model.Creator(
store_model.Creator(
name=f"Creator {i}",
username=f"creator{i}",
description=f"Creator {i} description",
@@ -455,22 +441,20 @@ def test_get_creators_pagination(
)
for i in range(5)
],
pagination=backend.server.v2.store.model.Pagination(
pagination=store_model.Pagination(
current_page=2,
total_items=15,
total_pages=3,
page_size=5,
),
)
mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_creators")
mock_db_call = mocker.patch("backend.api.features.store.db.get_store_creators")
mock_db_call.return_value = mocked_value

response = client.get("/creators?page=2&page_size=5")
assert response.status_code == 200

data = backend.server.v2.store.model.CreatorsResponse.model_validate(
response.json()
)
data = store_model.CreatorsResponse.model_validate(response.json())
assert len(data.creators) == 5
assert data.pagination.current_page == 2
assert data.pagination.page_size == 5
@@ -495,7 +479,7 @@ def test_get_creators_malformed_request(mocker: pytest_mock.MockFixture):
assert response.status_code == 422

# Verify no DB calls were made
mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_creators")
mock_db_call = mocker.patch("backend.api.features.store.db.get_store_creators")
mock_db_call.assert_not_called()


@@ -503,7 +487,7 @@ def test_get_creator_details(
mocker: pytest_mock.MockFixture,
snapshot: Snapshot,
) -> None:
mocked_value = backend.server.v2.store.model.CreatorDetails(
mocked_value = store_model.CreatorDetails(
name="Test User",
username="creator1",
description="Test creator description",
@@ -513,13 +497,15 @@ def test_get_creator_details(
agent_runs=1000,
top_categories=["category1", "category2"],
)
mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_creator_details")
mock_db_call = mocker.patch(
"backend.api.features.store.db.get_store_creator_details"
)
mock_db_call.return_value = mocked_value

response = client.get("/creator/creator1")
assert response.status_code == 200

data = backend.server.v2.store.model.CreatorDetails.model_validate(response.json())
data = store_model.CreatorDetails.model_validate(response.json())
assert data.username == "creator1"
assert data.name == "Test User"
snapshot.snapshot_dir = "snapshots"
@@ -532,9 +518,9 @@ def test_get_submissions_success(
snapshot: Snapshot,
test_user_id: str,
) -> None:
mocked_value = backend.server.v2.store.model.StoreSubmissionsResponse(
mocked_value = store_model.StoreSubmissionsResponse(
submissions=[
backend.server.v2.store.model.StoreSubmission(
store_model.StoreSubmission(
name="Test Agent",
description="Test agent description",
image_urls=["test.jpg"],
@@ -550,22 +536,20 @@ def test_get_submissions_success(
categories=["test-category"],
)
],
pagination=backend.server.v2.store.model.Pagination(
pagination=store_model.Pagination(
current_page=1,
total_items=1,
total_pages=1,
page_size=20,
),
)
mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_submissions")
mock_db_call = mocker.patch("backend.api.features.store.db.get_store_submissions")
mock_db_call.return_value = mocked_value

response = client.get("/submissions")
assert response.status_code == 200

data = backend.server.v2.store.model.StoreSubmissionsResponse.model_validate(
response.json()
)
data = store_model.StoreSubmissionsResponse.model_validate(response.json())
assert len(data.submissions) == 1
assert data.submissions[0].name == "Test Agent"
assert data.pagination.current_page == 1
@@ -579,24 +563,22 @@ def test_get_submissions_pagination(
snapshot: Snapshot,
test_user_id: str,
) -> None:
mocked_value = backend.server.v2.store.model.StoreSubmissionsResponse(
mocked_value = store_model.StoreSubmissionsResponse(
submissions=[],
pagination=backend.server.v2.store.model.Pagination(
pagination=store_model.Pagination(
current_page=2,
total_items=10,
total_pages=2,
page_size=5,
),
)
mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_submissions")
mock_db_call = mocker.patch("backend.api.features.store.db.get_store_submissions")
mock_db_call.return_value = mocked_value

response = client.get("/submissions?page=2&page_size=5")
assert response.status_code == 200

data = backend.server.v2.store.model.StoreSubmissionsResponse.model_validate(
response.json()
)
data = store_model.StoreSubmissionsResponse.model_validate(response.json())
assert data.pagination.current_page == 2
assert data.pagination.page_size == 5
snapshot.snapshot_dir = "snapshots"
@@ -618,5 +600,5 @@ def test_get_submissions_malformed_request(mocker: pytest_mock.MockFixture):
assert response.status_code == 422

# Verify no DB calls were made
mock_db_call = mocker.patch("backend.server.v2.store.db.get_store_submissions")
mock_db_call = mocker.patch("backend.api.features.store.db.get_store_submissions")
mock_db_call.assert_not_called()
@@ -8,10 +8,11 @@ from unittest.mock import AsyncMock, patch

import pytest

from backend.server.v2.store import cache as store_cache
from backend.server.v2.store.model import StoreAgent, StoreAgentsResponse
from backend.util.models import Pagination

from . import cache as store_cache
from .model import StoreAgent, StoreAgentsResponse


class TestCacheDeletion:
"""Test cache deletion functionality for store routes."""
@@ -43,7 +44,7 @@ class TestCacheDeletion:
)

with patch(
"backend.server.v2.store.db.get_store_agents",
"backend.api.features.store.db.get_store_agents",
new_callable=AsyncMock,
return_value=mock_response,
) as mock_db:
@@ -152,7 +153,7 @@ class TestCacheDeletion:
)

with patch(
"backend.server.v2.store.db.get_store_agents",
"backend.api.features.store.db.get_store_agents",
new_callable=AsyncMock,
return_value=mock_response,
):
@@ -203,7 +204,7 @@ class TestCacheDeletion:
)

with patch(
"backend.server.v2.store.db.get_store_agents",
"backend.api.features.store.db.get_store_agents",
new_callable=AsyncMock,
return_value=mock_response,
) as mock_db:
Some files were not shown because too many files have changed in this diff