AutoGPT/autogpt_platform/docker-compose.platform.yml
Swifty 843c487500 feat(backend): add prisma types stub generator for pyright compatibility (#11736)
Prisma's generated `types.py` file is 57,000+ lines with complex
recursive TypedDict definitions that exhaust Pyright's type inference
budget. This causes random type errors and makes the type checker
unreliable.

### Changes 🏗️

- Add `gen_prisma_types_stub.py` script that generates a lightweight
`.pyi` stub file
- The stub preserves safe types (`Literal`, `TypeVar`) while collapsing complex TypedDicts to `dict[str, Any]` (see the sketch after this list)
- Integrate stub generation into all workflows that run `prisma
generate`:
  - `platform-backend-ci.yml`
  - `claude.yml`
  - `claude-dependabot.yml`
  - `copilot-setup-steps.yml`
  - `docker-compose.platform.yml`
  - `Dockerfile`
  - `Makefile` (migrate & reset-db targets)
  - `linter.py` (lint & format commands)
- Add `gen-prisma-stub` poetry script entry
- Fix two pre-existing type errors that were previously masked:
  - `store/db.py`: Replace private type `_StoreListingVersion_version_OrderByInput` with dict literal
  - `airtable/_webhook.py`: Add cast for `Serializable` type
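
For illustration, a stub generator along the lines described above could look like this. It is a hedged sketch, not the actual `gen_prisma_types_stub.py`; the selection logic and the `prisma/` output path are assumptions:

```python
# Sketch: read the generated types.py, keep simple top-level aliases
# (Literal aliases, TypeVar declarations) verbatim, and collapse every
# TypedDict class into a plain dict alias in a lightweight types.pyi stub.
import ast
from pathlib import Path


def generate_stub(types_py: Path, types_pyi: Path) -> None:
    tree = ast.parse(types_py.read_text())
    lines = ["from typing import Any, Literal, TypeVar", ""]
    for node in tree.body:
        if isinstance(node, ast.Assign) and isinstance(node.targets[0], ast.Name):
            # Simple assignments (Literal aliases, TypeVars) are cheap for
            # Pyright, so keep them as-is.
            lines.append(ast.unparse(node))
        elif isinstance(node, ast.ClassDef):
            # The deeply recursive TypedDict classes are what exhaust Pyright's
            # inference budget; flatten each one to dict[str, Any].
            lines.append(f"{node.name} = dict[str, Any]")
    types_pyi.write_text("\n".join(lines) + "\n")


if __name__ == "__main__":
    out_dir = Path("prisma")  # assumed location of the generated client
    generate_stub(out_dir / "types.py", out_dir / "types.pyi")
```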

### Checklist 📋

#### For code changes:
- [x] I have clearly listed my changes in the PR description
- [x] I have made a test plan
- [x] I have tested my changes according to the test plan:
  - [x] Run `poetry run format` - passes with 0 errors (down from 57+)
  - [x] Run `poetry run lint` - passes with 0 errors
  - [x] Run `poetry run gen-prisma-stub` - generates stub successfully
  - [x] Verify the stub file is created at the correct location with proper content

#### For configuration changes:
- [x] `.env.default` is updated or already compatible with my changes
- [x] `docker-compose.yml` is updated or already compatible with my
changes
- [x] I have included a list of my configuration changes in the PR
description (under **Changes**)

## Summary by CodeRabbit

* **Chores**
  * Added a lightweight Prisma type-stub generator and integrated it into build, lint, CI/CD, and container workflows.
  * Build, migration, formatting, and lint steps now generate these stubs to improve type-checking performance and reduce overhead during builds and deployments.
  * Exposed a project command to run stub generation manually.

2026-01-09 16:31:10 +01:00


# Environment Variable Loading Order (first → last, later overrides earlier):
# 1. backend/.env.default - Default values for all settings
# 2. backend/.env - User's custom configuration (if exists)
# 3. environment key - Docker-specific overrides defined below
# 4. Shell environment - Variables exported before running docker compose
# 5. CLI arguments - docker compose run -e VAR=value
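#
# Example: the Docker service hostnames set at level 3 below (via x-backend-env,
# e.g. DB_HOST: db) override the localhost defaults coming from levels 1-2.
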
# Common backend environment - Docker service names
x-backend-env: &backend-env # Docker internal service hostnames (override localhost defaults)
  PYRO_HOST: "0.0.0.0"
  AGENTSERVER_HOST: rest_server
  SCHEDULER_HOST: scheduler_server
  DATABASEMANAGER_HOST: database_manager
  EXECUTIONMANAGER_HOST: executor
  NOTIFICATIONMANAGER_HOST: notification_server
  CLAMAV_SERVICE_HOST: clamav
  DB_HOST: db
  REDIS_HOST: redis
  RABBITMQ_HOST: rabbitmq
  # Override Supabase URL for Docker network
  SUPABASE_URL: http://kong:8000
  # Database connection string for Docker network
  # This cannot be constructed like in .env because we cannot interpolate values set here (DB_HOST)
  DATABASE_URL: postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform
  DIRECT_URL: postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform

# Common env_file configuration for backend services
x-backend-env-files: &backend-env-files
  env_file:
    - backend/.env.default # Base defaults (always exists)
    - path: backend/.env # User overrides (optional)
      required: false

services:
  migrate:
    build:
      context: ../
      dockerfile: autogpt_platform/backend/Dockerfile
      target: migrate
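    # Generate the Prisma client, emit the lightweight .pyi type stub, then apply pending migrations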
command: ["sh", "-c", "poetry run prisma generate && poetry run gen-prisma-stub && poetry run prisma migrate deploy"]
develop:
watch:
- path: ./
target: autogpt_platform/backend/migrations
action: rebuild
depends_on:
db:
condition: service_healthy
<<: *backend-env-files
environment:
<<: *backend-env
networks:
- app-network
restart: on-failure
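    # Healthy once `prisma migrate status` reports no pending migrations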
    healthcheck:
      test:
        [
          "CMD-SHELL",
          "poetry run prisma migrate status | grep -q 'No pending migrations' || exit 1",
        ]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 5s

  redis:
    image: redis:latest
    ports:
      - "6379:6379"
    networks:
      - app-network
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 10s
      timeout: 5s
      retries: 5

  rabbitmq:
    image: rabbitmq:management
    container_name: rabbitmq
    healthcheck:
      test: rabbitmq-diagnostics -q ping
      interval: 30s
      timeout: 10s
      retries: 5
      start_period: 10s
    <<: *backend-env-files
    environment:
      <<: *backend-env
    ports:
      - "5672:5672"
      - "15672:15672"

  rest_server:
    build:
      context: ../
      dockerfile: autogpt_platform/backend/Dockerfile
      target: server
    command: ["python", "-m", "backend.rest"]
    develop:
      watch:
        - path: ./
          target: autogpt_platform/backend/
          action: rebuild
    depends_on:
      redis:
        condition: service_healthy
      db:
        condition: service_healthy
      migrate:
        condition: service_completed_successfully
      rabbitmq:
        condition: service_healthy
    <<: *backend-env-files
    environment:
      <<: *backend-env
    ports:
      - "8006:8006"
    networks:
      - app-network
    logging:
      driver: json-file
      options:
        max-size: "10m"
        max-file: "3"

  executor:
    build:
      context: ../
      dockerfile: autogpt_platform/backend/Dockerfile
      target: server
    command: ["python", "-m", "backend.exec"]
    develop:
      watch:
        - path: ./
          target: autogpt_platform/backend/
          action: rebuild
    depends_on:
      redis:
        condition: service_healthy
      rabbitmq:
        condition: service_healthy
      db:
        condition: service_healthy
      migrate:
        condition: service_completed_successfully
      database_manager:
        condition: service_started
    <<: *backend-env-files
    environment:
      <<: *backend-env
    ports:
      - "8002:8002"
    networks:
      - app-network
    logging:
      driver: json-file
      options:
        max-size: "10m"
        max-file: "3"

  websocket_server:
    build:
      context: ../
      dockerfile: autogpt_platform/backend/Dockerfile
      target: server
    command: ["python", "-m", "backend.ws"]
    develop:
      watch:
        - path: ./
          target: autogpt_platform/backend/
          action: rebuild
    depends_on:
      db:
        condition: service_healthy
      redis:
        condition: service_healthy
      migrate:
        condition: service_completed_successfully
      database_manager:
        condition: service_started
    <<: *backend-env-files
    environment:
      <<: *backend-env
    ports:
      - "8001:8001"
    networks:
      - app-network
    logging:
      driver: json-file
      options:
        max-size: "10m"
        max-file: "3"

  database_manager:
    build:
      context: ../
      dockerfile: autogpt_platform/backend/Dockerfile
      target: server
    command: ["python", "-m", "backend.db"]
    develop:
      watch:
        - path: ./
          target: autogpt_platform/backend/
          action: rebuild
    depends_on:
      db:
        condition: service_healthy
      migrate:
        condition: service_completed_successfully
    <<: *backend-env-files
    environment:
      <<: *backend-env
    ports:
      - "8005:8005"
    networks:
      - app-network
    logging:
      driver: json-file
      options:
        max-size: "10m"
        max-file: "3"

  scheduler_server:
    build:
      context: ../
      dockerfile: autogpt_platform/backend/Dockerfile
      target: server
    command: ["python", "-m", "backend.scheduler"]
    develop:
      watch:
        - path: ./
          target: autogpt_platform/backend/
          action: rebuild
    depends_on:
      db:
        condition: service_healthy
      redis:
        condition: service_healthy
      rabbitmq:
        condition: service_healthy
      migrate:
        condition: service_completed_successfully
      database_manager:
        condition: service_started
    # healthcheck:
    #   test:
    #     [
    #       "CMD",
    #       "curl",
    #       "-f",
    #       "-X",
    #       "POST",
    #       "http://localhost:8003/health_check",
    #     ]
    #   interval: 10s
    #   timeout: 10s
    #   retries: 5
    <<: *backend-env-files
    environment:
      <<: *backend-env
    ports:
      - "8003:8003"
    networks:
      - app-network
    logging:
      driver: json-file
      options:
        max-size: "10m"
        max-file: "3"

  notification_server:
    build:
      context: ../
      dockerfile: autogpt_platform/backend/Dockerfile
      target: server
    command: ["python", "-m", "backend.notification"]
    develop:
      watch:
        - path: ./
          target: autogpt_platform/backend/
          action: rebuild
    depends_on:
      db:
        condition: service_healthy
      rabbitmq:
        condition: service_healthy
      migrate:
        condition: service_completed_successfully
      database_manager:
        condition: service_started
    <<: *backend-env-files
    environment:
      <<: *backend-env
    ports:
      - "8007:8007"
    networks:
      - app-network
    logging:
      driver: json-file
      options:
        max-size: "10m"
        max-file: "3"

  frontend:
    build:
      context: ../
      dockerfile: autogpt_platform/frontend/Dockerfile
      target: prod
      args:
        NEXT_PUBLIC_PW_TEST: ${NEXT_PUBLIC_PW_TEST:-false}
    depends_on:
      db:
        condition: service_healthy
      migrate:
        condition: service_completed_successfully
    ports:
      - "3000:3000"
    networks:
      - app-network
    logging:
      driver: json-file
      options:
        max-size: "10m"
        max-file: "3"
    # Load environment variables in order (later overrides earlier)
    env_file:
      - path: ./frontend/.env.default # Base defaults (always exists)
      - path: ./frontend/.env # User overrides (optional)
        required: false
    environment:
      # Server-side environment variables (Docker service names)
      # These override the localhost URLs from env files when running in Docker
      AUTH_CALLBACK_URL: http://rest_server:8006/auth/callback
      SUPABASE_URL: http://kong:8000
      AGPT_SERVER_URL: http://rest_server:8006/api
      AGPT_WS_SERVER_URL: ws://websocket_server:8001/ws

networks:
  app-network:
    driver: bridge