diff --git a/.dockerignore b/.dockerignore
index 9b744e7f9b..427cab29f4 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -5,42 +5,13 @@
 !docs/

 # Platform - Libs
-!autogpt_platform/autogpt_libs/autogpt_libs/
-!autogpt_platform/autogpt_libs/pyproject.toml
-!autogpt_platform/autogpt_libs/poetry.lock
-!autogpt_platform/autogpt_libs/README.md
+!autogpt_platform/autogpt_libs/

 # Platform - Backend
-!autogpt_platform/backend/backend/
-!autogpt_platform/backend/test/e2e_test_data.py
-!autogpt_platform/backend/migrations/
-!autogpt_platform/backend/schema.prisma
-!autogpt_platform/backend/pyproject.toml
-!autogpt_platform/backend/poetry.lock
-!autogpt_platform/backend/README.md
-!autogpt_platform/backend/.env
-!autogpt_platform/backend/gen_prisma_types_stub.py
-
-# Platform - Market
-!autogpt_platform/market/market/
-!autogpt_platform/market/scripts.py
-!autogpt_platform/market/schema.prisma
-!autogpt_platform/market/pyproject.toml
-!autogpt_platform/market/poetry.lock
-!autogpt_platform/market/README.md
+!autogpt_platform/backend/

 # Platform - Frontend
-!autogpt_platform/frontend/src/
-!autogpt_platform/frontend/public/
-!autogpt_platform/frontend/scripts/
-!autogpt_platform/frontend/package.json
-!autogpt_platform/frontend/pnpm-lock.yaml
-!autogpt_platform/frontend/tsconfig.json
-!autogpt_platform/frontend/README.md
-## config
-!autogpt_platform/frontend/*.config.*
-!autogpt_platform/frontend/.env.*
-!autogpt_platform/frontend/.env
+!autogpt_platform/frontend/

 # Classic - AutoGPT
 !classic/original_autogpt/autogpt/
@@ -64,6 +35,38 @@
 # Classic - Frontend
 !classic/frontend/build/web/

-# Explicitly re-ignore some folders
-.*
-**/__pycache__
+# Explicitly re-ignore unwanted files from whitelisted directories
+# Note: These patterns MUST come after the whitelist rules to take effect
+
+# Hidden files and directories (but keep frontend .env files needed for build)
+**/.*
+!autogpt_platform/frontend/.env
+!autogpt_platform/frontend/.env.default
+!autogpt_platform/frontend/.env.production
+
+# Python artifacts
+**/__pycache__/
+**/*.pyc
+**/*.pyo
+**/.venv/
+**/.ruff_cache/
+**/.pytest_cache/
+**/.coverage
+**/htmlcov/
+
+# Node artifacts
+**/node_modules/
+**/.next/
+**/storybook-static/
+**/playwright-report/
+**/test-results/
+
+# Build artifacts
+**/dist/
+**/build/
+!autogpt_platform/frontend/src/**/build/
+**/target/
+
+# Logs and temp files
+**/*.log
+**/*.tmp
diff --git a/.github/workflows/platform-frontend-ci.yml b/.github/workflows/platform-frontend-ci.yml
index 6410daae9f..4bf8a2b80c 100644
--- a/.github/workflows/platform-frontend-ci.yml
+++ b/.github/workflows/platform-frontend-ci.yml
@@ -26,7 +26,6 @@
 jobs:
   setup:
     runs-on: ubuntu-latest
     outputs:
-      cache-key: ${{ steps.cache-key.outputs.key }}
       components-changed: ${{ steps.filter.outputs.components }}
     steps:
@@ -41,28 +40,17 @@
             components:
               - 'autogpt_platform/frontend/src/components/**'

-      - name: Set up Node.js
-        uses: actions/setup-node@v6
-        with:
-          node-version: "22.18.0"
-
       - name: Enable corepack
         run: corepack enable

-      - name: Generate cache key
-        id: cache-key
-        run: echo "key=${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml', 'autogpt_platform/frontend/package.json') }}" >> $GITHUB_OUTPUT
-
-      - name: Cache dependencies
-        uses: actions/cache@v5
+      - name: Set up Node
+        uses: actions/setup-node@v6
         with:
-          path: ~/.pnpm-store
-          key: ${{ steps.cache-key.outputs.key }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
-            ${{ runner.os }}-pnpm-
+          node-version: "22.18.0"
+          cache: "pnpm"
+          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml

-      - name: Install dependencies
+      - name: Install dependencies to populate cache
         run: pnpm install --frozen-lockfile

   lint:
@@ -73,22 +61,15 @@
       - name: Checkout repository
         uses: actions/checkout@v6

-      - name: Set up Node.js
-        uses: actions/setup-node@v6
-        with:
-          node-version: "22.18.0"
-
       - name: Enable corepack
         run: corepack enable

-      - name: Restore dependencies cache
-        uses: actions/cache@v5
+      - name: Set up Node
+        uses: actions/setup-node@v6
         with:
-          path: ~/.pnpm-store
-          key: ${{ needs.setup.outputs.cache-key }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
-            ${{ runner.os }}-pnpm-
+          node-version: "22.18.0"
+          cache: "pnpm"
+          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml

       - name: Install dependencies
         run: pnpm install --frozen-lockfile
@@ -111,22 +92,15 @@
         with:
           fetch-depth: 0

-      - name: Set up Node.js
-        uses: actions/setup-node@v6
-        with:
-          node-version: "22.18.0"
-
       - name: Enable corepack
         run: corepack enable

-      - name: Restore dependencies cache
-        uses: actions/cache@v5
+      - name: Set up Node
+        uses: actions/setup-node@v6
         with:
-          path: ~/.pnpm-store
-          key: ${{ needs.setup.outputs.cache-key }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
-            ${{ runner.os }}-pnpm-
+          node-version: "22.18.0"
+          cache: "pnpm"
+          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml

       - name: Install dependencies
         run: pnpm install --frozen-lockfile
@@ -141,10 +115,8 @@
           exitOnceUploaded: true

   e2e_test:
+    name: end-to-end tests
     runs-on: big-boi
-    needs: setup
-    strategy:
-      fail-fast: false

     steps:
       - name: Checkout repository
@@ -152,19 +124,11 @@
         with:
           submodules: recursive

-      - name: Set up Node.js
-        uses: actions/setup-node@v6
-        with:
-          node-version: "22.18.0"
-
-      - name: Enable corepack
-        run: corepack enable
-
-      - name: Copy default supabase .env
+      - name: Set up Platform - Copy default supabase .env
         run: |
           cp ../.env.default ../.env

-      - name: Copy backend .env and set OpenAI API key
+      - name: Set up Platform - Copy backend .env and set OpenAI API key
         run: |
           cp ../backend/.env.default ../backend/.env
           echo "OPENAI_INTERNAL_API_KEY=${{ secrets.OPENAI_API_KEY }}" >> ../backend/.env
@@ -172,77 +136,125 @@
           # Used by E2E test data script to generate embeddings for approved store agents
           OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}

-      - name: Set up Docker Buildx
+      - name: Set up Platform - Set up Docker Buildx
         uses: docker/setup-buildx-action@v3
+        with:
+          driver: docker-container
+          driver-opts: network=host

-      - name: Cache Docker layers
+      - name: Set up Platform - Expose GHA cache to docker buildx CLI
+        uses: crazy-max/ghaction-github-runtime@v3
+
+      - name: Set up Platform - Build Docker images (with cache)
+        working-directory: autogpt_platform
+        run: |
+          pip install pyyaml
+
+          # Resolve extends and generate a flat compose file that bake can understand
+          docker compose -f docker-compose.yml config > docker-compose.resolved.yml
+
+          # Add cache configuration to the resolved compose file
+          python ../.github/workflows/scripts/docker-ci-fix-compose-build-cache.py \
+            --source docker-compose.resolved.yml \
+            --cache-from "type=gha" \
+            --cache-to "type=gha,mode=max" \
+            --backend-hash "${{ hashFiles('autogpt_platform/backend/Dockerfile', 'autogpt_platform/backend/poetry.lock', 'autogpt_platform/backend/backend') }}" \
+            --frontend-hash "${{ hashFiles('autogpt_platform/frontend/Dockerfile', 'autogpt_platform/frontend/pnpm-lock.yaml', 'autogpt_platform/frontend/src') }}" \
+            --git-ref "${{ github.ref }}"
+
+          # Build with bake using the resolved compose file (now includes cache config)
+          docker buildx bake --allow=fs.read=.. -f docker-compose.resolved.yml --load
+        env:
+          NEXT_PUBLIC_PW_TEST: true
+
+      - name: Set up tests - Cache E2E test data
+        id: e2e-data-cache
         uses: actions/cache@v5
         with:
-          path: /tmp/.buildx-cache
-          key: ${{ runner.os }}-buildx-frontend-test-${{ hashFiles('autogpt_platform/docker-compose.yml', 'autogpt_platform/backend/Dockerfile', 'autogpt_platform/backend/pyproject.toml', 'autogpt_platform/backend/poetry.lock') }}
-          restore-keys: |
-            ${{ runner.os }}-buildx-frontend-test-
+          path: /tmp/e2e_test_data.sql
+          key: e2e-test-data-${{ hashFiles('autogpt_platform/backend/test/e2e_test_data.py', 'autogpt_platform/backend/migrations/**', '.github/workflows/platform-frontend-ci.yml') }}

-      - name: Run docker compose
+      - name: Set up Platform - Start Supabase DB + Auth
         run: |
-          NEXT_PUBLIC_PW_TEST=true docker compose -f ../docker-compose.yml up -d
+          docker compose -f ../docker-compose.resolved.yml up -d db auth --no-build
+          echo "Waiting for database to be ready..."
+          timeout 60 sh -c 'until docker compose -f ../docker-compose.resolved.yml exec -T db pg_isready -U postgres 2>/dev/null; do sleep 2; done'
+          echo "Waiting for auth service to be ready..."
+          timeout 60 sh -c 'until docker compose -f ../docker-compose.resolved.yml exec -T db psql -U postgres -d postgres -c "SELECT 1 FROM auth.users LIMIT 1" 2>/dev/null; do sleep 2; done' || echo "Auth schema check timeout, continuing..."
+
+      - name: Set up Platform - Run migrations
+        run: |
+          echo "Running migrations..."
+          docker compose -f ../docker-compose.resolved.yml run --rm migrate
+          echo "✅ Migrations completed"
         env:
-          DOCKER_BUILDKIT: 1
-          BUILDX_CACHE_FROM: type=local,src=/tmp/.buildx-cache
-          BUILDX_CACHE_TO: type=local,dest=/tmp/.buildx-cache-new,mode=max
+          NEXT_PUBLIC_PW_TEST: true

-      - name: Move cache
+      - name: Set up tests - Load cached E2E test data
+        if: steps.e2e-data-cache.outputs.cache-hit == 'true'
         run: |
-          rm -rf /tmp/.buildx-cache
-          if [ -d "/tmp/.buildx-cache-new" ]; then
-            mv /tmp/.buildx-cache-new /tmp/.buildx-cache
-          fi
+          echo "✅ Found cached E2E test data, restoring..."
+          {
+            echo "SET session_replication_role = 'replica';"
+            cat /tmp/e2e_test_data.sql
+            echo "SET session_replication_role = 'origin';"
+          } | docker compose -f ../docker-compose.resolved.yml exec -T db psql -U postgres -d postgres -b
+          # Refresh materialized views after restore
+          docker compose -f ../docker-compose.resolved.yml exec -T db \
+            psql -U postgres -d postgres -b -c "SET search_path TO platform; SELECT refresh_store_materialized_views();" || true

-      - name: Wait for services to be ready
+          echo "✅ E2E test data restored from cache"
+
+      - name: Set up Platform - Start (all other services)
         run: |
+          docker compose -f ../docker-compose.resolved.yml up -d --no-build
           echo "Waiting for rest_server to be ready..."
           timeout 60 sh -c 'until curl -f http://localhost:8006/health 2>/dev/null; do sleep 2; done' || echo "Rest server health check timeout, continuing..."
-          echo "Waiting for database to be ready..."
-          timeout 60 sh -c 'until docker compose -f ../docker-compose.yml exec -T db pg_isready -U postgres 2>/dev/null; do sleep 2; done' || echo "Database ready check timeout, continuing..."
+        env:
+          NEXT_PUBLIC_PW_TEST: true

-      - name: Create E2E test data
+      - name: Set up tests - Create E2E test data
+        if: steps.e2e-data-cache.outputs.cache-hit != 'true'
         run: |
           echo "Creating E2E test data..."
-          # First try to run the script from inside the container
-          if docker compose -f ../docker-compose.yml exec -T rest_server test -f /app/autogpt_platform/backend/test/e2e_test_data.py; then
-            echo "✅ Found e2e_test_data.py in container, running it..."
-            docker compose -f ../docker-compose.yml exec -T rest_server sh -c "cd /app/autogpt_platform && python backend/test/e2e_test_data.py" || {
-              echo "❌ E2E test data creation failed!"
-              docker compose -f ../docker-compose.yml logs --tail=50 rest_server
-              exit 1
-            }
-          else
-            echo "⚠️ e2e_test_data.py not found in container, copying and running..."
-            # Copy the script into the container and run it
-            docker cp ../backend/test/e2e_test_data.py $(docker compose -f ../docker-compose.yml ps -q rest_server):/tmp/e2e_test_data.py || {
-              echo "❌ Failed to copy script to container"
-              exit 1
-            }
-            docker compose -f ../docker-compose.yml exec -T rest_server sh -c "cd /app/autogpt_platform && python /tmp/e2e_test_data.py" || {
-              echo "❌ E2E test data creation failed!"
-              docker compose -f ../docker-compose.yml logs --tail=50 rest_server
-              exit 1
-            }
-          fi
+          docker cp ../backend/test/e2e_test_data.py $(docker compose -f ../docker-compose.resolved.yml ps -q rest_server):/tmp/e2e_test_data.py
+          docker compose -f ../docker-compose.resolved.yml exec -T rest_server sh -c "cd /app/autogpt_platform && python /tmp/e2e_test_data.py" || {
+            echo "❌ E2E test data creation failed!"
+            docker compose -f ../docker-compose.resolved.yml logs --tail=50 rest_server
+            exit 1
+          }

-      - name: Restore dependencies cache
-        uses: actions/cache@v5
+          # Dump auth.users + platform schema for cache (two separate dumps)
+          echo "Dumping database for cache..."
+          {
+            docker compose -f ../docker-compose.resolved.yml exec -T db \
+              pg_dump -U postgres --data-only --column-inserts \
+              --table='auth.users' postgres
+            docker compose -f ../docker-compose.resolved.yml exec -T db \
+              pg_dump -U postgres --data-only --column-inserts \
+              --schema=platform \
+              --exclude-table='platform._prisma_migrations' \
+              --exclude-table='platform.apscheduler_jobs' \
+              --exclude-table='platform.apscheduler_jobs_batched_notifications' \
+              postgres
+          } > /tmp/e2e_test_data.sql
+
+          echo "✅ Database dump created for caching ($(wc -l < /tmp/e2e_test_data.sql) lines)"
+
+      - name: Set up tests - Enable corepack
+        run: corepack enable
+
+      - name: Set up tests - Set up Node
+        uses: actions/setup-node@v6
         with:
-          path: ~/.pnpm-store
-          key: ${{ needs.setup.outputs.cache-key }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
-            ${{ runner.os }}-pnpm-
+          node-version: "22.18.0"
+          cache: "pnpm"
+          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml

-      - name: Install dependencies
+      - name: Set up tests - Install dependencies
         run: pnpm install --frozen-lockfile

-      - name: Install Browser 'chromium'
+      - name: Set up tests - Install browser 'chromium'
         run: pnpm playwright install --with-deps chromium

       - name: Run Playwright tests
@@ -269,7 +281,7 @@

       - name: Print Final Docker Compose logs
         if: always()
-        run: docker compose -f ../docker-compose.yml logs
+        run: docker compose -f ../docker-compose.resolved.yml logs

   integration_test:
     runs-on: ubuntu-latest
@@ -281,22 +293,15 @@
         with:
           submodules: recursive

-      - name: Set up Node.js
-        uses: actions/setup-node@v6
-        with:
-          node-version: "22.18.0"
-
       - name: Enable corepack
         run: corepack enable

-      - name: Restore dependencies cache
-        uses: actions/cache@v5
+      - name: Set up Node
+        uses: actions/setup-node@v6
         with:
-          path: ~/.pnpm-store
-          key: ${{ needs.setup.outputs.cache-key }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
-            ${{ runner.os }}-pnpm-
+          node-version: "22.18.0"
+          cache: "pnpm"
+          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml

       - name: Install dependencies
         run: pnpm install --frozen-lockfile
diff --git a/.github/workflows/scripts/docker-ci-fix-compose-build-cache.py b/.github/workflows/scripts/docker-ci-fix-compose-build-cache.py
new file mode 100644
index 0000000000..33693fc739
--- /dev/null
+++ b/.github/workflows/scripts/docker-ci-fix-compose-build-cache.py
@@ -0,0 +1,195 @@
+#!/usr/bin/env python3
+"""
+Add cache configuration to a resolved docker-compose file for all services
+that have a build key, and ensure image names match what docker compose expects.
+"""
+
+import argparse
+
+import yaml
+
+
+DEFAULT_BRANCH = "dev"
+CACHE_BUILDS_FOR_COMPONENTS = ["backend", "frontend"]
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description="Add cache config to a resolved compose file"
+    )
+    parser.add_argument(
+        "--source",
+        required=True,
+        help="Source compose file to read (should be output of `docker compose config`)",
+    )
+    parser.add_argument(
+        "--cache-from",
+        default="type=gha",
+        help="Cache source configuration",
+    )
+    parser.add_argument(
+        "--cache-to",
+        default="type=gha,mode=max",
+        help="Cache destination configuration",
+    )
+    for component in CACHE_BUILDS_FOR_COMPONENTS:
+        parser.add_argument(
+            f"--{component}-hash",
+            default="",
+            help=f"Hash for {component} cache scope (e.g., from hashFiles())",
+        )
+    parser.add_argument(
+        "--git-ref",
+        default="",
+        help="Git ref for branch-based cache scope (e.g., refs/heads/master)",
+    )
+    args = parser.parse_args()
+
+    # Normalize git ref to a safe scope name (e.g., refs/heads/master -> master)
+    git_ref_scope = ""
+    if args.git_ref:
+        git_ref_scope = args.git_ref.replace("refs/heads/", "").replace("/", "-")
+
+    with open(args.source, "r") as f:
+        compose = yaml.safe_load(f)
+
+    # Get project name from compose file or default
+    project_name = compose.get("name", "autogpt_platform")
+
+    def get_image_name(dockerfile: str, target: str) -> str:
+        """Generate image name based on Dockerfile folder and build target."""
+        dockerfile_parts = dockerfile.replace("\\", "/").split("/")
+        if len(dockerfile_parts) >= 2:
+            folder_name = dockerfile_parts[-2]  # e.g., "backend" or "frontend"
+        else:
+            folder_name = "app"
+        return f"{project_name}-{folder_name}:{target}"
+
+    def get_build_key(dockerfile: str, target: str) -> str:
+        """Generate a unique key for a Dockerfile+target combination."""
+        return f"{dockerfile}:{target}"
+
+    def get_component(dockerfile: str) -> str | None:
+        """Get component name (frontend/backend) from dockerfile path."""
+        for component in CACHE_BUILDS_FOR_COMPONENTS:
+            if component in dockerfile:
+                return component
+        return None
+
+    # First pass: collect all services with build configs and identify duplicates
+    # Track which (dockerfile, target) combinations we've seen
+    build_key_to_first_service: dict[str, str] = {}
+    services_to_build: list[str] = []
+    services_to_dedupe: list[str] = []
+
+    for service_name, service_config in compose.get("services", {}).items():
+        if "build" not in service_config:
+            continue
+
+        build_config = service_config["build"]
+        dockerfile = build_config.get("dockerfile", "Dockerfile")
+        target = build_config.get("target", "default")
+        build_key = get_build_key(dockerfile, target)
+
+        if build_key not in build_key_to_first_service:
+            # First service with this build config - it will do the actual build
+            build_key_to_first_service[build_key] = service_name
+            services_to_build.append(service_name)
+        else:
+            # Duplicate - will just use the image from the first service
+            services_to_dedupe.append(service_name)
+
+    # Second pass: configure builds and deduplicate
+    modified_services = []
+    for service_name, service_config in compose.get("services", {}).items():
+        if "build" not in service_config:
+            continue
+
+        build_config = service_config["build"]
+        dockerfile = build_config.get("dockerfile", "Dockerfile")
+        target = build_config.get("target", "latest")
+        image_name = get_image_name(dockerfile, target)
+
+        # Set image name for all services (needed for both builders and deduped)
+        service_config["image"] = image_name
+
+        if service_name in services_to_dedupe:
+            # Remove build config - this service will use the pre-built image
+            del service_config["build"]
+            continue
+
+        # This service will do the actual build - add cache config
+        cache_from_list = []
+        cache_to_list = []
+
+        component = get_component(dockerfile)
+        if not component:
+            # Skip services that don't clearly match frontend/backend
+            continue
+
+        # Get the hash for this component
+        component_hash = getattr(args, f"{component}_hash")
+
+        # Scope format: platform-{component}-{target}-{hash|ref}
+        # Example: platform-backend-server-abc123
+
+        if "type=gha" in args.cache_from:
+            # 1. Primary: exact hash match (most specific)
+            if component_hash:
+                hash_scope = f"platform-{component}-{target}-{component_hash}"
+                cache_from_list.append(f"{args.cache_from},scope={hash_scope}")
+
+            # 2. Fallback: branch-based cache
+            if git_ref_scope:
+                ref_scope = f"platform-{component}-{target}-{git_ref_scope}"
+                cache_from_list.append(f"{args.cache_from},scope={ref_scope}")
+
+            # 3. Fallback: dev branch cache (for PRs/feature branches)
+            if git_ref_scope and git_ref_scope != DEFAULT_BRANCH:
+                master_scope = f"platform-{component}-{target}-{DEFAULT_BRANCH}"
+                cache_from_list.append(f"{args.cache_from},scope={master_scope}")
+
+        if "type=gha" in args.cache_to:
+            # Write to both hash-based and branch-based scopes
+            if component_hash:
+                hash_scope = f"platform-{component}-{target}-{component_hash}"
+                cache_to_list.append(f"{args.cache_to},scope={hash_scope}")
+
+            if git_ref_scope:
+                ref_scope = f"platform-{component}-{target}-{git_ref_scope}"
+                cache_to_list.append(f"{args.cache_to},scope={ref_scope}")
+
+        # Ensure we have at least one cache source/target
+        if not cache_from_list:
+            cache_from_list.append(args.cache_from)
+        if not cache_to_list:
+            cache_to_list.append(args.cache_to)
+
+        build_config["cache_from"] = cache_from_list
+        build_config["cache_to"] = cache_to_list
+        modified_services.append(service_name)
+
+    # Write back to the same file
+    with open(args.source, "w") as f:
+        yaml.dump(compose, f, default_flow_style=False, sort_keys=False)
+
+    print(f"Added cache config to {len(modified_services)} services in {args.source}:")
+    for svc in modified_services:
+        svc_config = compose["services"][svc]
+        build_cfg = svc_config.get("build", {})
+        cache_from_list = build_cfg.get("cache_from", ["none"])
+        cache_to_list = build_cfg.get("cache_to", ["none"])
+        print(f"  - {svc}")
+        print(f"    image: {svc_config.get('image', 'N/A')}")
+        print(f"    cache_from: {cache_from_list}")
+        print(f"    cache_to: {cache_to_list}")
+    if services_to_dedupe:
+        print(
+            f"Deduplicated {len(services_to_dedupe)} services (will use pre-built images):"
+        )
+        for svc in services_to_dedupe:
+            print(f"  - {svc} -> {compose['services'][svc].get('image', 'N/A')}")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/autogpt_platform/backend/Dockerfile b/autogpt_platform/backend/Dockerfile
index 9bd455e490..ace534b730 100644
--- a/autogpt_platform/backend/Dockerfile
+++ b/autogpt_platform/backend/Dockerfile
@@ -1,3 +1,5 @@
+# ============================ DEPENDENCY BUILDER ============================ #
+
 FROM debian:13-slim AS builder

 # Set environment variables
@@ -51,7 +53,9 @@ COPY autogpt_platform/backend/backend/data/partial_types.py ./backend/data/parti
 COPY autogpt_platform/backend/gen_prisma_types_stub.py ./
 RUN poetry run prisma generate && poetry run gen-prisma-stub

-FROM debian:13-slim AS server_dependencies
+# ============================== BACKEND SERVER ============================== #
+
+FROM debian:13-slim AS server

 WORKDIR /app

@@ -63,15 +67,14 @@ ENV POETRY_HOME=/opt/poetry \
 ENV PATH=/opt/poetry/bin:$PATH

 # Install Python, FFmpeg, and ImageMagick (required for video processing blocks)
-RUN apt-get update && apt-get install -y \
+# Using --no-install-recommends saves ~650MB by skipping unnecessary deps like llvm, mesa, etc.
+RUN apt-get update && apt-get install -y --no-install-recommends \
     python3.13 \
     python3-pip \
     ffmpeg \
     imagemagick \
     && rm -rf /var/lib/apt/lists/*

-# Copy only necessary files from builder
-COPY --from=builder /app /app
 COPY --from=builder /usr/local/lib/python3* /usr/local/lib/python3*
 COPY --from=builder /usr/local/bin/poetry /usr/local/bin/poetry
 # Copy Node.js installation for Prisma
@@ -81,30 +84,54 @@ COPY --from=builder /usr/bin/npm /usr/bin/npm
 COPY --from=builder /usr/bin/npx /usr/bin/npx
 COPY --from=builder /root/.cache/prisma-python/binaries /root/.cache/prisma-python/binaries

-ENV PATH="/app/autogpt_platform/backend/.venv/bin:$PATH"
-
-RUN mkdir -p /app/autogpt_platform/autogpt_libs
-RUN mkdir -p /app/autogpt_platform/backend
-
-COPY autogpt_platform/autogpt_libs /app/autogpt_platform/autogpt_libs
-
-COPY autogpt_platform/backend/poetry.lock autogpt_platform/backend/pyproject.toml /app/autogpt_platform/backend/
-
 WORKDIR /app/autogpt_platform/backend

-FROM server_dependencies AS migrate
+# Copy only the .venv from builder (not the entire /app directory)
+# The .venv includes the generated Prisma client
+COPY --from=builder /app/autogpt_platform/backend/.venv ./.venv
+ENV PATH="/app/autogpt_platform/backend/.venv/bin:$PATH"

-# Migration stage only needs schema and migrations - much lighter than full backend
-COPY autogpt_platform/backend/schema.prisma /app/autogpt_platform/backend/
-COPY autogpt_platform/backend/backend/data/partial_types.py /app/autogpt_platform/backend/backend/data/partial_types.py
-COPY autogpt_platform/backend/migrations /app/autogpt_platform/backend/migrations
+# Copy dependency files + autogpt_libs (path dependency)
+COPY autogpt_platform/autogpt_libs /app/autogpt_platform/autogpt_libs
+COPY autogpt_platform/backend/poetry.lock autogpt_platform/backend/pyproject.toml ./

-FROM server_dependencies AS server
-
-COPY autogpt_platform/backend /app/autogpt_platform/backend
+# Copy backend code + docs (for Copilot docs search)
+COPY autogpt_platform/backend ./
 COPY docs /app/docs

 RUN poetry install --no-ansi --only-root

 ENV PORT=8000

 CMD ["poetry", "run", "rest"]
+
+# =============================== DB MIGRATOR =============================== #
+
+# Lightweight migrate stage - only needs Prisma CLI, not full Python environment
+FROM debian:13-slim AS migrate
+
+WORKDIR /app/autogpt_platform/backend
+
+ENV DEBIAN_FRONTEND=noninteractive
+
+# Install only what's needed for prisma migrate: Node.js and minimal Python for prisma-python
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    python3.13 \
+    python3-pip \
+    ca-certificates \
+    && rm -rf /var/lib/apt/lists/*
+
+# Copy Node.js from builder (needed for Prisma CLI)
+COPY --from=builder /usr/bin/node /usr/bin/node
+COPY --from=builder /usr/lib/node_modules /usr/lib/node_modules
+COPY --from=builder /usr/bin/npm /usr/bin/npm
+
+# Copy Prisma binaries
+COPY --from=builder /root/.cache/prisma-python/binaries /root/.cache/prisma-python/binaries
+
+# Install prisma-client-py directly (much smaller than copying full venv)
+# Quote the requirement so the shell doesn't treat ">=" as a redirection
+RUN pip3 install "prisma>=0.15.0" --break-system-packages
+
+COPY autogpt_platform/backend/schema.prisma ./
+COPY autogpt_platform/backend/backend/data/partial_types.py ./backend/data/partial_types.py
+COPY autogpt_platform/backend/gen_prisma_types_stub.py ./
+COPY autogpt_platform/backend/migrations ./migrations
diff --git a/autogpt_platform/docker-compose.platform.yml b/autogpt_platform/docker-compose.platform.yml
index de6ecfd612..bab92d4693 100644
--- a/autogpt_platform/docker-compose.platform.yml
+++ b/autogpt_platform/docker-compose.platform.yml
@@ -37,7 +37,7 @@ services:
       context: ../
       dockerfile: autogpt_platform/backend/Dockerfile
       target: migrate
-    command: ["sh", "-c", "poetry run prisma generate && poetry run gen-prisma-stub && poetry run prisma migrate deploy"]
+    command: ["sh", "-c", "prisma generate && python3 gen_prisma_types_stub.py && prisma migrate deploy"]
     develop:
       watch:
         - path: ./
@@ -56,7 +56,7 @@ services:
       test:
         [
           "CMD-SHELL",
-          "poetry run prisma migrate status | grep -q 'No pending migrations' || exit 1",
+          "prisma migrate status | grep -q 'No pending migrations' || exit 1",
        ]
      interval: 30s
      timeout: 10s
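
For reference, the sketch below shows roughly what one buildable service in `docker-compose.resolved.yml` looks like after `docker-ci-fix-compose-build-cache.py` rewrites it and before `docker buildx bake` consumes it. It is illustrative only, not output from the real compose file: the service name `rest_server`, build target `server`, backend hash `abc123`, and branch `my-feature` are assumed values, while the image name and the `platform-{component}-{target}-{hash|ref}` scope layout with its hash, branch, and dev fallbacks follow the script's logic.

```yaml
# Illustrative sketch only; service name, target, hash, and branch are assumed.
services:
  rest_server:
    image: autogpt_platform-backend:server
    build:
      context: ../  # resolved to an absolute path by `docker compose config`
      dockerfile: autogpt_platform/backend/Dockerfile
      target: server
      cache_from:
        - type=gha,scope=platform-backend-server-abc123      # exact-hash match
        - type=gha,scope=platform-backend-server-my-feature  # same-branch fallback
        - type=gha,scope=platform-backend-server-dev         # default-branch fallback
      cache_to:
        - type=gha,mode=max,scope=platform-backend-server-abc123
        - type=gha,mode=max,scope=platform-backend-server-my-feature
```

Other services that build the same Dockerfile and target would have their `build` block dropped and reuse `autogpt_platform-backend:server`, which is the deduplication path in the script's second pass.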