name: AutoGPT Platform - Frontend CI

on:
  push:
    branches: [master, dev]
    paths:
      - ".github/workflows/platform-frontend-ci.yml"
      - "autogpt_platform/frontend/**"
  pull_request:
    paths:
      - ".github/workflows/platform-frontend-ci.yml"
      - "autogpt_platform/frontend/**"
  merge_group:
  workflow_dispatch:

# One run per ref; merge-queue runs get their own group. Only PR runs are
# cancelled when superseded — pushes to master/dev always run to completion.
concurrency:
  group: ${{ github.workflow }}-${{ github.event_name == 'merge_group' && format('merge-queue-{0}', github.ref) || format('{0}-{1}', github.ref, github.event.pull_request.number || github.sha) }}
  cancel-in-progress: ${{ github.event_name == 'pull_request' }}

defaults:
  run:
    shell: bash
    working-directory: autogpt_platform/frontend

jobs:
  # Warms the pnpm cache and detects whether component sources changed
  # (consumed by the chromatic job's `if` condition).
  setup:
    runs-on: ubuntu-latest
    outputs:
      components-changed: ${{ steps.filter.outputs.components }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6

      - name: Check for component changes
        uses: dorny/paths-filter@v3
        id: filter
        with:
          filters: |
            components:
              - 'autogpt_platform/frontend/src/components/**'

      - name: Enable corepack
        run: corepack enable

      - name: Set up Node
        uses: actions/setup-node@v6
        with:
          node-version: "22.18.0"
          cache: "pnpm"
          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml

      - name: Install dependencies to populate cache
        run: pnpm install --frozen-lockfile

  lint:
    runs-on: ubuntu-latest
    needs: setup
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6

      - name: Enable corepack
        run: corepack enable

      - name: Set up Node
        uses: actions/setup-node@v6
        with:
          node-version: "22.18.0"
          cache: "pnpm"
          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml

      - name: Install dependencies
        run: pnpm install --frozen-lockfile

      - name: Run lint
        run: pnpm lint

  # Visual regression testing via Chromatic; only meaningful for dev-targeted
  # changes that actually touch component sources.
  chromatic:
    runs-on: ubuntu-latest
    needs: setup
    # Disabled: to re-enable, remove 'false &&' from the condition below
    if: >-
      false &&
      (github.ref == 'refs/heads/dev' || github.base_ref == 'dev') &&
      needs.setup.outputs.components-changed == 'true'
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
        with:
          # Chromatic needs full git history to compute baselines.
          fetch-depth: 0

      - name: Enable corepack
        run: corepack enable

      - name: Set up Node
        uses: actions/setup-node@v6
        with:
          node-version: "22.18.0"
          cache: "pnpm"
          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml

      - name: Install dependencies
        run: pnpm install --frozen-lockfile

      - name: Run Chromatic
        uses: chromaui/action@latest
        with:
          # SECURITY: the project token was previously committed in plaintext
          # here; it must live in repository secrets. Rotate the old token in
          # the Chromatic dashboard, since it is exposed in git history.
          projectToken: ${{ secrets.CHROMATIC_PROJECT_TOKEN }}
          onlyChanged: true
          workingDir: autogpt_platform/frontend
          token: ${{ secrets.GITHUB_TOKEN }}
          exitOnceUploaded: true

  # Full-stack Playwright E2E suite: builds and boots the whole platform with
  # docker compose, seeds (or restores cached) test data, then runs the tests.
  e2e_test:
    name: end-to-end tests
    runs-on: big-boi
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
        with:
          submodules: recursive

      - name: Set up Platform - Copy default supabase .env
        run: |
          cp ../.env.default ../.env

      - name: Set up Platform - Copy backend .env and set OpenAI API key
        run: |
          cp ../backend/.env.default ../backend/.env
          echo "OPENAI_INTERNAL_API_KEY=${{ secrets.OPENAI_API_KEY }}" >> ../backend/.env
        env:
          # Used by E2E test data script to generate embeddings for approved store agents
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}

      - name: Set up Platform - Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: docker-container
          driver-opts: network=host

      - name: Set up Platform - Expose GHA cache to docker buildx CLI
        uses: crazy-max/ghaction-github-runtime@v3

      - name: Set up Platform - Build Docker images (with cache)
        working-directory: autogpt_platform
        run: |
          pip install pyyaml
          # Resolve extends and generate a flat compose file that bake can understand
          docker compose -f docker-compose.yml config > docker-compose.resolved.yml
          # Add cache configuration to the resolved compose file
          python ../.github/workflows/scripts/docker-ci-fix-compose-build-cache.py \
            --source docker-compose.resolved.yml \
            --cache-from "type=gha" \
            --cache-to "type=gha,mode=max" \
            --backend-hash "${{ hashFiles('autogpt_platform/backend/Dockerfile', 'autogpt_platform/backend/poetry.lock', 'autogpt_platform/backend/backend') }}" \
            --frontend-hash "${{ hashFiles('autogpt_platform/frontend/Dockerfile', 'autogpt_platform/frontend/pnpm-lock.yaml', 'autogpt_platform/frontend/src') }}" \
            --git-ref "${{ github.ref }}"
          # Build with bake using the resolved compose file (now includes cache config)
          docker buildx bake --allow=fs.read=.. -f docker-compose.resolved.yml --load
        env:
          # Quoted: Actions env values are strings; avoid YAML boolean coercion.
          NEXT_PUBLIC_PW_TEST: "true"

      - name: Set up tests - Cache E2E test data
        id: e2e-data-cache
        uses: actions/cache@v5
        with:
          path: /tmp/e2e_test_data.sql
          # Invalidate whenever the seeding script, DB migrations, or this
          # workflow change.
          key: e2e-test-data-${{ hashFiles('autogpt_platform/backend/test/e2e_test_data.py', 'autogpt_platform/backend/migrations/**', '.github/workflows/platform-frontend-ci.yml') }}

      - name: Set up Platform - Start Supabase DB + Auth
        run: |
          docker compose -f ../docker-compose.resolved.yml up -d db auth --no-build
          echo "Waiting for database to be ready..."
          timeout 60 sh -c 'until docker compose -f ../docker-compose.resolved.yml exec -T db pg_isready -U postgres 2>/dev/null; do sleep 2; done'
          echo "Waiting for auth service to be ready..."
          timeout 60 sh -c 'until docker compose -f ../docker-compose.resolved.yml exec -T db psql -U postgres -d postgres -c "SELECT 1 FROM auth.users LIMIT 1" 2>/dev/null; do sleep 2; done' || echo "Auth schema check timeout, continuing..."

      - name: Set up Platform - Run migrations
        run: |
          echo "Running migrations..."
          docker compose -f ../docker-compose.resolved.yml run --rm migrate
          echo "✅ Migrations completed"
        env:
          NEXT_PUBLIC_PW_TEST: "true"

      - name: Set up tests - Load cached E2E test data
        if: steps.e2e-data-cache.outputs.cache-hit == 'true'
        run: |
          echo "✅ Found cached E2E test data, restoring..."
          # replica mode disables triggers/FK checks so the dump loads cleanly
          {
            echo "SET session_replication_role = 'replica';"
            cat /tmp/e2e_test_data.sql
            echo "SET session_replication_role = 'origin';"
          } | docker compose -f ../docker-compose.resolved.yml exec -T db psql -U postgres -d postgres -b
          # Refresh materialized views after restore
          docker compose -f ../docker-compose.resolved.yml exec -T db \
            psql -U postgres -d postgres -b -c "SET search_path TO platform; SELECT refresh_store_materialized_views();" || true
          echo "✅ E2E test data restored from cache"

      - name: Set up Platform - Start (all other services)
        run: |
          docker compose -f ../docker-compose.resolved.yml up -d --no-build
          echo "Waiting for rest_server to be ready..."
          timeout 60 sh -c 'until curl -f http://localhost:8006/health 2>/dev/null; do sleep 2; done' || echo "Rest server health check timeout, continuing..."
        env:
          NEXT_PUBLIC_PW_TEST: "true"

      - name: Set up tests - Create E2E test data
        if: steps.e2e-data-cache.outputs.cache-hit != 'true'
        run: |
          echo "Creating E2E test data..."
          docker cp ../backend/test/e2e_test_data.py $(docker compose -f ../docker-compose.resolved.yml ps -q rest_server):/tmp/e2e_test_data.py
          docker compose -f ../docker-compose.resolved.yml exec -T rest_server sh -c "cd /app/autogpt_platform && python /tmp/e2e_test_data.py" || {
            echo "❌ E2E test data creation failed!"
            docker compose -f ../docker-compose.resolved.yml logs --tail=50 rest_server
            exit 1
          }
          # Dump auth.users + platform schema for cache (two separate dumps)
          echo "Dumping database for cache..."
          {
            docker compose -f ../docker-compose.resolved.yml exec -T db \
              pg_dump -U postgres --data-only --column-inserts \
              --table='auth.users' postgres
            docker compose -f ../docker-compose.resolved.yml exec -T db \
              pg_dump -U postgres --data-only --column-inserts \
              --schema=platform \
              --exclude-table='platform._prisma_migrations' \
              --exclude-table='platform.apscheduler_jobs' \
              --exclude-table='platform.apscheduler_jobs_batched_notifications' \
              postgres
          } > /tmp/e2e_test_data.sql
          echo "✅ Database dump created for caching ($(wc -l < /tmp/e2e_test_data.sql) lines)"

      - name: Set up tests - Enable corepack
        run: corepack enable

      - name: Set up tests - Set up Node
        uses: actions/setup-node@v6
        with:
          node-version: "22.18.0"
          cache: "pnpm"
          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml

      - name: Set up tests - Install dependencies
        run: pnpm install --frozen-lockfile

      - name: Set up tests - Install browser 'chromium'
        run: pnpm playwright install --with-deps chromium

      - name: Run Playwright tests
        run: pnpm test:no-build
        continue-on-error: false

      - name: Upload Playwright report
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: playwright-report
          path: playwright-report
          if-no-files-found: ignore
          retention-days: 3

      - name: Upload Playwright test results
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: playwright-test-results
          path: test-results
          if-no-files-found: ignore
          retention-days: 3

      - name: Print Final Docker Compose logs
        if: always()
        run: docker compose -f ../docker-compose.resolved.yml logs

  integration_test:
    runs-on: ubuntu-latest
    needs: setup
    steps:
      - name: Checkout repository
        uses: actions/checkout@v6
        with:
          submodules: recursive

      - name: Enable corepack
        run: corepack enable

      - name: Set up Node
        uses: actions/setup-node@v6
        with:
          node-version: "22.18.0"
          cache: "pnpm"
          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml

      - name: Install dependencies
        run: pnpm install --frozen-lockfile

      - name: Generate API client
        run: pnpm generate:api

      - name: Run Integration Tests
        run: pnpm test:unit