Mirror of https://github.com/Significant-Gravitas/AutoGPT.git — synced 2026-04-08 03:00:28 -04:00
When OpenAI credentials are unavailable (fork PRs, dev envs without API keys), both builder block search and store agent functionality break: 1. **Block search returns wrong results.** `unified_hybrid_search` falls back to a zero vector when embedding generation fails. With ~200 blocks in `UnifiedContentEmbedding`, the zero-vector semantic scores are garbage, and lexical matching on short block names is too weak — "Store Value" doesn't appear in the top results for query "Store Value". 2. **Store submission approval fails entirely.** `review_store_submission` calls `ensure_embedding()` inside a transaction. When it throws, the entire transaction rolls back — no store submissions get approved, the `StoreAgent` materialized view stays empty, and all marketplace e2e tests fail. 3. **Store search returns nothing.** Even when store data exists, `hybrid_search` queries `UnifiedContentEmbedding` which has no store agent rows (backfill failed). It succeeds with zero results rather than throwing, so the existing exception-based fallback never triggers. ### Changes 🏗️ - Replace `unified_hybrid_search` with in-memory text search in `_hybrid_search_blocks` (-> `_text_search_blocks`). All ~200 blocks are already loaded in memory, and `_score_primary_fields` provides correct deterministic text relevance scoring against block name, description, and input schema field descriptions — the same rich text the embedding pipeline uses. CamelCase block names are split via `split_camelcase()` to match the tokenization from PR #12400. - Make embedding generation in `review_store_submission` best-effort: catch failures and log a warning instead of rolling back the approval transaction. The backfill scheduler retries later when credentials become available. - Fall through to direct DB search when `hybrid_search` returns empty results (not just when it throws). 
The fallback uses ad-hoc `to_tsvector`/`plainto_tsquery` with `ts_rank_cd` ranking on `StoreAgent` view fields, restoring the search quality of the original pre-hybrid implementation (stemming, stop-word removal, relevance ranking). - Fix Playwright artifact upload in end-to-end test CI ### Checklist 📋 #### For code changes: - [x] I have clearly listed my changes in the PR description - [x] I have made a test plan - [x] I have tested my changes according to the test plan: - [x] `build.spec.ts`: 8/8 pass locally (was 0/7 before fix) - [x] All 79 e2e tests pass in CI (was 15 failures before fix) --- Co-authored-by: Reinier van der Leer (@Pwuts) --------- Co-authored-by: Reinier van der Leer <pwuts@agpt.co> Co-authored-by: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
313 lines · 12 KiB · YAML
---
name: AutoGPT Platform - Full-stack CI

# Trigger on pushes to the main branches, on PRs, and in the merge queue,
# but only when platform code or this workflow's own scripts change.
on:
  push:
    branches: [master, dev]
    paths:
      - ".github/workflows/platform-fullstack-ci.yml"
      - ".github/workflows/scripts/docker-ci-fix-compose-build-cache.py"
      - ".github/workflows/scripts/get_package_version_from_lockfile.py"
      - "autogpt_platform/**"
  pull_request:
    paths:
      - ".github/workflows/platform-fullstack-ci.yml"
      - ".github/workflows/scripts/docker-ci-fix-compose-build-cache.py"
      - ".github/workflows/scripts/get_package_version_from_lockfile.py"
      - "autogpt_platform/**"
  merge_group:

# One concurrency group per merge-queue entry / PR / commit; superseded runs
# are cancelled only for PRs so queue and branch runs always finish.
concurrency:
  group: ${{ github.workflow }}-${{ github.event_name == 'merge_group' && format('merge-queue-{0}', github.ref) || github.head_ref && format('pr-{0}', github.event.pull_request.number) || github.sha }}
  cancel-in-progress: ${{ github.event_name == 'pull_request' }}

# Most steps operate on the frontend package; backend steps override this
# with a step-level working-directory.
defaults:
  run:
    shell: bash
    working-directory: autogpt_platform/frontend
jobs:
  # Warms the pnpm dependency cache so downstream jobs restore it instead of
  # resolving/downloading packages from scratch.
  setup:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v6

      # corepack provides the pnpm version pinned in package.json
      - name: Enable corepack
        run: corepack enable

      - name: Set up Node
        uses: actions/setup-node@v6
        with:
          node-version: "22.18.0"
          cache: "pnpm"
          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml

      - name: Install dependencies to populate cache
        run: pnpm install --frozen-lockfile
check-api-types:
|
|
name: check API types
|
|
runs-on: ubuntu-latest
|
|
needs: setup
|
|
|
|
steps:
|
|
- name: Checkout repository
|
|
uses: actions/checkout@v6
|
|
with:
|
|
submodules: recursive
|
|
|
|
# ------------------------ Backend setup ------------------------
|
|
|
|
- name: Set up Backend - Set up Python
|
|
uses: actions/setup-python@v5
|
|
with:
|
|
python-version: "3.12"
|
|
|
|
- name: Set up Backend - Install Poetry
|
|
working-directory: autogpt_platform/backend
|
|
run: |
|
|
POETRY_VERSION=$(python ../../.github/workflows/scripts/get_package_version_from_lockfile.py poetry)
|
|
echo "Installing Poetry version ${POETRY_VERSION}"
|
|
curl -sSL https://install.python-poetry.org | POETRY_VERSION=$POETRY_VERSION python3 -
|
|
|
|
- name: Set up Backend - Set up dependency cache
|
|
uses: actions/cache@v5
|
|
with:
|
|
path: ~/.cache/pypoetry
|
|
key: poetry-${{ runner.os }}-${{ hashFiles('autogpt_platform/backend/poetry.lock') }}
|
|
|
|
- name: Set up Backend - Install dependencies
|
|
working-directory: autogpt_platform/backend
|
|
run: poetry install
|
|
|
|
- name: Set up Backend - Generate Prisma client
|
|
working-directory: autogpt_platform/backend
|
|
run: poetry run prisma generate && poetry run gen-prisma-stub
|
|
|
|
- name: Set up Frontend - Export OpenAPI schema from Backend
|
|
working-directory: autogpt_platform/backend
|
|
run: poetry run export-api-schema --output ../frontend/src/app/api/openapi.json
|
|
|
|
# ------------------------ Frontend setup ------------------------
|
|
|
|
- name: Set up Frontend - Enable corepack
|
|
run: corepack enable
|
|
|
|
- name: Set up Frontend - Set up Node
|
|
uses: actions/setup-node@v6
|
|
with:
|
|
node-version: "22.18.0"
|
|
cache: "pnpm"
|
|
cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml
|
|
|
|
- name: Set up Frontend - Install dependencies
|
|
run: pnpm install --frozen-lockfile
|
|
|
|
- name: Set up Frontend - Format OpenAPI schema
|
|
id: format-schema
|
|
run: pnpm prettier --write ./src/app/api/openapi.json
|
|
|
|
- name: Check for API schema changes
|
|
run: |
|
|
if ! git diff --exit-code src/app/api/openapi.json; then
|
|
echo "❌ API schema changes detected in src/app/api/openapi.json"
|
|
echo ""
|
|
echo "The openapi.json file has been modified after exporting the API schema."
|
|
echo "This usually means changes have been made in the BE endpoints without updating the Frontend."
|
|
echo "The API schema is now out of sync with the Front-end queries."
|
|
echo ""
|
|
echo "To fix this:"
|
|
echo "\nIn the backend directory:"
|
|
echo "1. Run 'poetry run export-api-schema --output ../frontend/src/app/api/openapi.json'"
|
|
echo "\nIn the frontend directory:"
|
|
echo "2. Run 'pnpm prettier --write src/app/api/openapi.json'"
|
|
echo "3. Run 'pnpm generate:api'"
|
|
echo "4. Run 'pnpm types'"
|
|
echo "5. Fix any TypeScript errors that may have been introduced"
|
|
echo "6. Commit and push your changes"
|
|
echo ""
|
|
exit 1
|
|
else
|
|
echo "✅ No API schema changes detected"
|
|
fi
|
|
|
|
- name: Set up Frontend - Generate API client
|
|
id: generate-api-client
|
|
run: pnpm orval --config ./orval.config.ts
|
|
# Continue with type generation & check even if there are schema changes
|
|
if: success() || (steps.format-schema.outcome == 'success')
|
|
|
|
- name: Check for TypeScript errors
|
|
run: pnpm types
|
|
if: success() || (steps.generate-api-client.outcome == 'success')
|
|
|
|
e2e_test:
|
|
name: end-to-end tests
|
|
runs-on: big-boi
|
|
|
|
steps:
|
|
- name: Checkout repository
|
|
uses: actions/checkout@v6
|
|
with:
|
|
submodules: recursive
|
|
|
|
- name: Set up Platform - Copy default supabase .env
|
|
run: |
|
|
cp ../.env.default ../.env
|
|
|
|
- name: Set up Platform - Copy backend .env and set OpenAI API key
|
|
run: |
|
|
cp ../backend/.env.default ../backend/.env
|
|
echo "OPENAI_INTERNAL_API_KEY=${{ secrets.OPENAI_API_KEY }}" >> ../backend/.env
|
|
env:
|
|
# Used by E2E test data script to generate embeddings for approved store agents
|
|
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
|
|
|
|
- name: Set up Platform - Set up Docker Buildx
|
|
uses: docker/setup-buildx-action@v3
|
|
with:
|
|
driver: docker-container
|
|
driver-opts: network=host
|
|
|
|
- name: Set up Platform - Expose GHA cache to docker buildx CLI
|
|
uses: crazy-max/ghaction-github-runtime@v4
|
|
|
|
- name: Set up Platform - Build Docker images (with cache)
|
|
working-directory: autogpt_platform
|
|
run: |
|
|
pip install pyyaml
|
|
|
|
# Resolve extends and generate a flat compose file that bake can understand
|
|
docker compose -f docker-compose.yml config > docker-compose.resolved.yml
|
|
|
|
# Add cache configuration to the resolved compose file
|
|
python ../.github/workflows/scripts/docker-ci-fix-compose-build-cache.py \
|
|
--source docker-compose.resolved.yml \
|
|
--cache-from "type=gha" \
|
|
--cache-to "type=gha,mode=max" \
|
|
--backend-hash "${{ hashFiles('autogpt_platform/backend/Dockerfile', 'autogpt_platform/backend/poetry.lock', 'autogpt_platform/backend/backend/**') }}" \
|
|
--frontend-hash "${{ hashFiles('autogpt_platform/frontend/Dockerfile', 'autogpt_platform/frontend/pnpm-lock.yaml', 'autogpt_platform/frontend/src/**') }}" \
|
|
--git-ref "${{ github.ref }}"
|
|
|
|
# Build with bake using the resolved compose file (now includes cache config)
|
|
docker buildx bake --allow=fs.read=.. -f docker-compose.resolved.yml --load
|
|
env:
|
|
NEXT_PUBLIC_PW_TEST: true
|
|
|
|
- name: Set up tests - Cache E2E test data
|
|
id: e2e-data-cache
|
|
uses: actions/cache@v5
|
|
with:
|
|
path: /tmp/e2e_test_data.sql
|
|
key: e2e-test-data-${{ hashFiles('autogpt_platform/backend/test/e2e_test_data.py', 'autogpt_platform/backend/migrations/**', '.github/workflows/platform-fullstack-ci.yml') }}
|
|
|
|
- name: Set up Platform - Start Supabase DB + Auth
|
|
run: |
|
|
docker compose -f ../docker-compose.resolved.yml up -d db auth --no-build
|
|
echo "Waiting for database to be ready..."
|
|
timeout 60 sh -c 'until docker compose -f ../docker-compose.resolved.yml exec -T db pg_isready -U postgres 2>/dev/null; do sleep 2; done'
|
|
echo "Waiting for auth service to be ready..."
|
|
timeout 60 sh -c 'until docker compose -f ../docker-compose.resolved.yml exec -T db psql -U postgres -d postgres -c "SELECT 1 FROM auth.users LIMIT 1" 2>/dev/null; do sleep 2; done' || echo "Auth schema check timeout, continuing..."
|
|
|
|
- name: Set up Platform - Run migrations
|
|
run: |
|
|
echo "Running migrations..."
|
|
docker compose -f ../docker-compose.resolved.yml run --rm migrate
|
|
echo "✅ Migrations completed"
|
|
env:
|
|
NEXT_PUBLIC_PW_TEST: true
|
|
|
|
- name: Set up tests - Load cached E2E test data
|
|
if: steps.e2e-data-cache.outputs.cache-hit == 'true'
|
|
run: |
|
|
echo "✅ Found cached E2E test data, restoring..."
|
|
{
|
|
echo "SET session_replication_role = 'replica';"
|
|
cat /tmp/e2e_test_data.sql
|
|
echo "SET session_replication_role = 'origin';"
|
|
} | docker compose -f ../docker-compose.resolved.yml exec -T db psql -U postgres -d postgres -b
|
|
# Refresh materialized views after restore
|
|
docker compose -f ../docker-compose.resolved.yml exec -T db \
|
|
psql -U postgres -d postgres -b -c "SET search_path TO platform; SELECT refresh_store_materialized_views();" || true
|
|
|
|
echo "✅ E2E test data restored from cache"
|
|
|
|
- name: Set up Platform - Start (all other services)
|
|
run: |
|
|
docker compose -f ../docker-compose.resolved.yml up -d --no-build
|
|
echo "Waiting for rest_server to be ready..."
|
|
timeout 60 sh -c 'until curl -f http://localhost:8006/health 2>/dev/null; do sleep 2; done' || echo "Rest server health check timeout, continuing..."
|
|
env:
|
|
NEXT_PUBLIC_PW_TEST: true
|
|
|
|
- name: Set up tests - Create E2E test data
|
|
if: steps.e2e-data-cache.outputs.cache-hit != 'true'
|
|
run: |
|
|
echo "Creating E2E test data..."
|
|
docker cp ../backend/test/e2e_test_data.py $(docker compose -f ../docker-compose.resolved.yml ps -q rest_server):/tmp/e2e_test_data.py
|
|
docker compose -f ../docker-compose.resolved.yml exec -T rest_server sh -c "cd /app/autogpt_platform && python /tmp/e2e_test_data.py" || {
|
|
echo "❌ E2E test data creation failed!"
|
|
docker compose -f ../docker-compose.resolved.yml logs --tail=50 rest_server
|
|
exit 1
|
|
}
|
|
|
|
# Dump auth.users + platform schema for cache (two separate dumps)
|
|
echo "Dumping database for cache..."
|
|
{
|
|
docker compose -f ../docker-compose.resolved.yml exec -T db \
|
|
pg_dump -U postgres --data-only --column-inserts \
|
|
--table='auth.users' postgres
|
|
docker compose -f ../docker-compose.resolved.yml exec -T db \
|
|
pg_dump -U postgres --data-only --column-inserts \
|
|
--schema=platform \
|
|
--exclude-table='platform._prisma_migrations' \
|
|
--exclude-table='platform.apscheduler_jobs' \
|
|
--exclude-table='platform.apscheduler_jobs_batched_notifications' \
|
|
postgres
|
|
} > /tmp/e2e_test_data.sql
|
|
|
|
echo "✅ Database dump created for caching ($(wc -l < /tmp/e2e_test_data.sql) lines)"
|
|
|
|
- name: Set up tests - Enable corepack
|
|
run: corepack enable
|
|
|
|
- name: Set up tests - Set up Node
|
|
uses: actions/setup-node@v6
|
|
with:
|
|
node-version: "22.18.0"
|
|
cache: "pnpm"
|
|
cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml
|
|
|
|
- name: Set up tests - Install dependencies
|
|
run: pnpm install --frozen-lockfile
|
|
|
|
- name: Set up tests - Install browser 'chromium'
|
|
run: pnpm playwright install --with-deps chromium
|
|
|
|
- name: Run Playwright tests
|
|
run: pnpm test:no-build
|
|
continue-on-error: false
|
|
|
|
- name: Upload Playwright report
|
|
if: always()
|
|
uses: actions/upload-artifact@v4
|
|
with:
|
|
name: playwright-report
|
|
path: autogpt_platform/frontend/playwright-report
|
|
if-no-files-found: ignore
|
|
retention-days: 3
|
|
|
|
- name: Upload Playwright test results
|
|
if: always()
|
|
uses: actions/upload-artifact@v4
|
|
with:
|
|
name: playwright-test-results
|
|
path: autogpt_platform/frontend/test-results
|
|
if-no-files-found: ignore
|
|
retention-days: 3
|
|
|
|
- name: Print Final Docker Compose logs
|
|
if: always()
|
|
run: docker compose -f ../docker-compose.resolved.yml logs
|