Mirror of https://github.com/Significant-Gravitas/AutoGPT.git
Synced 2026-02-13 00:05:02 -05:00

Compare commits (10 commits): ci/claude- ... fix/claude

| SHA1 |
|---|
| 0b2fb655bc |
| 99f8bf5f0c |
| 3f76f1318b |
| b011289dd2 |
| 49c2f578b4 |
| 7150b7768d |
| 8c95b03636 |
| 4a8368887f |
| d46e5e6b6a |
| 4e632bbd60 |
Changed file (name not captured in this view; a dockerignore-style whitelist):

@@ -5,13 +5,42 @@
 !docs/
 
 # Platform - Libs
-!autogpt_platform/autogpt_libs/
+!autogpt_platform/autogpt_libs/autogpt_libs/
+!autogpt_platform/autogpt_libs/pyproject.toml
+!autogpt_platform/autogpt_libs/poetry.lock
+!autogpt_platform/autogpt_libs/README.md
 
 # Platform - Backend
-!autogpt_platform/backend/
+!autogpt_platform/backend/backend/
+!autogpt_platform/backend/test/e2e_test_data.py
+!autogpt_platform/backend/migrations/
+!autogpt_platform/backend/schema.prisma
+!autogpt_platform/backend/pyproject.toml
+!autogpt_platform/backend/poetry.lock
+!autogpt_platform/backend/README.md
+!autogpt_platform/backend/.env
+!autogpt_platform/backend/gen_prisma_types_stub.py
+
+# Platform - Market
+!autogpt_platform/market/market/
+!autogpt_platform/market/scripts.py
+!autogpt_platform/market/schema.prisma
+!autogpt_platform/market/pyproject.toml
+!autogpt_platform/market/poetry.lock
+!autogpt_platform/market/README.md
 
 # Platform - Frontend
-!autogpt_platform/frontend/
+!autogpt_platform/frontend/src/
+!autogpt_platform/frontend/public/
+!autogpt_platform/frontend/scripts/
+!autogpt_platform/frontend/package.json
+!autogpt_platform/frontend/pnpm-lock.yaml
+!autogpt_platform/frontend/tsconfig.json
+!autogpt_platform/frontend/README.md
+## config
+!autogpt_platform/frontend/*.config.*
+!autogpt_platform/frontend/.env.*
+!autogpt_platform/frontend/.env
 
 # Classic - AutoGPT
 !classic/original_autogpt/autogpt/
@@ -35,38 +64,6 @@
 # Classic - Frontend
 !classic/frontend/build/web/
 
-# Explicitly re-ignore unwanted files from whitelisted directories
-# Note: These patterns MUST come after the whitelist rules to take effect
-
-# Hidden files and directories (but keep frontend .env files needed for build)
-**/.*
-!autogpt_platform/frontend/.env
-!autogpt_platform/frontend/.env.default
-!autogpt_platform/frontend/.env.production
-
-# Python artifacts
-**/__pycache__/
-**/*.pyc
-**/*.pyo
-**/.venv/
-**/.ruff_cache/
-**/.pytest_cache/
-**/.coverage
-**/htmlcov/
-
-# Node artifacts
-**/node_modules/
-**/.next/
-**/storybook-static/
-**/playwright-report/
-**/test-results/
-
-# Build artifacts
-**/dist/
-**/build/
-!autogpt_platform/frontend/src/**/build/
-**/target/
-
-# Logs and temp files
-**/*.log
-**/*.tmp
+# Explicitly re-ignore some folders
+.*
+**/__pycache__
|||||||
42
.github/workflows/claude-ci-failure-auto-fix.yml
vendored
42
.github/workflows/claude-ci-failure-auto-fix.yml
vendored
@@ -40,48 +40,6 @@ jobs:
|
|||||||
git checkout -b "$BRANCH_NAME"
|
git checkout -b "$BRANCH_NAME"
|
||||||
echo "branch_name=$BRANCH_NAME" >> $GITHUB_OUTPUT
|
echo "branch_name=$BRANCH_NAME" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
# Backend Python/Poetry setup (so Claude can run linting/tests)
|
|
||||||
- name: Set up Python
|
|
||||||
uses: actions/setup-python@v5
|
|
||||||
with:
|
|
||||||
python-version: "3.11"
|
|
||||||
|
|
||||||
- name: Set up Python dependency cache
|
|
||||||
uses: actions/cache@v5
|
|
||||||
with:
|
|
||||||
path: ~/.cache/pypoetry
|
|
||||||
key: poetry-${{ runner.os }}-${{ hashFiles('autogpt_platform/backend/poetry.lock') }}
|
|
||||||
|
|
||||||
- name: Install Poetry
|
|
||||||
run: |
|
|
||||||
cd autogpt_platform/backend
|
|
||||||
HEAD_POETRY_VERSION=$(python3 ../../.github/workflows/scripts/get_package_version_from_lockfile.py poetry)
|
|
||||||
curl -sSL https://install.python-poetry.org | POETRY_VERSION=$HEAD_POETRY_VERSION python3 -
|
|
||||||
echo "$HOME/.local/bin" >> $GITHUB_PATH
|
|
||||||
|
|
||||||
- name: Install Python dependencies
|
|
||||||
working-directory: autogpt_platform/backend
|
|
||||||
run: poetry install
|
|
||||||
|
|
||||||
- name: Generate Prisma Client
|
|
||||||
working-directory: autogpt_platform/backend
|
|
||||||
run: poetry run prisma generate && poetry run gen-prisma-stub
|
|
||||||
|
|
||||||
# Frontend Node.js/pnpm setup (so Claude can run linting/tests)
|
|
||||||
- name: Enable corepack
|
|
||||||
run: corepack enable
|
|
||||||
|
|
||||||
- name: Set up Node.js
|
|
||||||
uses: actions/setup-node@v6
|
|
||||||
with:
|
|
||||||
node-version: "22"
|
|
||||||
cache: "pnpm"
|
|
||||||
cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml
|
|
||||||
|
|
||||||
- name: Install JavaScript dependencies
|
|
||||||
working-directory: autogpt_platform/frontend
|
|
||||||
run: pnpm install --frozen-lockfile
|
|
||||||
|
|
||||||
- name: Get CI failure details
|
- name: Get CI failure details
|
||||||
id: failure_details
|
id: failure_details
|
||||||
uses: actions/github-script@v8
|
uses: actions/github-script@v8
|
||||||
|
|||||||
.github/workflows/claude-dependabot.yml (vendored, 22 changed lines)

@@ -77,15 +77,27 @@ jobs:
         run: poetry run prisma generate && poetry run gen-prisma-stub
 
       # Frontend Node.js/pnpm setup (mirrors platform-frontend-ci.yml)
-      - name: Enable corepack
-        run: corepack enable
-
       - name: Set up Node.js
         uses: actions/setup-node@v6
         with:
           node-version: "22"
-          cache: "pnpm"
-          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml
+
+      - name: Enable corepack
+        run: corepack enable
+
+      - name: Set pnpm store directory
+        run: |
+          pnpm config set store-dir ~/.pnpm-store
+          echo "PNPM_HOME=$HOME/.pnpm-store" >> $GITHUB_ENV
+
+      - name: Cache frontend dependencies
+        uses: actions/cache@v5
+        with:
+          path: ~/.pnpm-store
+          key: ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml', 'autogpt_platform/frontend/package.json') }}
+          restore-keys: |
+            ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
+            ${{ runner.os }}-pnpm-
 
       - name: Install JavaScript dependencies
         working-directory: autogpt_platform/frontend
.github/workflows/claude.yml (vendored, 22 changed lines)

@@ -93,15 +93,27 @@ jobs:
         run: poetry run prisma generate && poetry run gen-prisma-stub
 
       # Frontend Node.js/pnpm setup (mirrors platform-frontend-ci.yml)
-      - name: Enable corepack
-        run: corepack enable
-
       - name: Set up Node.js
         uses: actions/setup-node@v6
         with:
           node-version: "22"
-          cache: "pnpm"
-          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml
+
+      - name: Enable corepack
+        run: corepack enable
+
+      - name: Set pnpm store directory
+        run: |
+          pnpm config set store-dir ~/.pnpm-store
+          echo "PNPM_HOME=$HOME/.pnpm-store" >> $GITHUB_ENV
+
+      - name: Cache frontend dependencies
+        uses: actions/cache@v5
+        with:
+          path: ~/.pnpm-store
+          key: ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml', 'autogpt_platform/frontend/package.json') }}
+          restore-keys: |
+            ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
+            ${{ runner.os }}-pnpm-
 
       - name: Install JavaScript dependencies
         working-directory: autogpt_platform/frontend
.github/workflows/platform-frontend-ci.yml (vendored, 249 changed lines)

@@ -26,6 +26,7 @@ jobs:
   setup:
     runs-on: ubuntu-latest
     outputs:
+      cache-key: ${{ steps.cache-key.outputs.key }}
      components-changed: ${{ steps.filter.outputs.components }}
 
    steps:
@@ -40,17 +41,28 @@ jobs:
            components:
              - 'autogpt_platform/frontend/src/components/**'
 
-      - name: Enable corepack
-        run: corepack enable
-
-      - name: Set up Node
+      - name: Set up Node.js
         uses: actions/setup-node@v6
         with:
           node-version: "22.18.0"
-          cache: "pnpm"
-          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml
 
-      - name: Install dependencies to populate cache
+      - name: Enable corepack
+        run: corepack enable
+
+      - name: Generate cache key
+        id: cache-key
+        run: echo "key=${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml', 'autogpt_platform/frontend/package.json') }}" >> $GITHUB_OUTPUT
+
+      - name: Cache dependencies
+        uses: actions/cache@v5
+        with:
+          path: ~/.pnpm-store
+          key: ${{ steps.cache-key.outputs.key }}
+          restore-keys: |
+            ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
+            ${{ runner.os }}-pnpm-
+
+      - name: Install dependencies
         run: pnpm install --frozen-lockfile
 
   lint:
@@ -61,15 +73,22 @@ jobs:
       - name: Checkout repository
         uses: actions/checkout@v6
 
-      - name: Enable corepack
-        run: corepack enable
-
-      - name: Set up Node
+      - name: Set up Node.js
         uses: actions/setup-node@v6
         with:
           node-version: "22.18.0"
-          cache: "pnpm"
-          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml
+
+      - name: Enable corepack
+        run: corepack enable
+
+      - name: Restore dependencies cache
+        uses: actions/cache@v5
+        with:
+          path: ~/.pnpm-store
+          key: ${{ needs.setup.outputs.cache-key }}
+          restore-keys: |
+            ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
+            ${{ runner.os }}-pnpm-
 
       - name: Install dependencies
         run: pnpm install --frozen-lockfile
@@ -92,15 +111,22 @@ jobs:
         with:
           fetch-depth: 0
 
-      - name: Enable corepack
-        run: corepack enable
-
-      - name: Set up Node
+      - name: Set up Node.js
         uses: actions/setup-node@v6
         with:
           node-version: "22.18.0"
-          cache: "pnpm"
-          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml
+
+      - name: Enable corepack
+        run: corepack enable
+
+      - name: Restore dependencies cache
+        uses: actions/cache@v5
+        with:
+          path: ~/.pnpm-store
+          key: ${{ needs.setup.outputs.cache-key }}
+          restore-keys: |
+            ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
+            ${{ runner.os }}-pnpm-
 
       - name: Install dependencies
         run: pnpm install --frozen-lockfile
@@ -115,8 +141,10 @@ jobs:
           exitOnceUploaded: true
 
   e2e_test:
-    name: end-to-end tests
     runs-on: big-boi
+    needs: setup
+    strategy:
+      fail-fast: false
 
    steps:
      - name: Checkout repository
@@ -124,11 +152,19 @@ jobs:
         with:
           submodules: recursive
 
-      - name: Set up Platform - Copy default supabase .env
+      - name: Set up Node.js
+        uses: actions/setup-node@v6
+        with:
+          node-version: "22.18.0"
+
+      - name: Enable corepack
+        run: corepack enable
+
+      - name: Copy default supabase .env
         run: |
           cp ../.env.default ../.env
 
-      - name: Set up Platform - Copy backend .env and set OpenAI API key
+      - name: Copy backend .env and set OpenAI API key
         run: |
           cp ../backend/.env.default ../backend/.env
           echo "OPENAI_INTERNAL_API_KEY=${{ secrets.OPENAI_API_KEY }}" >> ../backend/.env
@@ -136,125 +172,77 @@ jobs:
         # Used by E2E test data script to generate embeddings for approved store agents
         OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
 
-      - name: Set up Platform - Set up Docker Buildx
+      - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v3
-        with:
-          driver: docker-container
-          driver-opts: network=host
-
-      - name: Set up Platform - Expose GHA cache to docker buildx CLI
-        uses: crazy-max/ghaction-github-runtime@v3
-
-      - name: Set up Platform - Build Docker images (with cache)
-        working-directory: autogpt_platform
-        run: |
-          pip install pyyaml
-
-          # Resolve extends and generate a flat compose file that bake can understand
-          docker compose -f docker-compose.yml config > docker-compose.resolved.yml
-
-          # Add cache configuration to the resolved compose file
-          python ../.github/workflows/scripts/docker-ci-fix-compose-build-cache.py \
-            --source docker-compose.resolved.yml \
-            --cache-from "type=gha" \
-            --cache-to "type=gha,mode=max" \
-            --backend-hash "${{ hashFiles('autogpt_platform/backend/Dockerfile', 'autogpt_platform/backend/poetry.lock', 'autogpt_platform/backend/backend') }}" \
-            --frontend-hash "${{ hashFiles('autogpt_platform/frontend/Dockerfile', 'autogpt_platform/frontend/pnpm-lock.yaml', 'autogpt_platform/frontend/src') }}" \
-            --git-ref "${{ github.ref }}"
-
-          # Build with bake using the resolved compose file (now includes cache config)
-          docker buildx bake --allow=fs.read=.. -f docker-compose.resolved.yml --load
-        env:
-          NEXT_PUBLIC_PW_TEST: true
-
-      - name: Set up tests - Cache E2E test data
-        id: e2e-data-cache
+      - name: Cache Docker layers
         uses: actions/cache@v5
         with:
-          path: /tmp/e2e_test_data.sql
-          key: e2e-test-data-${{ hashFiles('autogpt_platform/backend/test/e2e_test_data.py', 'autogpt_platform/backend/migrations/**', '.github/workflows/platform-frontend-ci.yml') }}
+          path: /tmp/.buildx-cache
+          key: ${{ runner.os }}-buildx-frontend-test-${{ hashFiles('autogpt_platform/docker-compose.yml', 'autogpt_platform/backend/Dockerfile', 'autogpt_platform/backend/pyproject.toml', 'autogpt_platform/backend/poetry.lock') }}
+          restore-keys: |
+            ${{ runner.os }}-buildx-frontend-test-
 
-      - name: Set up Platform - Start Supabase DB + Auth
+      - name: Run docker compose
         run: |
-          docker compose -f ../docker-compose.resolved.yml up -d db auth --no-build
-          echo "Waiting for database to be ready..."
-          timeout 60 sh -c 'until docker compose -f ../docker-compose.resolved.yml exec -T db pg_isready -U postgres 2>/dev/null; do sleep 2; done'
-          echo "Waiting for auth service to be ready..."
-          timeout 60 sh -c 'until docker compose -f ../docker-compose.resolved.yml exec -T db psql -U postgres -d postgres -c "SELECT 1 FROM auth.users LIMIT 1" 2>/dev/null; do sleep 2; done' || echo "Auth schema check timeout, continuing..."
-
-      - name: Set up Platform - Run migrations
-        run: |
-          echo "Running migrations..."
-          docker compose -f ../docker-compose.resolved.yml run --rm migrate
-          echo "✅ Migrations completed"
+          NEXT_PUBLIC_PW_TEST=true docker compose -f ../docker-compose.yml up -d
         env:
-          NEXT_PUBLIC_PW_TEST: true
+          DOCKER_BUILDKIT: 1
+          BUILDX_CACHE_FROM: type=local,src=/tmp/.buildx-cache
+          BUILDX_CACHE_TO: type=local,dest=/tmp/.buildx-cache-new,mode=max
 
-      - name: Set up tests - Load cached E2E test data
-        if: steps.e2e-data-cache.outputs.cache-hit == 'true'
+      - name: Move cache
         run: |
-          echo "✅ Found cached E2E test data, restoring..."
-          {
-            echo "SET session_replication_role = 'replica';"
-            cat /tmp/e2e_test_data.sql
-            echo "SET session_replication_role = 'origin';"
-          } | docker compose -f ../docker-compose.resolved.yml exec -T db psql -U postgres -d postgres -b
-          # Refresh materialized views after restore
-          docker compose -f ../docker-compose.resolved.yml exec -T db \
-            psql -U postgres -d postgres -b -c "SET search_path TO platform; SELECT refresh_store_materialized_views();" || true
-
-          echo "✅ E2E test data restored from cache"
-
-      - name: Set up Platform - Start (all other services)
+          rm -rf /tmp/.buildx-cache
+          if [ -d "/tmp/.buildx-cache-new" ]; then
+            mv /tmp/.buildx-cache-new /tmp/.buildx-cache
+          fi
+
+      - name: Wait for services to be ready
         run: |
-          docker compose -f ../docker-compose.resolved.yml up -d --no-build
           echo "Waiting for rest_server to be ready..."
           timeout 60 sh -c 'until curl -f http://localhost:8006/health 2>/dev/null; do sleep 2; done' || echo "Rest server health check timeout, continuing..."
-        env:
-          NEXT_PUBLIC_PW_TEST: true
+          echo "Waiting for database to be ready..."
+          timeout 60 sh -c 'until docker compose -f ../docker-compose.yml exec -T db pg_isready -U postgres 2>/dev/null; do sleep 2; done' || echo "Database ready check timeout, continuing..."
 
-      - name: Set up tests - Create E2E test data
-        if: steps.e2e-data-cache.outputs.cache-hit != 'true'
+      - name: Create E2E test data
         run: |
           echo "Creating E2E test data..."
-          docker cp ../backend/test/e2e_test_data.py $(docker compose -f ../docker-compose.resolved.yml ps -q rest_server):/tmp/e2e_test_data.py
-          docker compose -f ../docker-compose.resolved.yml exec -T rest_server sh -c "cd /app/autogpt_platform && python /tmp/e2e_test_data.py" || {
-            echo "❌ E2E test data creation failed!"
-            docker compose -f ../docker-compose.resolved.yml logs --tail=50 rest_server
-            exit 1
-          }
-
-          # Dump auth.users + platform schema for cache (two separate dumps)
-          echo "Dumping database for cache..."
-          {
-            docker compose -f ../docker-compose.resolved.yml exec -T db \
-              pg_dump -U postgres --data-only --column-inserts \
-              --table='auth.users' postgres
-            docker compose -f ../docker-compose.resolved.yml exec -T db \
-              pg_dump -U postgres --data-only --column-inserts \
-              --schema=platform \
-              --exclude-table='platform._prisma_migrations' \
-              --exclude-table='platform.apscheduler_jobs' \
-              --exclude-table='platform.apscheduler_jobs_batched_notifications' \
-              postgres
-          } > /tmp/e2e_test_data.sql
-
-          echo "✅ Database dump created for caching ($(wc -l < /tmp/e2e_test_data.sql) lines)"
-
-      - name: Set up tests - Enable corepack
-        run: corepack enable
-
-      - name: Set up tests - Set up Node
-        uses: actions/setup-node@v6
+          # First try to run the script from inside the container
+          if docker compose -f ../docker-compose.yml exec -T rest_server test -f /app/autogpt_platform/backend/test/e2e_test_data.py; then
+            echo "✅ Found e2e_test_data.py in container, running it..."
+            docker compose -f ../docker-compose.yml exec -T rest_server sh -c "cd /app/autogpt_platform && python backend/test/e2e_test_data.py" || {
+              echo "❌ E2E test data creation failed!"
+              docker compose -f ../docker-compose.yml logs --tail=50 rest_server
+              exit 1
+            }
+          else
+            echo "⚠️ e2e_test_data.py not found in container, copying and running..."
+            # Copy the script into the container and run it
+            docker cp ../backend/test/e2e_test_data.py $(docker compose -f ../docker-compose.yml ps -q rest_server):/tmp/e2e_test_data.py || {
+              echo "❌ Failed to copy script to container"
+              exit 1
+            }
+            docker compose -f ../docker-compose.yml exec -T rest_server sh -c "cd /app/autogpt_platform && python /tmp/e2e_test_data.py" || {
+              echo "❌ E2E test data creation failed!"
+              docker compose -f ../docker-compose.yml logs --tail=50 rest_server
+              exit 1
+            }
+          fi
+
+      - name: Restore dependencies cache
+        uses: actions/cache@v5
         with:
-          node-version: "22.18.0"
-          cache: "pnpm"
-          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml
+          path: ~/.pnpm-store
+          key: ${{ needs.setup.outputs.cache-key }}
+          restore-keys: |
+            ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
+            ${{ runner.os }}-pnpm-
 
-      - name: Set up tests - Install dependencies
+      - name: Install dependencies
         run: pnpm install --frozen-lockfile
 
-      - name: Set up tests - Install browser 'chromium'
+      - name: Install Browser 'chromium'
         run: pnpm playwright install --with-deps chromium
 
       - name: Run Playwright tests
@@ -281,7 +269,7 @@ jobs:
 
       - name: Print Final Docker Compose logs
         if: always()
-        run: docker compose -f ../docker-compose.resolved.yml logs
+        run: docker compose -f ../docker-compose.yml logs
 
   integration_test:
     runs-on: ubuntu-latest
@@ -293,15 +281,22 @@ jobs:
         with:
           submodules: recursive
 
-      - name: Enable corepack
-        run: corepack enable
-
-      - name: Set up Node
+      - name: Set up Node.js
         uses: actions/setup-node@v6
         with:
           node-version: "22.18.0"
-          cache: "pnpm"
-          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml
+
+      - name: Enable corepack
+        run: corepack enable
+
+      - name: Restore dependencies cache
+        uses: actions/cache@v5
+        with:
+          path: ~/.pnpm-store
+          key: ${{ needs.setup.outputs.cache-key }}
+          restore-keys: |
+            ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
+            ${{ runner.os }}-pnpm-
 
       - name: Install dependencies
         run: pnpm install --frozen-lockfile
Deleted file (@@ -1,195 +0,0 @@; name not captured in this view, but from its contents and the workflow references above it appears to be .github/workflows/scripts/docker-ci-fix-compose-build-cache.py). Its content, with indentation restored:

#!/usr/bin/env python3
"""
Add cache configuration to a resolved docker-compose file for all services
that have a build key, and ensure image names match what docker compose expects.
"""

import argparse

import yaml


DEFAULT_BRANCH = "dev"
CACHE_BUILDS_FOR_COMPONENTS = ["backend", "frontend"]


def main():
    parser = argparse.ArgumentParser(
        description="Add cache config to a resolved compose file"
    )
    parser.add_argument(
        "--source",
        required=True,
        help="Source compose file to read (should be output of `docker compose config`)",
    )
    parser.add_argument(
        "--cache-from",
        default="type=gha",
        help="Cache source configuration",
    )
    parser.add_argument(
        "--cache-to",
        default="type=gha,mode=max",
        help="Cache destination configuration",
    )
    for component in CACHE_BUILDS_FOR_COMPONENTS:
        parser.add_argument(
            f"--{component}-hash",
            default="",
            help=f"Hash for {component} cache scope (e.g., from hashFiles())",
        )
    parser.add_argument(
        "--git-ref",
        default="",
        help="Git ref for branch-based cache scope (e.g., refs/heads/master)",
    )
    args = parser.parse_args()

    # Normalize git ref to a safe scope name (e.g., refs/heads/master -> master)
    git_ref_scope = ""
    if args.git_ref:
        git_ref_scope = args.git_ref.replace("refs/heads/", "").replace("/", "-")

    with open(args.source, "r") as f:
        compose = yaml.safe_load(f)

    # Get project name from compose file or default
    project_name = compose.get("name", "autogpt_platform")

    def get_image_name(dockerfile: str, target: str) -> str:
        """Generate image name based on Dockerfile folder and build target."""
        dockerfile_parts = dockerfile.replace("\\", "/").split("/")
        if len(dockerfile_parts) >= 2:
            folder_name = dockerfile_parts[-2]  # e.g., "backend" or "frontend"
        else:
            folder_name = "app"
        return f"{project_name}-{folder_name}:{target}"

    def get_build_key(dockerfile: str, target: str) -> str:
        """Generate a unique key for a Dockerfile+target combination."""
        return f"{dockerfile}:{target}"

    def get_component(dockerfile: str) -> str | None:
        """Get component name (frontend/backend) from dockerfile path."""
        for component in CACHE_BUILDS_FOR_COMPONENTS:
            if component in dockerfile:
                return component
        return None

    # First pass: collect all services with build configs and identify duplicates
    # Track which (dockerfile, target) combinations we've seen
    build_key_to_first_service: dict[str, str] = {}
    services_to_build: list[str] = []
    services_to_dedupe: list[str] = []

    for service_name, service_config in compose.get("services", {}).items():
        if "build" not in service_config:
            continue

        build_config = service_config["build"]
        dockerfile = build_config.get("dockerfile", "Dockerfile")
        target = build_config.get("target", "default")
        build_key = get_build_key(dockerfile, target)

        if build_key not in build_key_to_first_service:
            # First service with this build config - it will do the actual build
            build_key_to_first_service[build_key] = service_name
            services_to_build.append(service_name)
        else:
            # Duplicate - will just use the image from the first service
            services_to_dedupe.append(service_name)

    # Second pass: configure builds and deduplicate
    modified_services = []
    for service_name, service_config in compose.get("services", {}).items():
        if "build" not in service_config:
            continue

        build_config = service_config["build"]
        dockerfile = build_config.get("dockerfile", "Dockerfile")
        target = build_config.get("target", "latest")
        image_name = get_image_name(dockerfile, target)

        # Set image name for all services (needed for both builders and deduped)
        service_config["image"] = image_name

        if service_name in services_to_dedupe:
            # Remove build config - this service will use the pre-built image
            del service_config["build"]
            continue

        # This service will do the actual build - add cache config
        cache_from_list = []
        cache_to_list = []

        component = get_component(dockerfile)
        if not component:
            # Skip services that don't clearly match frontend/backend
            continue

        # Get the hash for this component
        component_hash = getattr(args, f"{component}_hash")

        # Scope format: platform-{component}-{target}-{hash|ref}
        # Example: platform-backend-server-abc123

        if "type=gha" in args.cache_from:
            # 1. Primary: exact hash match (most specific)
            if component_hash:
                hash_scope = f"platform-{component}-{target}-{component_hash}"
                cache_from_list.append(f"{args.cache_from},scope={hash_scope}")

            # 2. Fallback: branch-based cache
            if git_ref_scope:
                ref_scope = f"platform-{component}-{target}-{git_ref_scope}"
                cache_from_list.append(f"{args.cache_from},scope={ref_scope}")

            # 3. Fallback: dev branch cache (for PRs/feature branches)
            if git_ref_scope and git_ref_scope != DEFAULT_BRANCH:
                master_scope = f"platform-{component}-{target}-{DEFAULT_BRANCH}"
                cache_from_list.append(f"{args.cache_from},scope={master_scope}")

        if "type=gha" in args.cache_to:
            # Write to both hash-based and branch-based scopes
            if component_hash:
                hash_scope = f"platform-{component}-{target}-{component_hash}"
                cache_to_list.append(f"{args.cache_to},scope={hash_scope}")

            if git_ref_scope:
                ref_scope = f"platform-{component}-{target}-{git_ref_scope}"
                cache_to_list.append(f"{args.cache_to},scope={ref_scope}")

        # Ensure we have at least one cache source/target
        if not cache_from_list:
            cache_from_list.append(args.cache_from)
        if not cache_to_list:
            cache_to_list.append(args.cache_to)

        build_config["cache_from"] = cache_from_list
        build_config["cache_to"] = cache_to_list
        modified_services.append(service_name)

    # Write back to the same file
    with open(args.source, "w") as f:
        yaml.dump(compose, f, default_flow_style=False, sort_keys=False)

    print(f"Added cache config to {len(modified_services)} services in {args.source}:")
    for svc in modified_services:
        svc_config = compose["services"][svc]
        build_cfg = svc_config.get("build", {})
        cache_from_list = build_cfg.get("cache_from", ["none"])
        cache_to_list = build_cfg.get("cache_to", ["none"])
        print(f"  - {svc}")
        print(f"    image: {svc_config.get('image', 'N/A')}")
        print(f"    cache_from: {cache_from_list}")
        print(f"    cache_to: {cache_to_list}")
    if services_to_dedupe:
        print(
            f"Deduplicated {len(services_to_dedupe)} services (will use pre-built images):"
        )
        for svc in services_to_dedupe:
            print(f"  - {svc} -> {compose['services'][svc].get('image', 'N/A')}")


if __name__ == "__main__":
    main()
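For readers skimming the removed helper above, its core idea is the buildx cache-scope naming with hash, branch, and default-branch fallbacks. The following is an illustrative sketch only, not part of the repository; the function name and CLI-free interface are made up for the example, while the scope format and fallback order come from the script.

```python
DEFAULT_BRANCH = "dev"  # same default branch name the removed script uses


def cache_from_entries(
    component: str, target: str, component_hash: str, git_ref: str, base: str = "type=gha"
) -> list[str]:
    """Return buildx cache_from entries, most specific scope first."""
    # Normalize e.g. "refs/heads/my-feature" -> "my-feature"
    ref_scope = git_ref.replace("refs/heads/", "").replace("/", "-") if git_ref else ""
    entries: list[str] = []
    if component_hash:  # 1. exact content-hash match
        entries.append(f"{base},scope=platform-{component}-{target}-{component_hash}")
    if ref_scope:  # 2. branch-based fallback
        entries.append(f"{base},scope=platform-{component}-{target}-{ref_scope}")
    if ref_scope and ref_scope != DEFAULT_BRANCH:  # 3. default-branch fallback
        entries.append(f"{base},scope=platform-{component}-{target}-{DEFAULT_BRANCH}")
    return entries or [base]


if __name__ == "__main__":
    # e.g. a backend "server" target built from a feature branch
    print(cache_from_entries("backend", "server", "abc123", "refs/heads/my-feature"))
```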
Changed file (name not captured in this view; from its contents this appears to be the backend Dockerfile, autogpt_platform/backend/Dockerfile):

@@ -1,5 +1,3 @@
-# ============================ DEPENDENCY BUILDER ============================ #
-
 FROM debian:13-slim AS builder
 
 # Set environment variables
@@ -53,9 +51,7 @@ COPY autogpt_platform/backend/backend/data/partial_types.py ./backend/data/parti
 COPY autogpt_platform/backend/gen_prisma_types_stub.py ./
 RUN poetry run prisma generate && poetry run gen-prisma-stub
 
-# ============================== BACKEND SERVER ============================== #
-
-FROM debian:13-slim AS server
+FROM debian:13-slim AS server_dependencies
 
 WORKDIR /app
 
@@ -67,14 +63,15 @@ ENV POETRY_HOME=/opt/poetry \
 ENV PATH=/opt/poetry/bin:$PATH
 
 # Install Python, FFmpeg, and ImageMagick (required for video processing blocks)
-# Using --no-install-recommends saves ~650MB by skipping unnecessary deps like llvm, mesa, etc.
-RUN apt-get update && apt-get install -y --no-install-recommends \
+RUN apt-get update && apt-get install -y \
    python3.13 \
    python3-pip \
    ffmpeg \
    imagemagick \
    && rm -rf /var/lib/apt/lists/*
 
+# Copy only necessary files from builder
+COPY --from=builder /app /app
 COPY --from=builder /usr/local/lib/python3* /usr/local/lib/python3*
 COPY --from=builder /usr/local/bin/poetry /usr/local/bin/poetry
 # Copy Node.js installation for Prisma
@@ -84,54 +81,30 @@ COPY --from=builder /usr/bin/npm /usr/bin/npm
 COPY --from=builder /usr/bin/npx /usr/bin/npx
 COPY --from=builder /root/.cache/prisma-python/binaries /root/.cache/prisma-python/binaries
 
-WORKDIR /app/autogpt_platform/backend
-
-# Copy only the .venv from builder (not the entire /app directory)
-# The .venv includes the generated Prisma client
-COPY --from=builder /app/autogpt_platform/backend/.venv ./.venv
 ENV PATH="/app/autogpt_platform/backend/.venv/bin:$PATH"
 
-# Copy dependency files + autogpt_libs (path dependency)
-COPY autogpt_platform/autogpt_libs /app/autogpt_platform/autogpt_libs
-COPY autogpt_platform/backend/poetry.lock autogpt_platform/backend/pyproject.toml ./
-
-# Copy backend code + docs (for Copilot docs search)
-COPY autogpt_platform/backend ./
+RUN mkdir -p /app/autogpt_platform/autogpt_libs
+RUN mkdir -p /app/autogpt_platform/backend
+
+COPY autogpt_platform/autogpt_libs /app/autogpt_platform/autogpt_libs
+COPY autogpt_platform/backend/poetry.lock autogpt_platform/backend/pyproject.toml /app/autogpt_platform/backend/
+
+WORKDIR /app/autogpt_platform/backend
+
+FROM server_dependencies AS migrate
+
+# Migration stage only needs schema and migrations - much lighter than full backend
+COPY autogpt_platform/backend/schema.prisma /app/autogpt_platform/backend/
+COPY autogpt_platform/backend/backend/data/partial_types.py /app/autogpt_platform/backend/backend/data/partial_types.py
+COPY autogpt_platform/backend/migrations /app/autogpt_platform/backend/migrations
+
+FROM server_dependencies AS server
+
+COPY autogpt_platform/backend /app/autogpt_platform/backend
 COPY docs /app/docs
 RUN poetry install --no-ansi --only-root
 
 ENV PORT=8000
 
 CMD ["poetry", "run", "rest"]
-
-# =============================== DB MIGRATOR =============================== #
-
-# Lightweight migrate stage - only needs Prisma CLI, not full Python environment
-FROM debian:13-slim AS migrate
-
-WORKDIR /app/autogpt_platform/backend
-
-ENV DEBIAN_FRONTEND=noninteractive
-
-# Install only what's needed for prisma migrate: Node.js and minimal Python for prisma-python
-RUN apt-get update && apt-get install -y --no-install-recommends \
-    python3.13 \
-    python3-pip \
-    ca-certificates \
-    && rm -rf /var/lib/apt/lists/*
-
-# Copy Node.js from builder (needed for Prisma CLI)
-COPY --from=builder /usr/bin/node /usr/bin/node
-COPY --from=builder /usr/lib/node_modules /usr/lib/node_modules
-COPY --from=builder /usr/bin/npm /usr/bin/npm
-
-# Copy Prisma binaries
-COPY --from=builder /root/.cache/prisma-python/binaries /root/.cache/prisma-python/binaries
-
-# Install prisma-client-py directly (much smaller than copying full venv)
-RUN pip3 install prisma>=0.15.0 --break-system-packages
-
-COPY autogpt_platform/backend/schema.prisma ./
-COPY autogpt_platform/backend/backend/data/partial_types.py ./backend/data/partial_types.py
-COPY autogpt_platform/backend/gen_prisma_types_stub.py ./
-COPY autogpt_platform/backend/migrations ./migrations
Changed file (name not captured in this view; this is the source file that defines ClaudeCodeBlock):

@@ -1,4 +1,6 @@
+import base64
 import json
+import logging
 import shlex
 import uuid
 from typing import Literal, Optional
@@ -21,6 +23,11 @@ from backend.data.model import (
 )
 from backend.integrations.providers import ProviderName
 
+logger = logging.getLogger(__name__)
+
+# Maximum size for binary files to extract (50MB)
+MAX_BINARY_FILE_SIZE = 50 * 1024 * 1024
+
 
 class ClaudeCodeExecutionError(Exception):
     """Exception raised when Claude Code execution fails.
@@ -180,7 +187,9 @@ class ClaudeCodeBlock(Block):
         path: str
         relative_path: str  # Path relative to working directory (for GitHub, etc.)
         name: str
-        content: str
+        content: str  # Text content for text files, empty string for binary files
+        is_binary: bool = False  # True if this is a binary file
+        content_base64: Optional[str] = None  # Base64-encoded content for binary files
 
     class Output(BlockSchemaOutput):
         response: str = SchemaField(
@@ -188,8 +197,11 @@ class ClaudeCodeBlock(Block):
         )
         files: list["ClaudeCodeBlock.FileOutput"] = SchemaField(
             description=(
-                "List of text files created/modified by Claude Code during this execution. "
-                "Each file has 'path', 'relative_path', 'name', and 'content' fields."
+                "List of files created/modified by Claude Code during this execution. "
+                "Each file has 'path', 'relative_path', 'name', 'content', 'is_binary', "
+                "and 'content_base64' fields. For text files, 'content' contains the text "
+                "and 'is_binary' is False. For binary files (PDFs, images, etc.), "
+                "'is_binary' is True and 'content_base64' contains the base64-encoded data."
            )
        )
        conversation_history: str = SchemaField(
@@ -252,6 +264,8 @@ class ClaudeCodeBlock(Block):
                        "relative_path": "index.html",
                        "name": "index.html",
                        "content": "<html>Hello World</html>",
+                        "is_binary": False,
+                        "content_base64": None,
                    }
                ],
            ),
@@ -272,6 +286,8 @@ class ClaudeCodeBlock(Block):
                        relative_path="index.html",
                        name="index.html",
                        content="<html>Hello World</html>",
+                        is_binary=False,
+                        content_base64=None,
                    )
                ],  # files
                "User: Create a hello world HTML file\n"
@@ -531,7 +547,6 @@ class ClaudeCodeBlock(Block):
            ".env",
            ".gitignore",
            ".dockerfile",
-            "Dockerfile",
            ".vue",
            ".svelte",
            ".astro",
@@ -540,6 +555,44 @@ class ClaudeCodeBlock(Block):
            ".tex",
            ".csv",
            ".log",
+            ".svg",  # SVG is XML-based text
+        }
+
+        # Binary file extensions we can read and base64-encode
+        binary_extensions = {
+            # Images
+            ".png",
+            ".jpg",
+            ".jpeg",
+            ".gif",
+            ".webp",
+            ".ico",
+            ".bmp",
+            ".tiff",
+            ".tif",
+            # Documents
+            ".pdf",
+            # Archives (useful for downloads)
+            ".zip",
+            ".tar",
+            ".gz",
+            ".7z",
+            # Audio/Video (if small enough)
+            ".mp3",
+            ".wav",
+            ".mp4",
+            ".webm",
+            # Other binary formats
+            ".woff",
+            ".woff2",
+            ".ttf",
+            ".otf",
+            ".eot",
+            ".bin",
+            ".exe",
+            ".dll",
+            ".so",
+            ".dylib",
        }
 
        try:
@@ -564,10 +617,26 @@ class ClaudeCodeBlock(Block):
                if not file_path:
                    continue
 
-                # Check if it's a text file we can read
+                # Check if it's a text file we can read (case-insensitive)
+                file_path_lower = file_path.lower()
                is_text = any(
-                    file_path.endswith(ext) for ext in text_extensions
-                ) or file_path.endswith("Dockerfile")
+                    file_path_lower.endswith(ext) for ext in text_extensions
+                ) or file_path_lower.endswith("dockerfile")
+
+                # Check if it's a binary file we should extract
+                is_binary = any(
+                    file_path_lower.endswith(ext) for ext in binary_extensions
+                )
+
+                # Helper to extract filename and relative path
+                def get_file_info(path: str, work_dir: str) -> tuple[str, str]:
+                    name = path.split("/")[-1]
+                    rel_path = path
+                    if path.startswith(work_dir):
+                        rel_path = path[len(work_dir) :]
+                        if rel_path.startswith("/"):
+                            rel_path = rel_path[1:]
+                    return name, rel_path
 
                if is_text:
                    try:
@@ -576,32 +645,72 @@ class ClaudeCodeBlock(Block):
                        if isinstance(content, bytes):
                            content = content.decode("utf-8", errors="replace")
 
-                        # Extract filename from path
-                        file_name = file_path.split("/")[-1]
-                        # Calculate relative path by stripping working directory
-                        relative_path = file_path
-                        if file_path.startswith(working_directory):
-                            relative_path = file_path[len(working_directory) :]
-                            # Remove leading slash if present
-                            if relative_path.startswith("/"):
-                                relative_path = relative_path[1:]
+                        file_name, relative_path = get_file_info(
+                            file_path, working_directory
+                        )
 
                        files.append(
                            ClaudeCodeBlock.FileOutput(
                                path=file_path,
                                relative_path=relative_path,
                                name=file_name,
                                content=content,
+                                is_binary=False,
+                                content_base64=None,
                            )
                        )
-                    except Exception:
-                        # Skip files that can't be read
-                        pass
+                    except Exception as e:
+                        logger.warning(f"Failed to read text file {file_path}: {e}")
+                elif is_binary:
+                    try:
+                        # Check file size before reading to avoid OOM
+                        stat_result = await sandbox.commands.run(
+                            f"stat -c %s {shlex.quote(file_path)} 2>/dev/null"
+                        )
+                        if stat_result.exit_code != 0 or not stat_result.stdout:
+                            logger.warning(
+                                f"Skipping binary file {file_path}: "
+                                f"could not determine file size"
+                            )
+                            continue
+                        file_size = int(stat_result.stdout.strip())
+                        if file_size > MAX_BINARY_FILE_SIZE:
+                            logger.warning(
+                                f"Skipping binary file {file_path}: "
+                                f"size {file_size} exceeds limit "
+                                f"{MAX_BINARY_FILE_SIZE}"
+                            )
+                            continue
+
+                        # Read binary file as bytes using format="bytes"
+                        content_bytes = await sandbox.files.read(
+                            file_path, format="bytes"
+                        )
+
+                        # Base64 encode the binary content
+                        content_b64 = base64.b64encode(content_bytes).decode(
+                            "ascii"
+                        )
+
+                        file_name, relative_path = get_file_info(
+                            file_path, working_directory
+                        )
+                        files.append(
+                            ClaudeCodeBlock.FileOutput(
+                                path=file_path,
+                                relative_path=relative_path,
+                                name=file_name,
+                                content="",  # Empty for binary files
+                                is_binary=True,
+                                content_base64=content_b64,
+                            )
+                        )
+                    except Exception as e:
+                        logger.warning(
+                            f"Failed to read binary file {file_path}: {e}"
+                        )
 
-        except Exception:
-            # If file extraction fails, return empty results
-            pass
+        except Exception as e:
+            logger.warning(f"File extraction failed: {e}")
 
        return files
Changed file (name not captured in this view; a docker-compose file defining the migrate service):

@@ -37,7 +37,7 @@ services:
      context: ../
      dockerfile: autogpt_platform/backend/Dockerfile
      target: migrate
-    command: ["sh", "-c", "prisma generate && python3 gen_prisma_types_stub.py && prisma migrate deploy"]
+    command: ["sh", "-c", "poetry run prisma generate && poetry run gen-prisma-stub && poetry run prisma migrate deploy"]
    develop:
      watch:
        - path: ./
@@ -56,7 +56,7 @@ services:
      test:
        [
          "CMD-SHELL",
-          "prisma migrate status | grep -q 'No pending migrations' || exit 1",
+          "poetry run prisma migrate status | grep -q 'No pending migrations' || exit 1",
        ]
      interval: 30s
      timeout: 10s
Changed file (block documentation; name not captured in this view):

@@ -16,7 +16,7 @@ When activated, the block:
   - Install dependencies (npm, pip, etc.)
   - Run terminal commands
   - Build and test applications
-5. Extracts all text files created/modified during execution
+5. Extracts all text and binary files created/modified during execution
 6. Returns the response and files, optionally keeping the sandbox alive for follow-up tasks
 
 The block supports conversation continuation through three mechanisms:
@@ -42,7 +42,7 @@ The block supports conversation continuation through three mechanisms:
 | Output | Description |
 |--------|-------------|
 | Response | The output/response from Claude Code execution |
-| Files | List of text files created/modified during execution. Each file includes path, relative_path, name, and content fields |
+| Files | List of files created/modified during execution. Each file includes path, relative_path, name, content, is_binary, and content_base64 fields. For text files, content contains the text and is_binary is False. For binary files (PDFs, images, etc.), is_binary is True and content_base64 contains the base64-encoded data |
 | Conversation History | Full conversation history including this turn. Use to restore context on a fresh sandbox |
 | Session ID | Session ID for this conversation. Pass back with sandbox_id to continue the conversation |
 | Sandbox ID | ID of the sandbox instance (null if disposed). Pass back with session_id to continue the conversation |
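To make the new Files shape concrete, here is an illustrative sketch of how a consumer might persist one entry of that output. The field names come from the table above; the helper name and the plain-dict representation are assumptions for the example, not an API of the block itself.

```python
import base64
from pathlib import Path


def save_file_output(file: dict, dest_dir: str) -> Path:
    """Write one Files entry to disk, decoding base64 for binary files."""
    dest = Path(dest_dir) / file["relative_path"]
    dest.parent.mkdir(parents=True, exist_ok=True)
    if file.get("is_binary"):
        # Binary files (PDFs, images, ...) carry their payload in content_base64
        dest.write_bytes(base64.b64decode(file["content_base64"]))
    else:
        # Text files carry their payload in content
        dest.write_text(file["content"], encoding="utf-8")
    return dest
```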
Changed file (block reference documentation; name not captured in this view):

@@ -535,7 +535,7 @@ When activated, the block:
 2. Installs the latest version of Claude Code in the sandbox
 3. Optionally runs setup commands to prepare the environment
 4. Executes your prompt using Claude Code, which can create/edit files, install dependencies, run terminal commands, and build applications
-5. Extracts all text files created/modified during execution
+5. Extracts all text and binary files created/modified during execution
 6. Returns the response and files, optionally keeping the sandbox alive for follow-up tasks
 
 The block supports conversation continuation through three mechanisms:
@@ -563,7 +563,7 @@ The block supports conversation continuation through three mechanisms:
 |--------|-------------|------|
 | error | Error message if execution failed | str |
 | response | The output/response from Claude Code execution | str |
-| files | List of text files created/modified by Claude Code during this execution. Each file has 'path', 'relative_path', 'name', and 'content' fields. | List[FileOutput] |
+| files | List of files created/modified by Claude Code during this execution. Each file has 'path', 'relative_path', 'name', 'content', 'is_binary', and 'content_base64' fields. For text files, 'content' contains the text and 'is_binary' is False. For binary files (PDFs, images, etc.), 'is_binary' is True and 'content_base64' contains the base64-encoded data. | List[FileOutput] |
 | conversation_history | Full conversation history including this turn. Pass this to conversation_history input to continue on a fresh sandbox if the previous sandbox timed out. | str |
 | session_id | Session ID for this conversation. Pass this back along with sandbox_id to continue the conversation. | str |
 | sandbox_id | ID of the sandbox instance. Pass this back along with session_id to continue the conversation. This is None if dispose_sandbox was True (sandbox was disposed). | str |
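The continuation outputs in the table above chain back into the block's inputs. A rough sketch of that flow follows; `run_claude_code_block` is a stand-in for however the block is invoked in your setup and is not a real API in this repository.

```python
def run_follow_up(run_claude_code_block, first_prompt: str, follow_up_prompt: str) -> dict:
    """Run one Claude Code turn, then continue it using the documented outputs."""
    first = run_claude_code_block(prompt=first_prompt)

    if first["sandbox_id"] is not None:
        # Sandbox kept alive: continue in place with session_id + sandbox_id
        return run_claude_code_block(
            prompt=follow_up_prompt,
            session_id=first["session_id"],
            sandbox_id=first["sandbox_id"],
        )

    # Sandbox disposed or timed out: restore context on a fresh sandbox
    return run_claude_code_block(
        prompt=follow_up_prompt,
        conversation_history=first["conversation_history"],
    )
```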
Deleted file (@@ -1,165 +0,0 @@; name not captured in this view). Its content, a markdown implementation plan, follows:

# Implementation Plan: SECRT-1950 - Apply E2E CI Optimizations to Claude Code Workflows

## Ticket
[SECRT-1950](https://linear.app/autogpt/issue/SECRT-1950)

## Summary
Apply Pwuts's CI performance optimizations from PR #12090 to Claude Code workflows.

## Reference PR
https://github.com/Significant-Gravitas/AutoGPT/pull/12090

---

## Analysis

### Current State (claude.yml)

**pnpm caching (lines 104-118):**
```yaml
- name: Set up Node.js
  uses: actions/setup-node@v6
  with:
    node-version: "22"

- name: Enable corepack
  run: corepack enable

- name: Set pnpm store directory
  run: |
    pnpm config set store-dir ~/.pnpm-store
    echo "PNPM_HOME=$HOME/.pnpm-store" >> $GITHUB_ENV

- name: Cache frontend dependencies
  uses: actions/cache@v5
  with:
    path: ~/.pnpm-store
    key: ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml', 'autogpt_platform/frontend/package.json') }}
    restore-keys: |
      ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
      ${{ runner.os }}-pnpm-
```

**Docker setup (lines 134-165):**
- Uses `docker-buildx-action@v3`
- Has manual Docker image caching via `actions/cache`
- Runs `docker compose up` without buildx bake optimization

### Pwuts's Optimizations (PR #12090)

1. **Simplified pnpm caching** - Use `setup-node` built-in cache:
```yaml
- name: Enable corepack
  run: corepack enable

- name: Set up Node
  uses: actions/setup-node@v6
  with:
    node-version: "22.18.0"
    cache: "pnpm"
    cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml
```

2. **Docker build caching via buildx bake**:
```yaml
- name: Set up Docker Buildx
  uses: docker/setup-buildx-action@v3
  with:
    driver: docker-container
    driver-opts: network=host

- name: Expose GHA cache to docker buildx CLI
  uses: crazy-max/ghaction-github-runtime@v3

- name: Build Docker images (with cache)
  run: |
    pip install pyyaml
    docker compose -f docker-compose.yml config > docker-compose.resolved.yml
    python ../.github/workflows/scripts/docker-ci-fix-compose-build-cache.py \
      --source docker-compose.resolved.yml \
      --cache-from "type=gha" \
      --cache-to "type=gha,mode=max" \
      ...
    docker buildx bake --allow=fs.read=.. -f docker-compose.resolved.yml --load
```

---

## Proposed Changes

### 1. Update pnpm caching in `claude.yml`

**Before:**
- Manual cache key generation
- Separate `actions/cache` step
- Manual pnpm store directory config

**After:**
- Use `setup-node` built-in `cache: "pnpm"` option
- Remove manual cache step
- Keep `corepack enable` before `setup-node`

### 2. Update Docker build in `claude.yml`

**Before:**
- Manual Docker layer caching via `actions/cache` with `/tmp/.buildx-cache`
- Simple `docker compose build`

**After:**
- Use `crazy-max/ghaction-github-runtime@v3` to expose GHA cache
- Use `docker-ci-fix-compose-build-cache.py` script
- Build with `docker buildx bake`

### 3. Apply same changes to other Claude workflows

- `claude-dependabot.yml` - Check if it has similar patterns
- `claude-ci-failure-auto-fix.yml` - Check if it has similar patterns
- `copilot-setup-steps.yml` - Reusable workflow, may be the source of truth

---

## Files to Modify

1. `.github/workflows/claude.yml`
2. `.github/workflows/claude-dependabot.yml` (if applicable)
3. `.github/workflows/claude-ci-failure-auto-fix.yml` (if applicable)

## Dependencies

- PR #12090 must be merged first (provides the `docker-ci-fix-compose-build-cache.py` script)
- Backend Dockerfile optimizations (already in PR #12090)

---

## Test Plan

1. Create PR with changes
2. Trigger Claude workflow manually or via `@claude` mention on a test issue
3. Compare CI runtime before/after
4. Verify Claude agent still works correctly (can checkout, build, run tests)

---

## Risk Assessment

**Low risk:**
- These are CI infrastructure changes, not code changes
- If caching fails, builds fall back to uncached (slower but works)
- Changes mirror proven patterns from PR #12090

---

## Questions for Reviewer

1. Should we wait for PR #12090 to merge before creating this PR?
2. Does `copilot-setup-steps.yml` need updating, or is it a separate concern?
3. Any concerns about cache key collisions between frontend E2E and Claude workflows?

---

## Verified

- ✅ **`claude-dependabot.yml`**: Has same pnpm caching pattern as `claude.yml` (manual `actions/cache`) — NEEDS UPDATE
- ✅ **`claude-ci-failure-auto-fix.yml`**: Simple workflow with no pnpm or Docker caching — NO CHANGES NEEDED
- ✅ **Script path**: `docker-ci-fix-compose-build-cache.py` will be at `.github/workflows/scripts/` after PR #12090 merges
- ✅ **Test seed caching**: NOT APPLICABLE — Claude workflows spin up a dev environment but don't run E2E tests with pre-seeded data. The seed caching in PR #12090 is specific to the frontend E2E test suite which needs consistent test data. Claude just needs the services running.