Mirror of https://github.com/Significant-Gravitas/AutoGPT.git, synced 2026-02-12 07:45:14 -05:00
Compare commits
4 Commits: fix/claude ... pwuts/spee
| Author | SHA1 | Date |
|---|---|---|
| | 7cdbbdd65e | |
| | 6191ac0b1e | |
| | b51e87bc53 | |
| | 71f764f3d0 | |
.github/workflows/platform-frontend-ci.yml (vendored, 49 changed lines)
```diff
@@ -142,9 +142,6 @@ jobs:
   e2e_test:
     runs-on: big-boi
-    needs: setup
-    strategy:
-      fail-fast: false
 
     steps:
       - name: Checkout repository
```
```diff
@@ -174,29 +171,29 @@ jobs:
 
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v3
+        with:
+          driver-opts: network=host
 
-      - name: Cache Docker layers
-        uses: actions/cache@v5
-        with:
-          path: /tmp/.buildx-cache
-          key: ${{ runner.os }}-buildx-frontend-test-${{ hashFiles('autogpt_platform/docker-compose.yml', 'autogpt_platform/backend/Dockerfile', 'autogpt_platform/backend/pyproject.toml', 'autogpt_platform/backend/poetry.lock') }}
-          restore-keys: |
-            ${{ runner.os }}-buildx-frontend-test-
+      - name: Build Docker images with cache
+        working-directory: autogpt_platform
+        run: |
+          pip install pyyaml
+          python ../.github/workflows/scripts/generate-docker-ci-compose.py \
+            --source docker-compose.platform.yml \
+            --output docker-compose.ci.yml \
+            --cache-from "type=gha" \
+            --cache-to "type=gha,mode=max" \
+            --backend-scope "platform-backend-${{ hashFiles('autogpt_platform/backend/Dockerfile', 'autogpt_platform/backend/poetry.lock', 'autogpt_platform/backend/backend') }}" \
+            --frontend-scope "platform-frontend-${{ hashFiles('autogpt_platform/frontend/Dockerfile', 'autogpt_platform/frontend/pnpm-lock.yaml', 'autogpt_platform/frontend/src') }}"
+
+          docker buildx bake --allow=fs.read=.. -f docker-compose.yml -f docker-compose.ci.yml --load
+        env:
+          NEXT_PUBLIC_PW_TEST: true
 
       - name: Run docker compose
-        run: |
-          NEXT_PUBLIC_PW_TEST=true docker compose -f ../docker-compose.yml up -d
+        run: docker compose -f ../docker-compose.yml up -d --no-build
         env:
-          DOCKER_BUILDKIT: 1
-          BUILDX_CACHE_FROM: type=local,src=/tmp/.buildx-cache
-          BUILDX_CACHE_TO: type=local,dest=/tmp/.buildx-cache-new,mode=max
-
-      - name: Move cache
-        run: |
-          rm -rf /tmp/.buildx-cache
-          if [ -d "/tmp/.buildx-cache-new" ]; then
-            mv /tmp/.buildx-cache-new /tmp/.buildx-cache
-          fi
+          NEXT_PUBLIC_PW_TEST: true
 
       - name: Wait for services to be ready
         run: |
```
```diff
@@ -230,14 +227,14 @@ jobs:
           }
           fi
 
-      - name: Restore dependencies cache
+      - name: Cache pnpm store
         uses: actions/cache@v5
         with:
           path: ~/.pnpm-store
-          key: ${{ needs.setup.outputs.cache-key }}
+          # Use separate cache key for big-boi runner since it doesn't share cache with ubuntu-latest
+          key: big-boi-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
           restore-keys: |
-            ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
-            ${{ runner.os }}-pnpm-
+            big-boi-pnpm-
 
       - name: Install dependencies
         run: pnpm install --frozen-lockfile
```
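The new "Build Docker images with cache" step above generates a docker-compose.ci.yml override and lets `docker buildx bake` merge it with the base compose file before loading the images. As a rough, non-authoritative sketch (the service names and `<hash>` placeholders below are hypothetical; the real values come from docker-compose.platform.yml and the `hashFiles()` expressions), the generated override has roughly this shape:

```python
# Illustrative sketch only: prints what the generated docker-compose.ci.yml
# override could look like for two hypothetical services. "rest_server" and
# "frontend" are assumed names, not taken from the real compose file.
import yaml

ci_override = {
    "services": {
        "rest_server": {  # hypothetical backend service with a build key
            "build": {
                "cache_from": ["type=gha,scope=platform-backend-<hash>"],
                "cache_to": ["type=gha,mode=max,scope=platform-backend-<hash>"],
            }
        },
        "frontend": {  # hypothetical frontend service with a build key
            "build": {
                "cache_from": ["type=gha,scope=platform-frontend-<hash>"],
                "cache_to": ["type=gha,mode=max,scope=platform-frontend-<hash>"],
            }
        },
    }
}

# docker buildx bake merges an override like this on top of docker-compose.yml,
# so each image build reads from and writes to its per-scope GitHub Actions cache.
print(yaml.dump(ci_override, default_flow_style=False))
```

Using `type=gha` with a per-side `scope` keeps backend and frontend layer caches separate, which is what replaces the old local `/tmp/.buildx-cache` directory and the "Move cache" step.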
.github/workflows/scripts/generate-docker-ci-compose.py (vendored, new file, 93 lines)
@@ -0,0 +1,93 @@
```python
#!/usr/bin/env python3
"""
Generate a docker-compose.ci.yml with cache configuration for all services
that have a build key in the source compose file.
"""

import argparse

import yaml


def main():
    parser = argparse.ArgumentParser(
        description="Generate docker-compose cache override file"
    )
    parser.add_argument(
        "--source",
        default="docker-compose.platform.yml",
        help="Source compose file to read (default: docker-compose.platform.yml)",
    )
    parser.add_argument(
        "--output",
        default="docker-compose.ci.yml",
        help="Output compose file to write (default: docker-compose.ci.yml)",
    )
    parser.add_argument(
        "--cache-from",
        default="type=local,src=/tmp/.buildx-cache",
        help="Cache source configuration",
    )
    parser.add_argument(
        "--cache-to",
        default="type=local,dest=/tmp/.buildx-cache-new,mode=max",
        help="Cache destination configuration",
    )
    parser.add_argument(
        "--backend-scope",
        default="",
        help="GHA cache scope for backend services (e.g., platform-backend-{hash})",
    )
    parser.add_argument(
        "--frontend-scope",
        default="",
        help="GHA cache scope for frontend service (e.g., platform-frontend-{hash})",
    )
    args = parser.parse_args()

    with open(args.source, "r") as f:
        compose = yaml.safe_load(f)

    ci_compose = {"services": {}}
    for service_name, service_config in compose.get("services", {}).items():
        if "build" not in service_config:
            continue

        cache_from = args.cache_from
        cache_to = args.cache_to

        # Determine scope based on Dockerfile path
        if "type=gha" in args.cache_from or "type=gha" in args.cache_to:
            dockerfile = service_config["build"].get("dockerfile", "Dockerfile")
            if "frontend" in dockerfile:
                scope = args.frontend_scope
            elif "backend" in dockerfile:
                scope = args.backend_scope
            else:
                # Skip services that don't clearly match frontend/backend
                continue

            if scope:
                if "type=gha" in args.cache_from:
                    cache_from = f"{args.cache_from},scope={scope}"
                if "type=gha" in args.cache_to:
                    cache_to = f"{args.cache_to},scope={scope}"

        ci_compose["services"][service_name] = {
            "build": {
                "cache_from": [cache_from],
                "cache_to": [cache_to],
            }
        }

    with open(args.output, "w") as f:
        yaml.dump(ci_compose, f, default_flow_style=False)

    services = list(ci_compose["services"].keys())
    print(f"Generated {args.output} with cache config for {len(services)} services:")
    for svc in services:
        print(f"  - {svc}")


if __name__ == "__main__":
    main()
```
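As a usage sketch (not part of the change set), the snippet below runs the same selection and scoping logic on a small hypothetical in-memory compose definition instead of reading a file; the service names, Dockerfile paths, and hash suffixes are invented for illustration.

```python
# Minimal sketch, assuming a hypothetical compose file with one backend build,
# one frontend build, and one image-only service. It mirrors the script's scope
# logic without reading or writing any files.
import yaml

source = yaml.safe_load(
    """
services:
  rest_server:                       # hypothetical: dockerfile path contains "backend"
    build:
      context: ../backend
      dockerfile: backend/Dockerfile
  frontend:                          # hypothetical: dockerfile path contains "frontend"
    build:
      context: ../frontend
      dockerfile: frontend/Dockerfile
  redis:                             # no build key, so it is skipped entirely
    image: redis:7
"""
)

cache_from, cache_to = "type=gha", "type=gha,mode=max"
scopes = {"backend": "platform-backend-abc123", "frontend": "platform-frontend-def456"}

ci_compose = {"services": {}}
for name, cfg in source["services"].items():
    if "build" not in cfg:
        continue
    dockerfile = cfg["build"].get("dockerfile", "Dockerfile")
    if "frontend" in dockerfile:
        scope = scopes["frontend"]
    elif "backend" in dockerfile:
        scope = scopes["backend"]
    else:
        continue
    ci_compose["services"][name] = {
        "build": {
            "cache_from": [f"{cache_from},scope={scope}"],
            "cache_to": [f"{cache_to},scope={scope}"],
        }
    }

# Expected result: rest_server and frontend get scoped cache_from/cache_to
# entries; redis is absent because it has no build key.
print(yaml.dump(ci_compose, default_flow_style=False))
```

Services without a `build` key never appear in the override, and when GHA caching is requested, services whose Dockerfile path matches neither "frontend" nor "backend" are skipped as well.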