mirror of
https://github.com/zama-ai/concrete.git
synced 2026-04-17 03:00:54 -04:00
This is done to handle downloading of documentation artifacts. Doing this between separate workflows is troublesome, especially when you have to wait on several of them.
238 lines · 8.3 KiB · YAML
# Build on an AWS instance (CPU).
name: AwsEc2BuildCpu
# Manually dispatched only; all inputs are plain strings supplied by the caller
# (request_id is described as a Slab request ID, so dispatch likely comes from
# the Slab CI orchestrator — confirm against the triggering workflow).
# NOTE: `on` is a YAML 1.1 boolean-looking key; GitHub's loader handles it, so
# yamllint `truthy` should be suppressed here rather than quoting the key.
on:
  workflow_dispatch:
    inputs:
      instance_id:
        description: 'Instance ID'
        type: string
      instance_image_id:
        description: 'Instance AMI ID'
        type: string
      instance_type:
        description: 'Instance product type'
        type: string
      runner_name:
        description: 'Action runner name'
        type: string
      request_id:
        description: 'Slab request ID'
        type: string
      matrix_item:
        description: 'Build matrix item'
        type: string
# Docker image used by the build, test, and doc steps below.
env:
  DOCKER_IMAGE_TEST: ghcr.io/zama-ai/concrete-compiler
jobs:
  # Build and test the compiler inside the concrete-compiler Docker image on
  # the EC2 runner whose name is passed in via workflow_dispatch inputs.
  BuildAndTest:
    name: Build and test compiler in EC2
    concurrency:
      group: ${{ github.ref }}_${{ github.event.inputs.instance_image_id }}_${{ github.event.inputs.instance_type }}
      cancel-in-progress: true
    runs-on: ${{ github.event.inputs.runner_name }}
    if: ${{ !cancelled() }}
    steps:
      # Echo the dispatch inputs so the instance configuration is visible in logs.
      - name: Instance configuration used
        run: |
          echo "IDs: ${{ inputs.instance_id }}"
          echo "AMI: ${{ inputs.instance_image_id }}"
          echo "Type: ${{ inputs.instance_type }}"
          echo "Request ID: ${{ inputs.request_id }}"
          echo "Matrix item: ${{ inputs.matrix_item }}"

      # A SSH private key is required as some dependencies are from private repos
      - name: Set up SSH agent
        uses: webfactory/ssh-agent@v0.5.2
        with:
          ssh-private-key: ${{ secrets.CONCRETE_COMPILER_CI_SSH_PRIVATE }}

      - name: Set up env
        # "Install rust" step requires root user to have a HOME directory which is not set.
        run: |
          echo "HOME=/home/ubuntu" >> "${GITHUB_ENV}"
          #echo "SSH_AUTH_SOCK=$SSH_AUTH_SOCK)" >> "${GITHUB_ENV}"
          echo "SSH_AUTH_SOCK_DIR=$(dirname $SSH_AUTH_SOCK)" >> "${GITHUB_ENV}"

      # Free 4Gb of workspace
      - name: Freeing space
        run: |
          df -h
          for image in ubuntu:{16,18}.04 \
            node:{12,14,16}{-alpine,} \
            buildpack-deps:{stretch,buster,bullseye} \
            debian:{9,10,11} alpine:3.{12,13,14} \
            moby/buildkit:latest docker:20.10
          do
            docker image rm $image || echo Please clean remove it from this step
          done
          df -h

      - name: Fetch repository
        uses: actions/checkout@v3
        with:
          submodules: recursive
          token: ${{ secrets.GH_TOKEN }}

      - name: Install rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true

      - name: Create build dir
        run: mkdir build

      # Run the compiler build inside the container; the workspace checkout and
      # the SSH agent socket are bind-mounted in, and build artifacts land in
      # the host-side ./build directory for the later test/doc steps.
      - name: Build compiler
        uses: addnab/docker-run-action@v3
        id: build-compiler
        with:
          registry: ghcr.io
          image: ${{ env.DOCKER_IMAGE_TEST }}
          username: ${{ secrets.GHCR_LOGIN }}
          password: ${{ secrets.GHCR_PASSWORD }}
          options: >-
            -v ${{ github.workspace }}/llvm-project:/llvm-project
            -v ${{ github.workspace }}/compiler:/compiler
            -v ${{ github.workspace }}/build:/build
            -v ${{ env.SSH_AUTH_SOCK }}:/ssh.socket
            -e SSH_AUTH_SOCK=/ssh.socket
            ${{ env.DOCKER_GPU_OPTION }}
          shell: bash
          run: |
            set -e
            cd /compiler
            rm -rf /build/*
            pip install pytest
            sed "s/pytest/python -m pytest/g" -i Makefile
            make DATAFLOW_EXECUTION_ENABLED=ON CCACHE=ON Python3_EXECUTABLE=$PYTHON_EXEC BUILD_DIR=/build all build-end-to-end-dataflow-tests
            echo "Debug: ccache statistics (after the build):"
            ccache -s

      # Best effort: seed the key-set cache from the last uploaded artifact,
      # unless the branch name opts out via 'newkeysetcache'.
      - name: Download KeySetCache
        if: ${{ !contains(github.head_ref, 'newkeysetcache') }}
        continue-on-error: true
        run: |
          cd compiler
          GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }} make keysetcache_ci_populated

      # Timestamp marker used by the prune step to drop cache entries not
      # touched by this run's tests.
      - name: Mark KeySetCache
        run: |
          touch keysetcache.timestamp

      - name: Test compiler
        uses: addnab/docker-run-action@v3
        with:
          registry: ghcr.io
          image: ${{ env.DOCKER_IMAGE_TEST }}
          username: ${{ secrets.GHCR_LOGIN }}
          password: ${{ secrets.GHCR_PASSWORD }}
          options: >-
            -v ${{ github.workspace }}/llvm-project:/llvm-project
            -v ${{ github.workspace }}/compiler:/compiler
            -v ${{ github.workspace }}/KeySetCache:/tmp/KeySetCache
            -v ${{ github.workspace }}/build:/build
            ${{ env.DOCKER_GPU_OPTION }}
          shell: bash
          run: |
            set -e
            cd /compiler
            pip install pytest
            mkdir -p /tmp/concrete_compiler/gpu_tests/
            make DATAFLOW_EXECUTION_ENABLED=ON CCACHE=ON Python3_EXECUTABLE=$PYTHON_EXEC BUILD_DIR=/build run-tests run-end-to-end-dataflow-tests
            chmod -R ugo+rwx /tmp/KeySetCache

      # Delete cache directories older than the timestamp marker created above.
      - name: Prune KeySetCache
        run: |
          echo "Previous cache size is"
          du -sh KeySetCache
          echo "Cleaning"
          find KeySetCache/* -maxdepth 1 -mindepth 1 -not -newer keysetcache.timestamp -type d -exec rm -vr {} \;
          echo "New cache size is"
          du -sh KeySetCache

      - name: Upload KeySetCache
        if: ${{ github.ref == 'refs/heads/main' }}
        uses: actions/upload-artifact@v3
        with:
          name: KeySetCacheV2
          path: KeySetCache
          retention-days: 90

      # Keep only the most recent KeySetCacheV2 artifact on main.
      - name: Cleanup Old KeySetCache
        uses: Remagpie/gha-remove-artifact@v1
        if: ${{ github.ref == 'refs/heads/main' }}
        with:
          only-name: KeySetCacheV2
          max-count: 1

      # Docs are built in the same container and uploaded as an artifact so the
      # PublishDoc job below can publish them without rebuilding.
      - name: Build the documentation
        id: build-doc
        if: ${{ steps.build-compiler.outcome == 'success' && !cancelled() }}
        uses: addnab/docker-run-action@v3
        with:
          registry: ghcr.io
          image: ${{ env.DOCKER_IMAGE_TEST }}
          username: ${{ secrets.GHCR_LOGIN }}
          password: ${{ secrets.GHCR_PASSWORD }}
          options: >-
            -v ${{ github.workspace }}/compiler:/compiler
            -v ${{ github.workspace }}/llvm-project:/llvm-project
            -v ${{ github.workspace }}/docs:/docs
          shell: bash
          run: |
            set -e
            rm -rf /build
            make DATAFLOW_EXECUTION_ENABLED=ON CCACHE=ON Python3_EXECUTABLE=$PYTHON_EXEC BUILD_DIR=/build concretecompiler python-bindings doc
            cd /docs
            pip install -r requirements.txt
            pip install -r ../llvm-project/mlir/python/requirements.txt
            dnf install -y doxygen
            sed "s/sphinx-apidoc/python -m sphinx.ext.apidoc/g" -i Makefile
            sed "s/sphinx-build/python -m sphinx.cmd.build/g" -i Makefile
            make COMPILER_BUILD_DIR=/build/ doc

      - name: Archive docs artifacts
        if: ${{ steps.build-doc.outcome == 'success' && !cancelled() }}
        uses: actions/upload-artifact@v3
        with:
          name: html-docs
          path: docs/_build/html
PublishDoc:
|
|
runs-on: ubuntu-20.04
|
|
needs: BuildAndTest
|
|
if: ${{ startsWith(github.ref_name, 'v') || github.ref_name == 'main' }}
|
|
steps:
|
|
- name: Download Documentation
|
|
id: download
|
|
uses: actions/download-artifact@v3
|
|
with:
|
|
name: html-docs
|
|
|
|
- name: Publish Documentation to S3
|
|
id: publish
|
|
if: ${{ steps.download.outcome == 'success' && !cancelled() }}
|
|
uses: jakejarvis/s3-sync-action@be0c4ab89158cac4278689ebedd8407dd5f35a83
|
|
with:
|
|
args: --acl public-read
|
|
env:
|
|
AWS_S3_BUCKET: ${{ secrets.AWS_PREPROD_REPO_DOCUMENTATION_BUCKET_NAME }}
|
|
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
|
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
|
AWS_REGION: ${{ secrets.AWS_REGION }}
|
|
SOURCE_DIR: '.'
|
|
DEST_DIR: 'concrete-compiler/${{ github.ref_name }}'
|
|
|
|
- name: Invalidate CloudFront Cache
|
|
if: ${{ steps.publish.outcome == 'success' }}
|
|
uses: awact/cloudfront-action@8bcfabc7b4bbc0cb8e55e48527f0e3a6d681627c
|
|
env:
|
|
SOURCE_PATH: '/concrete-compiler/*'
|
|
AWS_REGION: ${{ secrets.AWS_REGION }}
|
|
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
|
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
|
DISTRIBUTION_ID: ${{ secrets.AWS_REPO_DOCUMENTATION_DISTRIBUTION_ID }}
|