name: Continuous Integration Pipeline

on:
  push:
    branches: [main, test-ci]
    tags:
      - 'v*'
  pull_request:
    types: [opened, synchronize, reopened]

# DOCKER_IMAGE variables aren't used in BuildAndPushDockerImages because of https://github.com/actions/runner/issues/480
env:
  DOCKER_IMAGE_TEST: ghcr.io/zama-ai/concrete-compiler
  THIS_FILE: .github/workflows/continuous-integration.yml

jobs:
  ########################
  # Tests and formatting #
  ########################

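  # Checks C++, CMake, Python and Rust formatting, runs pylint, and lints line endings.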
  FormattingAndLinting:
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: recursive
          token: ${{ secrets.GH_TOKEN }}
      - name: Format with clang-format (Cpp)
        run: sudo apt install moreutils && .github/workflows/scripts/format_cpp.sh
      - name: Format with cmake-format (Cmake)
        run: pip3 install cmakelang && .github/workflows/scripts/format_cmake.sh
      - name: Format with black (Python)
        run: |
          cd compiler
          pip install -r lib/Bindings/Python/requirements_dev.txt
          make check-python-format
      - name: Lint with pylint (Python)
        run: |
          cd compiler
          # compiler requirements needed to lint
          pip install numpy
          make python-lint
      - name: Format with rustfmt (Rust)
        run: |
          cd compiler
          make check-rust-format
      - name: Linelint
        uses: fernandrone/linelint@0.0.4
        id: linelint

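  # Verifies that every source file starts with the expected license header.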
  CheckLicense:
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@v3
      - name: Check if sources include the license header
        run: .github/workflows/scripts/check_for_license.sh

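  # Builds the compiler inside the concrete-compiler docker image, runs the test
  # suites, and builds the HTML documentation that PublishDoc later uploads.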
  BuildAndTest:
    runs-on: ubuntu-20.04
    # We always want to run the tests unless cancelled, but only after the docker image build job.
    # If there is no new build, we use the old image; otherwise we use the new one.
    if: ${{ !cancelled() }}
    needs: [BuildAndPushDockerImages]
    steps:
      # Free 4 GB of workspace
      - name: Freeing space
        run: |
          df -h
          for image in ubuntu:{16,18}.04 \
            node:{12,14,16}{-alpine,} \
            buildpack-deps:{stretch,buster,bullseye} \
            debian:{9,10,11} alpine:3.{12,13,14} \
            moby/buildkit:latest docker:20.10
          do
            docker image rm $image || echo "$image not found, please remove it from this cleanup step"
          done
          df -h

      # An SSH private key is required as some dependencies come from private repos
      - uses: webfactory/ssh-agent@v0.5.2
        with:
          ssh-private-key: ${{ secrets.CONCRETE_COMPILER_CI_SSH_PRIVATE }}

      - uses: actions/checkout@v3
        with:
          submodules: recursive
          token: ${{ secrets.GH_TOKEN }}

      - name: Create build dir
        run: mkdir build

      - name: Set up ssh auth in docker
        run: |
          echo "SSH_AUTH_SOCK_DIR=$(dirname $SSH_AUTH_SOCK)" >> "${GITHUB_ENV}"

      - name: Build compiler
        uses: addnab/docker-run-action@v3
        id: build-compiler
        with:
          registry: ghcr.io
          image: ${{ env.DOCKER_IMAGE_TEST }}
          username: ${{ secrets.GHCR_LOGIN }}
          password: ${{ secrets.GHCR_PASSWORD }}
          options: >-
            -v ${{ github.workspace }}/llvm-project:/llvm-project
            -v ${{ github.workspace }}/compiler:/compiler
            -v ${{ github.workspace }}/build:/build
            -v ${{ env.SSH_AUTH_SOCK }}:/ssh.socket
            -e SSH_AUTH_SOCK=/ssh.socket
          shell: bash
          run: |
            set -e
            cd /compiler/concrete-optimizer
            cargo build --release -p concrete-optimizer-cpp
            cd /compiler
            rm -rf /build/*
            pip install pytest
            sed "s/pytest/python -m pytest/g" -i Makefile
            make DATAFLOW_EXECUTION_ENABLED=ON CCACHE=ON Python3_EXECUTABLE=$PYTHON_EXEC BUILD_DIR=/build all build-end-to-end-dataflow-tests
            echo "Debug: ccache statistics (after the build):"
            ccache -s

      - name: Test compiler
        uses: addnab/docker-run-action@v3
        with:
          registry: ghcr.io
          image: ${{ env.DOCKER_IMAGE_TEST }}
          username: ${{ secrets.GHCR_LOGIN }}
          password: ${{ secrets.GHCR_PASSWORD }}
          options: >-
            -v ${{ github.workspace }}/llvm-project:/llvm-project
            -v ${{ github.workspace }}/compiler:/compiler
            -v ${{ github.workspace }}/build:/build
          shell: bash
          run: |
            set -e
            cd /compiler
            pip install pytest
            make DATAFLOW_EXECUTION_ENABLED=ON CCACHE=ON Python3_EXECUTABLE=$PYTHON_EXEC BUILD_DIR=/build run-tests run-end-to-end-dataflow-tests run-rust-tests

      - name: Build the documentation
        id: build-doc
        if: ${{ steps.build-compiler.outcome == 'success' && !cancelled() }}
        uses: addnab/docker-run-action@v3
        with:
          registry: ghcr.io
          image: ${{ env.DOCKER_IMAGE_TEST }}
          username: ${{ secrets.GHCR_LOGIN }}
          password: ${{ secrets.GHCR_PASSWORD }}
          options: >-
            -v ${{ github.workspace }}/compiler:/compiler
            -v ${{ github.workspace }}/llvm-project:/llvm-project
            -v ${{ github.workspace }}/docs:/docs
            -v ${{ env.SSH_AUTH_SOCK }}:/ssh.socket
            -e SSH_AUTH_SOCK=/ssh.socket
          shell: bash
          run: |
            set -e
            rm -rf /build
            make DATAFLOW_EXECUTION_ENABLED=ON CCACHE=ON Python3_EXECUTABLE=$PYTHON_EXEC BUILD_DIR=/build concretecompiler python-bindings doc
            cd /docs
            pip install -r requirements.txt
            pip install -r ../llvm-project/mlir/python/requirements.txt
            dnf install -y doxygen
            sed "s/sphinx-apidoc/python -m sphinx.ext.apidoc/g" -i Makefile
            sed "s/sphinx-build/python -m sphinx.cmd.build/g" -i Makefile
            make COMPILER_BUILD_DIR=/build/ doc

      - name: Archive docs artifacts
        if: ${{ steps.build-doc.outcome == 'success' && !cancelled() }}
        uses: actions/upload-artifact@v3
        with:
          name: html-docs
          path: docs/_build/html

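  # Native macOS build of the compiler; the wheel build and full test run are
  # limited to push events.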
  BuildAndTestMacOS:
    runs-on: macos-11
    steps:
      # An SSH private key is required as some dependencies come from private repos
      - uses: webfactory/ssh-agent@v0.6.0
        with:
          ssh-private-key: ${{ secrets.CONCRETE_COMPILER_CI_SSH_PRIVATE }}

      - uses: actions/checkout@v3
        with:
          submodules: recursive
          token: ${{ secrets.GH_TOKEN }}

      - name: Install rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable

      - name: Install Deps
        run: |
          brew install ninja ccache
          pip3.10 install numpy pybind11==2.8 wheel delocate
          pip3.10 install pytest

      - name: Cache compilation (push)
        if: github.event_name == 'push'
        uses: actions/cache@v3
        with:
          path: /Users/runner/Library/Caches/ccache
          key: ${{ runner.os }}-compilation-cache-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-compilation-cache-

      - name: Cache compilation (pull_request)
        if: github.event_name == 'pull_request'
        uses: actions/cache@v3
        with:
          path: /Users/runner/Library/Caches/ccache
          key: ${{ runner.os }}-compilation-cache-${{ github.event.pull_request.base.sha }}
          restore-keys: |
            ${{ runner.os }}-compilation-cache-

      - name: Get tmpdir path
        if: github.event_name == 'push'
        id: tmpdir-path
        run: echo "::set-output name=TMPDIR_PATH::$TMPDIR"

      # We run the run-check-tests target as part of the build, as it isn't that costly
      # and gives at least minimal confidence in PRs that the compiler works
      - name: Build
        run: |
          set -e
          cd compiler
          echo "Debug: ccache statistics (prior to the build):"
          ccache -s
          make Python3_EXECUTABLE=$(which python3.10) all run-check-tests
          echo "Debug: ccache statistics (after the build):"
          ccache -s

      - name: Test
        if: github.event_name == 'push'
        run: |
          set -e
          cd compiler
          echo "Debug: ccache statistics (prior to the tests):"
          ccache -s
          export CONCRETE_COMPILER_DATAFLOW_EXECUTION_ENABLED=OFF
          pip3.10 wheel --no-deps -w ${{ github.workspace }}/wheels .
          delocate-wheel -v $(find ${{ github.workspace }}/wheels/ -name '*macosx*.whl')
          pip3.10 install $(find ${{ github.workspace }}/wheels/ -name '*macosx*.whl')
          make Python3_EXECUTABLE=$(which python3.10) run-tests
          echo "Debug: ccache statistics (after the tests):"
          ccache -s

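  # Blocks merging a PR while its history still contains commits whose subject
  # looks like a fixup to squash ("f <sha>").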
  BlockMerge:
    if: github.event_name == 'pull_request'
    runs-on: ubuntu-20.04
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Check Commit to Squash
        run: |
          set -e
          git log origin/${{ github.base_ref }}..origin/${{ github.head_ref }} --format=%s | ( ! grep -e "^f [0-9a-f]\+" -q )

  ##################################
  # Releasing and Testing Packages #
  ##################################

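  # Publishes the html-docs artifact from BuildAndTest to S3 and invalidates the
  # CloudFront cache, for version tags and pushes to main.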
  PublishDoc:
    needs: [BuildAndTest]

    runs-on: ubuntu-20.04
    if: ${{ github.event_name == 'push' && (startsWith(github.ref, 'refs/tags/v') || github.ref == 'refs/heads/main') }}

    steps:
      - name: Set env
        id: vars
        run: echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> "${GITHUB_ENV}"
      - name: Download Documentation
        id: download
        uses: actions/download-artifact@v3
        with:
          name: html-docs
      - name: Publish Documentation to S3
        id: publish
        if: ${{ steps.download.outcome == 'success' && !cancelled() }}
        uses: jakejarvis/s3-sync-action@be0c4ab89158cac4278689ebedd8407dd5f35a83
        with:
          args: --acl public-read
        env:
          AWS_S3_BUCKET: ${{ secrets.AWS_PREPROD_REPO_DOCUMENTATION_BUCKET_NAME }}
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: ${{ secrets.AWS_REGION }}
          SOURCE_DIR: '.'
          DEST_DIR: 'concrete-compiler/${{ env.RELEASE_VERSION }}'
      - name: Invalidate CloudFront Cache
        if: ${{ steps.publish.outcome == 'success' }}
        uses: awact/cloudfront-action@8bcfabc7b4bbc0cb8e55e48527f0e3a6d681627c
        env:
          SOURCE_PATH: '/concrete-compiler/*'
          AWS_REGION: ${{ secrets.AWS_REGION }}
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          DISTRIBUTION_ID: ${{ secrets.AWS_REPO_DOCUMENTATION_DISTRIBUTION_ID }}

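  # Creates a draft prerelease for version tags; the packaging jobs below attach
  # their artifacts to it through the exported upload_url.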
  CreateRelease:
    runs-on: ubuntu-20.04
    needs: [BuildAndTest, BuildAndTestMacOS]
    if: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v') }}
    outputs:
      upload_url: ${{ steps.release.outputs.upload_url }}
      release_id: ${{ steps.release.outputs.id }}
    steps:
      - name: Release
        id: release
        uses: softprops/action-gh-release@v1
        with:
          token: ${{ secrets.GH_TOKEN_RELEASE }}
          draft: true
          prerelease: true
          generate_release_notes: true

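  # Builds manylinux wheels for every supported Python version and uploads them
  # as release assets; the wheel filenames are exported for TestPythonPackageLinux.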
  BuildAndPushPythonPackagesLinux:
    runs-on: ubuntu-20.04
    strategy:
      matrix:
        include:
          - python: 37
            python_dir: "cp37-cp37m"
          - python: 38
            python_dir: "cp38-cp38"
          - python: 39
            python_dir: "cp39-cp39"
          - python: 310
            python_dir: "cp310-cp310"
    outputs:
      python-package-name-linux-py37: ${{ steps.set-output-wheel-linux.outputs.ASSET_NAME_PY37 }}
      python-package-name-linux-py38: ${{ steps.set-output-wheel-linux.outputs.ASSET_NAME_PY38 }}
      python-package-name-linux-py39: ${{ steps.set-output-wheel-linux.outputs.ASSET_NAME_PY39 }}
      python-package-name-linux-py310: ${{ steps.set-output-wheel-linux.outputs.ASSET_NAME_PY310 }}
    needs: CreateRelease
    steps:
      # An SSH private key is required as some dependencies come from private repos
      - uses: webfactory/ssh-agent@v0.5.2
        with:
          ssh-private-key: ${{ secrets.CONCRETE_COMPILER_CI_SSH_PRIVATE }}

      - uses: actions/checkout@v3
        with:
          submodules: recursive
          token: ${{ secrets.GH_TOKEN }}

      - name: Update Python Version
        run: cd compiler && make update-python-version

      - name: Login to GitHub Container Registry
        run: echo "${{ secrets.GHCR_PASSWORD }}" | docker login -u ${{ secrets.GHCR_LOGIN }} --password-stdin ghcr.io

      - name: Set up ssh auth in docker
        run: |
          echo "SSH_AUTH_SOCK_DIR=$(dirname $SSH_AUTH_SOCK)" >> "${GITHUB_ENV}"

      - name: Build Wheel
        uses: addnab/docker-run-action@v3
        with:
          registry: ghcr.io
          image: ${{ env.DOCKER_IMAGE_TEST }}
          username: ${{ secrets.GHCR_LOGIN }}
          password: ${{ secrets.GHCR_PASSWORD }}
          options: >-
            -v ${{ github.workspace }}/llvm-project:/llvm-project
            -v ${{ github.workspace }}/compiler:/compiler
            -v ${{ github.workspace }}/wheels:/wheels
            -v ${{ env.SSH_AUTH_SOCK }}:/ssh.socket
            -e SSH_AUTH_SOCK=/ssh.socket
          shell: bash
          run: |
            set -e
            cd /compiler
            rm -rf /build
            export PYTHON_EXEC=/opt/python/${{ matrix.python_dir }}/bin/python
            $PYTHON_EXEC -m pip install -r /llvm-project/mlir/python/requirements.txt
            # Set up the env variables used by the wheel build
            export CONCRETE_COMPILER_Python3_EXECUTABLE=$PYTHON_EXEC
            export CONCRETE_COMPILER_BUILD_DIR=/build
            /opt/python/${{ matrix.python_dir }}/bin/pip wheel -vvv --no-deps -w /wheels .
            # We need to run it twice: the first run generates the directories, so that
            # the second run can find the packages via find_namespace_packages
            /opt/python/${{ matrix.python_dir }}/bin/pip wheel -vvv --no-deps -w /wheels .
            auditwheel repair /wheels/*.whl --plat manylinux_2_28_x86_64 -w /wheels
            echo "Debug: ccache statistics (after the build):"
            ccache -s

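      # NOTE: the `::set-output` workflow commands used below are deprecated by
      # GitHub Actions; the newer equivalent is appending `NAME=value` to "$GITHUB_OUTPUT".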
      - name: Set Outputs
        id: set-output-wheel-linux
        run: |
          echo "::set-output name=ASSET_NAME::$(find ${{ github.workspace }}/wheels/ -name '*manylinux*.whl' | rev | cut -d "/" -f 1 | rev)"
          # used later for python package test
          echo "::set-output name=ASSET_NAME_PY${{ matrix.python }}::$(find ${{ github.workspace }}/wheels/ -name '*manylinux*.whl' | rev | cut -d "/" -f 1 | rev)"

      - name: Upload Python Package
        uses: actions/upload-release-asset@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GH_TOKEN_RELEASE }}
        with:
          upload_url: ${{ needs.CreateRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/wheels/${{ steps.set-output-wheel-linux.outputs.ASSET_NAME }}
          asset_name: ${{ steps.set-output-wheel-linux.outputs.ASSET_NAME }}
          asset_content_type: application/zip

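  # Builds a Linux install tree without the Python bindings, packages it as a
  # tarball together with Installation.md, and uploads it as a release asset.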
  BuildAndPushTarballLinux:
    runs-on: ubuntu-20.04
    needs: CreateRelease
    steps:
      # An SSH private key is required as some dependencies come from private repos
      - uses: webfactory/ssh-agent@v0.6.0
        with:
          ssh-private-key: ${{ secrets.CONCRETE_COMPILER_CI_SSH_PRIVATE }}

      - uses: actions/checkout@v3
        with:
          submodules: recursive
          token: ${{ secrets.GH_TOKEN }}

      - name: Login to GitHub Container Registry
        run: echo "${{ secrets.GHCR_PASSWORD }}" | docker login -u ${{ secrets.GHCR_LOGIN }} --password-stdin ghcr.io

      - name: Build Tarball
        uses: addnab/docker-run-action@v3
        with:
          registry: ghcr.io
          image: ${{ env.DOCKER_IMAGE_TEST }}
          username: ${{ secrets.GHCR_LOGIN }}
          password: ${{ secrets.GHCR_PASSWORD }}
          options: >-
            -v ${{ github.workspace }}/llvm-project:/llvm-project
            -v ${{ github.workspace }}/compiler:/compiler
            -v ${{ github.workspace }}/tarballs:/tarballs
            -v ${{ github.workspace }}/.github/workflows/assets/Installation.md:/Installation.md
          shell: bash
          run: |
            set -e
            cd /compiler
            rm -rf /build
            make BINDINGS_PYTHON_ENABLED=OFF BUILD_DIR=/build INSTALL_PREFIX=/tarballs/ install
            echo "Debug: ccache statistics (after the build):"
            ccache -s
            # Package the installation instructions and create the tarball
            cp /Installation.md /tarballs/concretecompiler/
            cd /tarballs && tar -czvf concretecompiler.tar.gz concretecompiler

      - name: Tag Tarball
        id: tag-tarball
        run: |
          TAG="$(git describe --tags --abbrev=0)"

          sudo cp "${{ github.workspace }}/tarballs/concretecompiler.tar.gz" "${{ github.workspace }}/tarballs/concretecompiler-${TAG}-x86_64-linux-gnu.tar.gz"
          echo "::set-output name=ASSET_NAME::concretecompiler-${TAG}-x86_64-linux-gnu.tar.gz"

      - name: Upload Tarball
        uses: actions/upload-release-asset@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GH_TOKEN_RELEASE }}
        with:
          upload_url: ${{ needs.CreateRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/tarballs/${{ steps.tag-tarball.outputs.ASSET_NAME }}
          asset_name: ${{ steps.tag-tarball.outputs.ASSET_NAME }}
          asset_content_type: application/tar+gzip

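  # Builds macOS wheels for each Python version (plus, for Python 3.8 only, a
  # macOS tarball) and uploads them as release assets.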
  BuildAndPushPackagesMacOS:
    needs: CreateRelease
    runs-on: macos-11
    strategy:
      matrix:
        python: ['3.8', '3.9', '3.10']
    outputs:
      python-package-name-macos-py38: ${{ steps.build-wheel-macos.outputs.ASSET_NAME_PY38 }}
      python-package-name-macos-py39: ${{ steps.build-wheel-macos.outputs.ASSET_NAME_PY39 }}
      python-package-name-macos-py310: ${{ steps.build-wheel-macos.outputs.ASSET_NAME_PY310 }}
    steps:
      # An SSH private key is required as some dependencies come from private repos
      - uses: webfactory/ssh-agent@v0.5.0
        with:
          ssh-private-key: ${{ secrets.CONCRETE_COMPILER_CI_SSH_PRIVATE }}

      - uses: actions/checkout@v3
        with:
          submodules: recursive
          token: ${{ secrets.GH_TOKEN }}

      - name: Install Rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable

      - name: Concrete-Optimizer
        run: |
          cd compiler
          make concrete-optimizer-lib

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python }}

      - name: Install Deps
        run: |
          brew install ninja ccache
          pip install numpy pybind11==2.8 wheel delocate

      - name: Update Python Version
        run: cd compiler && make update-python-version

      - name: Use Compilation Cache
        uses: actions/cache@v3
        with:
          path: /Users/runner/Library/Caches/ccache
          key: ${{ runner.os }}-compilation-cache-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-compilation-cache-

      - name: Build
        id: build-wheel-macos
        run: |
          cd compiler
          make Python3_EXECUTABLE=$(which python) DATAFLOW_EXECUTION_ENABLED=OFF python-bindings
          export CONCRETE_COMPILER_DATAFLOW_EXECUTION_ENABLED=OFF
          pip wheel --no-deps -w ${{ github.workspace }}/wheels .
          delocate-wheel -v $(find ${{ github.workspace }}/wheels/ -name '*macosx*.whl')
          echo "::set-output name=ASSET_NAME::$(find ${{ github.workspace }}/wheels/ -name '*macosx*.whl' | rev | cut -d "/" -f 1 | rev)"
          # used later for python package test
          echo "::set-output name=ASSET_NAME_PY$(echo ${{ matrix.python }} | tr -d '.')::$(find ${{ github.workspace }}/wheels/ -name '*macosx*.whl' | rev | cut -d "/" -f 1 | rev)"

      - name: Upload Python Package
        uses: actions/upload-release-asset@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GH_TOKEN_RELEASE }}
        with:
          upload_url: ${{ needs.CreateRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/wheels/${{ steps.build-wheel-macos.outputs.ASSET_NAME }}
          asset_name: ${{ steps.build-wheel-macos.outputs.ASSET_NAME }}
          asset_content_type: application/zip

      - name: Build tarball
        if: matrix.python == '3.8'
        id: build-mac-tarball
        run: |
          cd compiler
          make concretecompiler
          mkdir -p tarballs/concretecompiler/lib tarballs/concretecompiler/bin
          cp build/bin/concretecompiler tarballs/concretecompiler/bin
          cp build/lib/libConcretelangRuntime.dylib tarballs/concretecompiler/lib
          cp ../.github/workflows/assets/Installation.md tarballs/concretecompiler/

          TAG=$(git describe --tags --abbrev=0)

          cd tarballs && tar -czvf "concretecompiler-${TAG}-x86_64-macos-catalina.tar.gz" concretecompiler
          echo "::set-output name=ASSET_NAME::concretecompiler-${TAG}-x86_64-macos-catalina.tar.gz"

      - name: Upload Tarball
        if: matrix.python == '3.8'
        uses: actions/upload-release-asset@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GH_TOKEN_RELEASE }}
        with:
          upload_url: ${{ needs.CreateRelease.outputs.upload_url }}
          asset_path: ${{ github.workspace }}/compiler/tarballs/${{ steps.build-mac-tarball.outputs.ASSET_NAME }}
          asset_name: ${{ steps.build-mac-tarball.outputs.ASSET_NAME }}
          asset_content_type: application/tar+gzip

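  # Downloads the manylinux wheel back from the release assets and runs the
  # Python test suite against the installed package.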
  TestPythonPackageLinux:
    runs-on: ubuntu-20.04
    needs: [BuildAndPushPythonPackagesLinux, CreateRelease]
    strategy:
      matrix:
        include:
          - python: '3.7'
            filename-index: 'python-package-name-linux-py37'
          - python: '3.8'
            filename-index: 'python-package-name-linux-py38'
          - python: '3.9'
            filename-index: 'python-package-name-linux-py39'
          - python: '3.10'
            filename-index: 'python-package-name-linux-py310'
    steps:
      - uses: actions/checkout@v3

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python }}

      - name: Extract Package Filename
        id: extract-filename
        run: echo "::set-output name=FILE_NAME::$(echo '${{ toJson(needs.BuildAndPushPythonPackagesLinux.outputs) }}' | jq '.["${{ matrix.filename-index }}"]' | tr -d '\"')"

      - name: Download and Install Package
        run: |
          FILE_NAME=$(curl -s -u "zama-bot:${{ secrets.GH_TOKEN_RELEASE }}" \
            https://api.github.com/repos/${{ github.repository }}/releases | \
            jq 'map(select(.tag_name == "${{ github.ref_name }}"))' | \
            jq '.[0].assets' | \
            jq 'map(select(.name == "${{ steps.extract-filename.outputs.FILE_NAME }}"))' | \
            jq '.[].id')

          wget --auth-no-challenge --header='Accept:application/octet-stream' \
            "https://${{ secrets.GH_TOKEN_RELEASE }}:@api.github.com/repos/${{ github.repository }}/releases/assets/${FILE_NAME}" \
            -O ${{ steps.extract-filename.outputs.FILE_NAME }}
          pip install ${{ steps.extract-filename.outputs.FILE_NAME }}

      - name: Test
        run: |
          cd compiler
          pip install pytest
          pytest -vs tests/python

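  # Same as TestPythonPackageLinux but for the macOS wheels; tests marked
  # "parallel" are skipped.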
  TestPythonPackageMacOS:
    runs-on: macos-11
    needs: [BuildAndPushPackagesMacOS, CreateRelease]
    env:
      SYSTEM_VERSION_COMPAT: 0
    strategy:
      matrix:
        include:
          - python: '3.8'
            filename-index: 'python-package-name-macos-py38'
          - python: '3.9'
            filename-index: 'python-package-name-macos-py39'
          - python: '3.10'
            filename-index: 'python-package-name-macos-py310'
    steps:
      - uses: actions/checkout@v3

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python }}

      - name: Extract Package Filename
        id: extract-filename
        run: echo "::set-output name=FILE_NAME::$(echo '${{ toJson(needs.BuildAndPushPackagesMacOS.outputs) }}' | jq '.["${{ matrix.filename-index }}"]' | tr -d '\"')"

      - name: Download and Install Package
        run: |
          FILE_NAME=$(curl -s -u "zama-bot:${{ secrets.GH_TOKEN_RELEASE }}" \
            https://api.github.com/repos/${{ github.repository }}/releases | \
            jq 'map(select(.tag_name == "${{ github.ref_name }}"))' | \
            jq '.[0].assets' | \
            jq 'map(select(.name == "${{ steps.extract-filename.outputs.FILE_NAME }}"))' | \
            jq '.[].id')

          wget --auth-no-challenge --header='Accept:application/octet-stream' \
            "https://${{ secrets.GH_TOKEN_RELEASE }}:@api.github.com/repos/${{ github.repository }}/releases/assets/${FILE_NAME}" \
            -O ${{ steps.extract-filename.outputs.FILE_NAME }}
          pip install ${{ steps.extract-filename.outputs.FILE_NAME }}

      - name: Test
        run: |
          cd compiler
          pip install pytest
          pytest -vs -m "not parallel" tests/python

  #################
  # Docker Images #
  #################

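  # Rebuilds and publishes the concrete-compiler test image on pushes to main or
  # version tags, or whenever the HPX/CUDA base images were rebuilt.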
  BuildAndPushDockerImages:
    if: ${{ github.event_name == 'push' && (startsWith(github.ref, 'refs/tags/v') || github.ref == 'refs/heads/main') || needs.BuildAndPublishHPXDockerImage.outputs.image_built == 'true' || needs.BuildAndPublishCUDADockerImage.outputs.image_built == 'true' }}
    needs: [BuildAndPublishHPXDockerImage, BuildAndPublishCUDADockerImage]
    name: Build & Publish Docker Images
    runs-on: ubuntu-20.04
    strategy:
      matrix:
        include:
          - name: test-env
            image: ghcr.io/zama-ai/concrete-compiler
            dockerfile: builders/Dockerfile.concrete-compiler-env

    steps:
      # An SSH private key is required as some dependencies come from private repos
      - uses: webfactory/ssh-agent@v0.6.0
        with:
          ssh-private-key: ${{ secrets.CONCRETE_COMPILER_CI_SSH_PRIVATE }}

      - uses: actions/checkout@v3
        with:
          submodules: recursive
          token: ${{ secrets.GH_TOKEN }}

      - name: Login to Registry
        run: echo "${{ secrets.GHCR_PASSWORD }}" | docker login -u ${{ secrets.GHCR_LOGIN }} --password-stdin ghcr.io

      - name: Set up ssh auth in docker
        run: |
          echo "SSH_AUTH_SOCK_DIR=$(dirname $SSH_AUTH_SOCK)" >> "${GITHUB_ENV}"

      # The label was initially needed by the frontend CI.
      # NOTE: `docker build` has no `-v`/`-e` flags; forwarding the ssh-agent socket
      # through BuildKit's `--ssh` option is assumed here as the intended behaviour.
      - name: Build Image
        run: |
          DOCKER_BUILDKIT=1 docker image build --no-cache \
            --ssh default=${{ env.SSH_AUTH_SOCK }} \
            --label "commit-sha=${{ github.sha }}" -t ${{ matrix.image }} -f ${{ matrix.dockerfile }} .

      - name: Tag and Publish Image
        run: |
          docker image tag ${{ matrix.image }} ${{ matrix.image }}:${{ github.sha }}
          docker image push ${{ matrix.image }}:latest
          docker image push ${{ matrix.image }}:${{ github.sha }}

      - name: Tag and Publish Release Image
        if: startsWith(github.ref, 'refs/tags/v')
        run: |
          docker image tag ${{ matrix.image }} ${{ matrix.image }}:${{ github.ref_name }}
          docker image push ${{ matrix.image }}:${{ github.ref_name }}

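  # Rebuilds the HPX base image only when its Dockerfile (or this workflow file)
  # has changed, and exports image_built for downstream jobs.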
  BuildAndPublishHPXDockerImage:
    name: Build & Publish HPX Docker Image
    runs-on: ubuntu-20.04
    env:
      IMAGE: ghcr.io/zama-ai/hpx
    outputs:
      image_built: ${{ steps.is-built.outputs.IMAGE_BUILT }}

    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Get changed files
        id: changed-files
        uses: tj-actions/changed-files@v32

      - name: Login
        id: login
        if: contains(steps.changed-files.outputs.modified_files, 'builders/Dockerfile.hpx-env') || contains(steps.changed-files.outputs.modified_files, env.THIS_FILE)
        run: echo "${{ secrets.GHCR_PASSWORD }}" | docker login -u ${{ secrets.GHCR_LOGIN }} --password-stdin ghcr.io

      - name: Build Tag and Publish
        if: ${{ steps.login.conclusion != 'skipped' }}
        run: |
          docker build -t "${IMAGE}" -f builders/Dockerfile.hpx-env .
          docker push "${IMAGE}:latest"

      - name: Is Image Built
        id: is-built
        run: echo "::set-output name=IMAGE_BUILT::${{ contains(steps.changed-files.outputs.modified_files, 'builders/Dockerfile.hpx-env') }}"

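  # Rebuilds the CUDA base image only when its Dockerfile (or this workflow file)
  # has changed, and exports image_built for downstream jobs.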
  BuildAndPublishCUDADockerImage:
    name: Build & Publish CUDA Docker Image
    runs-on: ubuntu-20.04
    env:
      IMAGE: ghcr.io/zama-ai/cuda
      TAG: "11-7"
    outputs:
      image_built: ${{ steps.is-built.outputs.IMAGE_BUILT }}

    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0

      - name: Get changed files
        id: changed-files
        uses: tj-actions/changed-files@v32

      - name: Login
        id: login
        if: contains(steps.changed-files.outputs.modified_files, 'builders/Dockerfile.cuda-env') || contains(steps.changed-files.outputs.modified_files, env.THIS_FILE)
        run: echo "${{ secrets.GHCR_PASSWORD }}" | docker login -u ${{ secrets.GHCR_LOGIN }} --password-stdin ghcr.io

      - name: Build Tag and Publish
        if: ${{ steps.login.conclusion != 'skipped' }}
        run: |
          docker build -t "${IMAGE}" -f builders/Dockerfile.cuda-env .
          docker image tag "${IMAGE}" "${IMAGE}:${TAG}"
          docker push "${IMAGE}:latest"
          docker push "${IMAGE}:${TAG}"

      - name: Is Image Built
        id: is-built
        run: echo "::set-output name=IMAGE_BUILT::${{ contains(steps.changed-files.outputs.modified_files, 'builders/Dockerfile.cuda-env') }}"