Mirror of https://github.com/zama-ai/concrete.git (synced 2026-02-08 19:44:57 -05:00)

chore: prepare release workflow
- disable macOS builds for release for now, as we don't know how long they take

closes #246
closes #809

.github/workflows/continuous-integration.yaml (vendored, 99 changes)

@@ -278,6 +278,23 @@ jobs:
          name: changelog
          path: ${{ steps.changelog.outputs.changelog-file }}

      # Create packages before tests, to be able to get them if some unexpected test failure happens
      # Build the package only once, as we don't have binary dependency this can be used on Linux
      # and macOS as long as the dependencies are available
      - name: Build wheel
        id: build-wheel
        if: ${{ fromJSON(env.IS_REF_BUILD) && steps.conformance.outcome == 'success' && !cancelled() }}
        run: |
          rm -rf dist
          poetry build -f wheel

      - name: Upload wheel artifact
        if: ${{ fromJSON(env.IS_REF_BUILD) && steps.build-wheel.outcome == 'success' }}
        uses: actions/upload-artifact@82c141cc518b40d92cc801eee768e7aafc9c2fa2
        with:
          name: py3-wheel
          path: dist/*.whl

      - name: PyTest Source Code
        id: pytest
        if: ${{ steps.conformance.outcome == 'success' && !cancelled() }}

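The new steps above build the wheel once and publish it as the py3-wheel artifact; the comment's rationale is that the package has no binary dependency, so the same wheel serves Linux and macOS. As a local sketch outside the workflow (nothing below is part of the CI file), that claim can be checked from the wheel's filename tag:

    # Sketch only: rebuild the wheel the same way the workflow does and confirm
    # it is pure Python. A pure-Python wheel carries the py3-none-any tag, so it
    # is installable on any platform that has the (pure-Python) dependencies.
    rm -rf dist
    poetry build -f wheel
    ls dist/*-py3-none-any.whl   # errors out if the wheel were platform-specific
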
@@ -396,10 +413,11 @@ jobs:
          PATH="/usr/local/opt/make/libexec/gnubin:$PATH"
          echo "PATH=${PATH}" >> "$GITHUB_ENV"

          which make
          make setup_env

          poetry run python --version
      - name: PyTest Source Code
        run: |
          make pytest

  weekly-pip-audit:
    if: ${{ github.event_name == 'schedule' }}

@@ -607,36 +625,6 @@ jobs:
          echo "RELEASE_IMG_TAGS_TO_PUSH=${RELEASE_IMG_TAGS_TO_PUSH}" >> "$GITHUB_ENV"

      # Disabled buildx for now as we are seeing a lot of fails on layer pushes
      # - name: Set up Docker Buildx
      #   id: buildx
      #   uses: docker/setup-buildx-action@94ab11c41e45d028884a99163086648e898eed25
      - name: Login to GitHub Container Registry
        uses: docker/login-action@42d299face0c5c43a0487c477f595ac9cf22f1a7
        with:
          registry: ghcr.io
          username: ${{ secrets.BOT_USERNAME }}
          password: ${{ secrets.BOT_TOKEN }}

      - name: Build concrete-numpy Image
        if: ${{ success() && !cancelled() }}
        uses: docker/build-push-action@a66e35b9cbcf4ad0ea91ffcaf7bbad63ad9e0229
        with:
          context: .
          # builder: ${{ steps.buildx.outputs.name }}
          file: docker/Dockerfile.release
          load: true
          push: false
          tags: "${{ env.RELEASE_IMG_TAGS_TO_PUSH }}"
          no-cache: true

      - name: Release image sanity check
        if: ${{ success() && !cancelled() }}
        run: |
          echo "Running sanity check for ${RELEASE_IMG_GIT_TAG}"
          docker run --rm -v "$(pwd)"/docker/release_resources:/data \
            "${RELEASE_IMG_GIT_TAG}" /bin/bash -c "python ./sanity_check.py"

      - name: Create directory for artifacts
        if: ${{ success() && !cancelled() }}
        run: |

@@ -659,7 +647,7 @@ jobs:
      - name: Untar docs artifacts
        if: ${{ success() && !cancelled() }}
        run: |
          cd ${{ env.ARTIFACTS_RAW_DIR }}/html_docs/
          cd ${{ steps.download-docs.outputs.download-path }}
          tar -xvf docs.tar
          rm docs.tar

@@ -671,6 +659,44 @@ jobs:
          name: changelog
          path: ${{ env.ARTIFACTS_RAW_DIR }}/changelog/

      - name: Download python3 wheel
        if: ${{ success() && !cancelled() }}
        id: download-wheel
        uses: actions/download-artifact@f023be2c48cc18debc3bacd34cb396e0295e2869
        with:
          name: py3-wheel
          path: ${{ env.ARTIFACTS_PACKAGED_DIR }}/

      - name: Copy wheel to docker build context
        run: |
          mkdir -p ./pkg
          cp "${{ env.ARTIFACTS_PACKAGED_DIR }}"/*.whl ./pkg

      - name: Login to GitHub Container Registry
        uses: docker/login-action@42d299face0c5c43a0487c477f595ac9cf22f1a7
        with:
          registry: ghcr.io
          username: ${{ secrets.BOT_USERNAME }}
          password: ${{ secrets.BOT_TOKEN }}

      - name: Build concrete-numpy Image
        if: ${{ success() && !cancelled() }}
        uses: docker/build-push-action@a66e35b9cbcf4ad0ea91ffcaf7bbad63ad9e0229
        with:
          context: .
          file: docker/Dockerfile.release
          load: true
          push: false
          tags: "${{ env.RELEASE_IMG_TAGS_TO_PUSH }}"
          no-cache: true

      - name: Release image sanity check
        if: ${{ success() && !cancelled() }}
        run: |
          echo "Running sanity check for ${RELEASE_IMG_GIT_TAG}"
          docker run --rm -v "$(pwd)"/docker/release_resources:/data \
            "${RELEASE_IMG_GIT_TAG}" /bin/bash -c "python ./sanity_check.py"

      - name: Prepare docs push
        id: docs-push-infos
        run: |

@@ -754,6 +780,13 @@ jobs:
        run: |
          docker image push --all-tags "${RELEASE_IMAGE_BASE}"

      - name: Push package to PyPi
        if: ${{ success() && !cancelled() && !fromJSON(env.IS_PRERELEASE) }}
        run: |
          twine upload \
            -u __token__ -p ${{ secrets.PYPI_BOT_TOKEN }} \
            -r pypi "${{ env.ARTIFACTS_PACKAGED_DIR }}"/*.whl

      - name: Push release documentation
        if: ${{ success() && !cancelled() }}
        env:

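The PyPI push only runs for non-prerelease builds and reuses the wheel from ARTIFACTS_PACKAGED_DIR. As a hedged aside (not part of the workflow), twine also ships a check subcommand that validates a wheel's metadata and long description, which makes a cheap pre-flight before the upload step:

    # Sketch only: validate the packaged wheel's metadata before uploading.
    # ARTIFACTS_PACKAGED_DIR stands in for the env value the workflow sets.
    twine check "${ARTIFACTS_PACKAGED_DIR}"/*.whl
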
docker/Dockerfile.release

@@ -1,28 +1,13 @@
FROM ghcr.io/zama-ai/zamalang-compiler:a9fae4c19b96ee61c7ea0a2ce26b1cd8d049e159 as builder

RUN apt-get update && apt-get upgrade --no-install-recommends -y && \
    apt-get install --no-install-recommends -y \
    python3.8 \
    python-is-python3 && \
    rm -rf /var/lib/apt/lists/* && \
    python3 -m pip install --no-cache-dir --upgrade pip wheel setuptools && \
    python3 -m pip install --no-cache-dir poetry

WORKDIR /build
COPY concrete ./concrete
COPY pyproject.toml ./pyproject.toml

RUN poetry build --format wheel

FROM ghcr.io/zama-ai/zamalang-compiler:a9fae4c19b96ee61c7ea0a2ce26b1cd8d049e159
FROM ubuntu:20.04

RUN mkdir /pkg && mkdir /app
WORKDIR /pkg
COPY --from=builder /build/dist/*.whl .
COPY docker/release_resources/release_requirements.txt .
COPY ./pkg/*.whl .

RUN apt-get update && apt-get upgrade --no-install-recommends -y && \
    apt-get install --no-install-recommends -y \
    python3-pip \
    python3.8 \
    python3.8-tk \
    python-is-python3 \

@@ -33,8 +18,6 @@ RUN apt-get update && apt-get upgrade --no-install-recommends -y && \
    python3 -m pip install --no-cache-dir ./*.whl && \
    python3 -m pip install --no-cache-dir -r release_requirements.txt

ENV LD_PRELOAD=${RT_LIB}:${LD_PRELOAD}

WORKDIR /app
COPY docker/release_resources/entry_point.sh ./entry_point.sh
RUN mkdir /data

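With the builder stage dropped, Dockerfile.release no longer compiles the wheel itself: it installs a prebuilt wheel from ./pkg on top of a plain ubuntu:20.04 base, which is exactly what the "Copy wheel to docker build context" step prepares. A local sketch of the same flow, with an illustrative tag name (concrete-numpy:release-test is not used by the workflow):

    # Sketch only: reproduce the release-image flow locally.
    poetry build -f wheel          # produce the wheel the image will install
    mkdir -p ./pkg
    cp dist/*.whl ./pkg            # mirror the "Copy wheel to docker build context" step

    docker build -f docker/Dockerfile.release -t concrete-numpy:release-test .

    # Same sanity check the workflow runs, pointed at the freshly built image.
    docker run --rm -v "$(pwd)"/docker/release_resources:/data \
        concrete-numpy:release-test /bin/bash -c "python ./sanity_check.py"
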
.dockerignore

@@ -1,12 +1,6 @@
# Ignore all
**

# Not our sources
!concrete
!pyproject.toml
!docker/release_resources/entry_point.sh
!docker/release_resources/release_requirements.txt

# But still ignore pycache
**/__pycache__
**/*.pyc
!pkg/
!pkg/**

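The .dockerignore hunk narrows the allowlist: the Python sources drop out and pkg/ (holding the prebuilt wheel) comes in, keeping the build context small now that the image no longer builds from source. If one wants to double-check what actually reaches the Docker daemon, a hedged trick (not part of the repository) is to build a throwaway image that copies the whole context and lists it, since COPY honours the context's .dockerignore:

    # Sketch only: list everything that survives .dockerignore. The image and
    # file names here are illustrative, not part of the repo.
    printf 'FROM busybox\nCOPY . /context\nCMD find /context -type f\n' > /tmp/Dockerfile.context-check
    docker build -f /tmp/Dockerfile.context-check -t context-check .
    docker run --rm context-check
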
docker/release_resources/sanity_check.py

@@ -8,14 +8,16 @@ def main():
        return x + 42

    n_bits = 3
    x = hnp.EncryptedScalar(hnp.UnsignedInteger(n_bits))

    engine = hnp.compile_numpy_function(
    compiler = hnp.NPFHECompiler(
        function_to_compile,
        {"x": x},
        [(i,) for i in range(2 ** n_bits)],
        {"x": "encrypted"},
    )

    print("Compiling...")

    engine = compiler.compile_on_inputset(range(2 ** n_bits))

    inputs = []
    labels = []
    for _ in range(4):

@@ -25,12 +27,15 @@ def main():
        labels.append(function_to_compile(*inputs[-1]))

    correct = 0
    for input_i, label_i in zip(inputs, labels):
    for idx, (input_i, label_i) in enumerate(zip(inputs, labels), 1):
        print(f"Inference #{idx}")
        result_i = engine.run(*input_i)

        if result_i == label_i:
            correct += 1

    print(f"{correct}/{len(inputs)}")


if __name__ == "__main__":
    main()

poetry.lock (generated, 2 changes)

@@ -2293,7 +2293,7 @@ full = ["pygraphviz"]
[metadata]
lock-version = "1.1"
python-versions = ">=3.8,<3.10"
content-hash = "5b9f4408f601802b92c991083622bc54f2d09a7e829a6b6385aed32dd641c7b8"
content-hash = "6d24b64da2afe879eff54d4cc70661ba72f6cf95878f8e2bb9aa75b4ae72f02e"

[metadata.files]
alabaster = [

pyproject.toml

@@ -84,6 +84,7 @@ pandas = "1.3.4"
pip-audit = "^1.1.1"
pytest-codeblocks = "^0.12.2"
py-progress-tracker = "^0.3.3"
twine = "^3.7.1"

[build-system]
requires = ["poetry-core>=1.0.0"]

@@ -33,7 +33,9 @@ WEEKLY_CONF = {"os": WEEKLY_OSES, "python": WEEKLY_PYTHON_VERSIONS}
# The OSes here are to indicate the OSes used for runners during release
RELEASE_OSES = {
    LINUX: "ubuntu-20.04",
    MACOS: "macos-10.15",
    # TODO: https://github.com/zama-ai/concrete-numpy-internal/issues/1340
    # Re-enable macOS for release once we have the duration of the tests
    # MACOS: "macos-10.15",
}
# The python versions will be used to build packages during release
RELEASE_PYTHON_VERSIONS = ["3.8", "3.9"]
