diff --git a/.github/workflows/assets/Installation.md b/.github/workflows/assets/Installation.md new file mode 100644 index 000000000..da3bcf67c --- /dev/null +++ b/.github/workflows/assets/Installation.md @@ -0,0 +1,11 @@ +# Installation + +You can either install the compiler in user space or globally (you need root/sudo access): + +1. User space install: extract the tarball to a chosen path and make the lib, bin, and include directories accessible depending on your needs. + +2. Global install: extract the tarball to a temporary path, and copy + +- temporary/path/concretecompiler/bin/* inside /usr/local/bin/ (or a directory in $PATH) +- temporary/path/concretecompiler/lib/* inside /usr/local/lib/ (or another lib folder) +- temporary/path/concretecompiler/include/* inside /usr/local/include/ (or another include folder) diff --git a/.github/workflows/auto-cancel.yml b/.github/workflows/auto-cancel.yml new file mode 100644 index 000000000..87770eb32 --- /dev/null +++ b/.github/workflows/auto-cancel.yml @@ -0,0 +1,14 @@ +# https://github.com/styfle/cancel-workflow-action#user-content-advanced-canceling-other-workflows +name: Auto-cancel-outdated PR +on: + pull_request: + types: [opened, synchronize, reopened] +jobs: + cancel: + name: Auto-cancel-outdated PR + runs-on: ubuntu-latest + steps: + - uses: styfle/cancel-workflow-action@0.10.0 + with: + workflow_id: continuous-integration.yml + access_token: ${{ github.token }} diff --git a/.github/workflows/aws_build_cpu.yml b/.github/workflows/aws_build_cpu.yml new file mode 100644 index 000000000..f399a0fdf --- /dev/null +++ b/.github/workflows/aws_build_cpu.yml @@ -0,0 +1,195 @@ +# Build on an AWS instance (CPU). 
+name: AwsEc2BuildCpu + +on: + workflow_dispatch: + inputs: + instance_id: + description: 'Instance ID' + type: string + instance_image_id: + description: 'Instance AMI ID' + type: string + instance_type: + description: 'Instance product type' + type: string + runner_name: + description: 'Action runner name' + type: string + request_id: + description: 'Slab request ID' + type: string + matrix_item: + description: 'Build matrix item' + type: string + +env: + DOCKER_IMAGE_TEST: ghcr.io/zama-ai/concrete-compiler + GLIB_VER: 2_28 + +jobs: + BuildAndTest: + name: Build and test compiler in EC2 + concurrency: + group: ${{ github.ref }}_${{ github.event.inputs.instance_image_id }}_${{ github.event.inputs.instance_type }} + cancel-in-progress: true + runs-on: ${{ github.event.inputs.runner_name }} + if: ${{ !cancelled() }} + steps: + - name: Instance configuration used + run: | + echo "IDs: ${{ inputs.instance_id }}" + echo "AMI: ${{ inputs.instance_image_id }}" + echo "Type: ${{ inputs.instance_type }}" + echo "Request ID: ${{ inputs.request_id }}" + echo "Matrix item: ${{ inputs.matrix_item }}" + + # An SSH private key is required as some dependencies are from private repos + - name: Set up SSH agent + uses: webfactory/ssh-agent@v0.5.2 + with: + ssh-private-key: ${{ secrets.CONCRETE_COMPILER_CI_SSH_PRIVATE }} + + - name: Set up env + # "Install rust" step requires root user to have a HOME directory which is not set. 
+ run: | + echo "HOME=/home/ubuntu" >> "${GITHUB_ENV}" + #echo "SSH_AUTH_SOCK=$SSH_AUTH_SOCK)" >> "${GITHUB_ENV}" + echo "SSH_AUTH_SOCK_DIR=$(dirname $SSH_AUTH_SOCK)" >> "${GITHUB_ENV}" + + - name: Fetch repository + uses: actions/checkout@v3 + with: + submodules: recursive + token: ${{ secrets.GH_TOKEN }} + + - name: Install rust + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + override: true + + - name: Create build dir + run: mkdir build + + - name: Build compiler + uses: addnab/docker-run-action@v3 + id: build-compiler + with: + registry: ghcr.io + image: ${{ env.DOCKER_IMAGE_TEST }} + username: ${{ secrets.GHCR_LOGIN }} + password: ${{ secrets.GHCR_PASSWORD }} + options: >- + -v ${{ github.workspace }}/llvm-project:/llvm-project + -v ${{ github.workspace }}/compiler:/compiler + -v ${{ github.workspace }}/build:/build + -v ${{ github.workspace }}/wheels:/wheels + -v ${{ env.SSH_AUTH_SOCK }}:/ssh.socket + -e SSH_AUTH_SOCK=/ssh.socket + ${{ env.DOCKER_GPU_OPTION }} + shell: bash + run: | + set -e + cd /compiler + rm -rf /build/* + make DATAFLOW_EXECUTION_ENABLED=ON CCACHE=ON Python3_EXECUTABLE=$PYTHON_EXEC BUILD_DIR=/build all python-package build-end-to-end-dataflow-tests + echo "Debug: ccache statistics (after the build):" + ccache -s + + - name: Test compiler + uses: addnab/docker-run-action@v3 + with: + registry: ghcr.io + image: ${{ env.DOCKER_IMAGE_TEST }} + username: ${{ secrets.GHCR_LOGIN }} + password: ${{ secrets.GHCR_PASSWORD }} + options: >- + -v ${{ github.workspace }}/llvm-project:/llvm-project + -v ${{ github.workspace }}/compiler:/compiler + -v ${{ github.workspace }}/build:/build + ${{ env.DOCKER_GPU_OPTION }} + shell: bash + run: | + set -e + cd /compiler + pip install pytest + sed "s/pytest/python -m pytest/g" -i Makefile + mkdir -p /tmp/concrete_compiler/gpu_tests/ + make DATAFLOW_EXECUTION_ENABLED=ON CCACHE=ON Python3_EXECUTABLE=$PYTHON_EXEC BUILD_DIR=/build run-tests run-end-to-end-dataflow-tests + chmod -R ugo+rwx 
/tmp/KeySetCache + + - name: Archive python package + uses: actions/upload-artifact@v3 + with: + name: concrete-compiler.whl + path: build/wheels/concrete_compiler-*-manylinux_${{ env.GLIB_VER }}_x86_64.whl + retention-days: 14 + + - name: Build the documentation + id: build-doc + if: ${{ steps.build-compiler.outcome == 'success' && !cancelled() }} + uses: addnab/docker-run-action@v3 + with: + registry: ghcr.io + image: ${{ env.DOCKER_IMAGE_TEST }} + username: ${{ secrets.GHCR_LOGIN }} + password: ${{ secrets.GHCR_PASSWORD }} + options: >- + -v ${{ github.workspace }}/compiler:/compiler + -v ${{ github.workspace }}/llvm-project:/llvm-project + -v ${{ github.workspace }}/docs:/docs + shell: bash + run: | + set -e + rm -rf /build + make DATAFLOW_EXECUTION_ENABLED=ON CCACHE=ON Python3_EXECUTABLE=$PYTHON_EXEC BUILD_DIR=/build concretecompiler python-bindings doc + cd /docs + pip install -r requirements.txt + pip install -r ../llvm-project/mlir/python/requirements.txt + dnf install -y doxygen + sed "s/sphinx-apidoc/python -m sphinx.ext.apidoc/g" -i Makefile + sed "s/sphinx-build/python -m sphinx.cmd.build/g" -i Makefile + make COMPILER_BUILD_DIR=/build/ doc + + - name: Archive docs artifacts + if: ${{ steps.build-doc.outcome == 'success' && !cancelled() }} + uses: actions/upload-artifact@v3 + with: + name: html-docs + path: docs/_build/html + + PublishDoc: + runs-on: ubuntu-20.04 + needs: BuildAndTest + if: ${{ startsWith(github.ref_name, 'v') || github.ref_name == 'main' }} + steps: + - name: Download Documentation + id: download + uses: actions/download-artifact@v3 + with: + name: html-docs + + - name: Publish Documentation to S3 + id: publish + if: ${{ steps.download.outcome == 'success' && !cancelled() }} + uses: jakejarvis/s3-sync-action@be0c4ab89158cac4278689ebedd8407dd5f35a83 + with: + args: --acl public-read + env: + AWS_S3_BUCKET: ${{ secrets.AWS_PREPROD_REPO_DOCUMENTATION_BUCKET_NAME }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + 
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + AWS_REGION: ${{ secrets.AWS_REGION }} + SOURCE_DIR: '.' + DEST_DIR: 'concrete-compiler/${{ github.ref_name }}' + + - name: Invalidate CloudFront Cache + if: ${{ steps.publish.outcome == 'success' }} + uses: awact/cloudfront-action@8bcfabc7b4bbc0cb8e55e48527f0e3a6d681627c + env: + SOURCE_PATH: '/concrete-compiler/*' + AWS_REGION: ${{ secrets.AWS_REGION }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DISTRIBUTION_ID: ${{ secrets.AWS_REPO_DOCUMENTATION_DISTRIBUTION_ID }} diff --git a/.github/workflows/aws_build_gpu.yml b/.github/workflows/aws_build_gpu.yml new file mode 100644 index 000000000..f9a993b69 --- /dev/null +++ b/.github/workflows/aws_build_gpu.yml @@ -0,0 +1,100 @@ +# Build on an AWS instance (GPU). +name: AwsEc2BuildGpu + +on: + workflow_dispatch: + inputs: + instance_id: + description: 'Instance ID' + type: string + instance_image_id: + description: 'Instance AMI ID' + type: string + instance_type: + description: 'Instance product type' + type: string + runner_name: + description: 'Action runner name' + type: string + request_id: + description: 'Slab request ID' + type: string + matrix_item: + description: 'Build matrix item' + type: string + +env: + DOCKER_IMAGE_TEST: ghcr.io/zama-ai/concrete-compiler + CUDA_PATH: /usr/local/cuda-11.7 + GCC_VERSION: 11 + +jobs: + BuildAndTest: + name: Build and test compiler in EC2 with CUDA support + concurrency: + group: ${{ github.ref }}_${{ github.event.inputs.instance_image_id }}_${{ github.event.inputs.instance_type }} + cancel-in-progress: true + runs-on: ${{ github.event.inputs.runner_name }} + if: ${{ !cancelled() }} + steps: + - name: Instance configuration used + run: | + echo "IDs: ${{ inputs.instance_id }}" + echo "AMI: ${{ inputs.instance_image_id }}" + echo "Type: ${{ inputs.instance_type }}" + echo "Request ID: ${{ inputs.request_id }}" + echo "Matrix item: ${{ 
inputs.matrix_item }}" + + # A SSH private key is required as some dependencies are from private repos + - name: Set up SSH agent + uses: webfactory/ssh-agent@v0.5.2 + with: + ssh-private-key: ${{ secrets.CONCRETE_COMPILER_CI_SSH_PRIVATE }} + + - name: Set up env + # "Install rust" step require root user to have a HOME directory which is not set. + run: | + echo "HOME=/home/ubuntu" >> "${GITHUB_ENV}" + #echo "SSH_AUTH_SOCK=$SSH_AUTH_SOCK)" >> "${GITHUB_ENV}" + echo "SSH_AUTH_SOCK_DIR=$(dirname $SSH_AUTH_SOCK)" >> "${GITHUB_ENV}" + + - name: Fetch repository + uses: actions/checkout@v3 + with: + submodules: recursive + token: ${{ secrets.GH_TOKEN }} + + - name: Install rust + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + override: true + + - name: Create build dir + run: mkdir build + + - name: Build and test compiler + uses: addnab/docker-run-action@v3 + id: build-compiler + with: + registry: ghcr.io + image: ${{ env.DOCKER_IMAGE_TEST }} + username: ${{ secrets.GHCR_LOGIN }} + password: ${{ secrets.GHCR_PASSWORD }} + options: >- + -v ${{ github.workspace }}/llvm-project:/llvm-project + -v ${{ github.workspace }}/compiler:/compiler + -v ${{ github.workspace }}/build:/build + -v ${{ github.workspace }}/wheels:/wheels + -v ${{ env.SSH_AUTH_SOCK }}:/ssh.socket + -e SSH_AUTH_SOCK=/ssh.socket + --gpus all + shell: bash + run: | + set -e + cd /compiler + rm -rf /build/* + mkdir -p /tmp/concrete_compiler/gpu_tests/ + make BINDINGS_PYTHON_ENABLED=OFF CCACHE=ON Python3_EXECUTABLE=$PYTHON_EXEC CUDA_SUPPORT=ON CUDA_PATH=${{ env.CUDA_PATH }} run-end-to-end-tests-gpu + echo "Debug: ccache statistics (after the build):" + ccache -s diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml new file mode 100644 index 000000000..5166d42df --- /dev/null +++ b/.github/workflows/benchmark.yml @@ -0,0 +1,256 @@ +# Run benchmarks on an AWS instance and return parsed results to Slab CI bot. 
+name: Performance benchmarks + +on: + push: + branches: + - 'main' + - 'test-bench-ci' + workflow_dispatch: + inputs: + backend: + description: 'Backend type' + required: true + default: 'cpu' + type: choice + options: + - cpu + - gpu + benchmark-name: + description: 'Benchmark name' + required: true + default: 'standard' + type: choice + options: + - standard + - application + ec2-instance-type: + description: 'EC2 instance type' + required: true + default: 'm6i.metal' + type: choice + options: + - m6i.metal + - c6a.metal + +env: + CARGO_TERM_COLOR: always + RESULTS_FILENAME: parsed_benchmark_results_${{ github.sha }}.json + EC2_INSTANCE_TYPE: ${{ inputs.ec2-instance-type || 'm6i.metal' }} + CUDA_PATH: /usr/local/cuda-11.3 + GCC_VERSION: 8 + BACKEND: ${{ inputs.backend || 'cpu' }} + BENCHMARK_NAME: ${{ inputs.benchmark-name || 'standard' }} + +jobs: + start-runner: + name: Start EC2 runner + runs-on: ubuntu-20.04 + outputs: + label: ${{ steps.start-ec2-runner.outputs.label }} + ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }} + steps: + - name: Sets env vars for m6i.metal + if: ${{ env.EC2_INSTANCE_TYPE == 'm6i.metal' }} + run: | + echo "AWS_REGION=eu-west-3" >> $GITHUB_ENV + echo "EC2_IMAGE_ID=ami-0a24aaee029d1295c" >> $GITHUB_ENV + echo "SUBNET_ID=subnet-a886b4c1" >> $GITHUB_ENV + echo "SECURITY_GROUP_ID=sg-0bf1c1d79c97bc88f" >> $GITHUB_ENV + + - name: Sets env vars for c6a.metal + if: ${{ env.EC2_INSTANCE_TYPE == 'c6a.metal' }} + run: | + echo "AWS_REGION=us-east-1" >> $GITHUB_ENV + echo "EC2_IMAGE_ID=ami-0afb83d80b3b060d8" >> $GITHUB_ENV + echo "SUBNET_ID=subnet-da319dd4" >> $GITHUB_ENV + echo "SECURITY_GROUP_ID=sg-0f8b52622a2669491" >> $GITHUB_ENV + + - name: Sets env vars for p3.2xlarge + if: ${{ env.BACKEND == 'gpu' }} + run: | + echo "AWS_REGION=us-east-1" >> $GITHUB_ENV + echo "EC2_INSTANCE_TYPE=p3.2xlarge" >> $GITHUB_ENV + echo "EC2_IMAGE_ID=ami-03deb184ab492226b" >> $GITHUB_ENV + echo "SUBNET_ID=subnet-8123c9e7" >> $GITHUB_ENV + 
echo "SECURITY_GROUP_ID=sg-0f8b52622a2669491" >> $GITHUB_ENV + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: ${{ env.AWS_REGION }} + - name: Start EC2 runner + id: start-ec2-runner + uses: machulav/ec2-github-runner@v2 + with: + mode: start + github-token: ${{ secrets.CONCRETE_ACTIONS_TOKEN }} + ec2-image-id: ${{ env.EC2_IMAGE_ID }} + ec2-instance-type: ${{ env.EC2_INSTANCE_TYPE }} + subnet-id: ${{ env.SUBNET_ID }} + security-group-id: ${{ env.SECURITY_GROUP_ID }} + aws-resource-tags: > + [ + {"Key": "Name", "Value": "compiler-benchmarks-github"} + ] + + run-benchmarks: + name: Execute end-to-end benchmarks in EC2 + runs-on: ${{ needs.start-runner.outputs.label }} + if: ${{ !cancelled() }} + needs: start-runner + steps: + - name: Get benchmark date + run: | + echo "BENCH_DATE=$(date --iso-8601=seconds)" >> "${GITHUB_ENV}" + + # SSH private key is required as some dependencies are from private repos + - uses: webfactory/ssh-agent@v0.5.2 + with: + ssh-private-key: ${{ secrets.CONCRETE_COMPILER_CI_SSH_PRIVATE }} + + - name: Fetch submodules + uses: actions/checkout@v3 + with: + fetch-depth: 0 + submodules: recursive + token: ${{ secrets.GH_TOKEN }} + + - name: Set up home + # "Install rust" step require root user to have a HOME directory which is not set. 
+ run: | + echo "HOME=/home/ubuntu" >> "${GITHUB_ENV}" + + - name: Export specific variables (CPU) + if: ${{ env.BACKEND == 'cpu' }} + run: | + echo "CUDA_SUPPORT=OFF" >> "${GITHUB_ENV}" + echo "BENCHMARK_TARGET=run-cpu-benchmarks" >> "${GITHUB_ENV}" + + - name: Export specific variables (GPU) + if: ${{ env.BACKEND == 'gpu' }} + run: | + echo "CUDA_SUPPORT=ON" >> "${GITHUB_ENV}" + echo "BENCHMARK_TARGET=run-gpu-benchmarks" >> "${GITHUB_ENV}" + echo "CUDA_PATH=$CUDA_PATH" >> "${GITHUB_ENV}" + echo "$CUDA_PATH/bin" >> "${GITHUB_PATH}" + echo "LD_LIBRARY_PATH=$CUDA_PATH/lib:$LD_LIBRARY_PATH" >> "${GITHUB_ENV}" + echo "CC=/usr/bin/gcc-${{ env.GCC_VERSION }}" >> "${GITHUB_ENV}" + echo "CXX=/usr/bin/g++-${{ env.GCC_VERSION }}" >> "${GITHUB_ENV}" + echo "CUDAHOSTCXX=/usr/bin/g++-${{ env.GCC_VERSION }}" >> "${GITHUB_ENV}" + echo "CUDACXX=$CUDA_PATH/bin/nvcc" >> "${GITHUB_ENV}" + + - name: Setup environment variable for benchmark target + if: ${{ env.BENCHMARK_NAME != 'standard' }} + run: | + echo "BENCHMARK_TARGET=${{ env.BENCHMARK_TARGET }}-${{ env.BENCHMARK_NAME }}" >> "${GITHUB_ENV}" + + - name: Install rust + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + override: true + + - name: Build compiler benchmarks + run: | + set -e + cd compiler + make BINDINGS_PYTHON_ENABLED=OFF CUDA_SUPPORT=${{ env.CUDA_SUPPORT }} build-benchmarks + + - name: Download KeySetCache + if: ${{ !contains(github.head_ref, 'newkeysetcache') }} + continue-on-error: true + run: | + cd compiler + GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }} make keysetcache_ci_populated + + - name: Run end-to-end benchmarks + run: | + set -e + cd compiler + make ${{ env.BENCHMARK_TARGET }} + + - name: Upload raw results artifact + uses: actions/upload-artifact@v3 + with: + name: ${{ github.sha }}_raw + path: compiler/benchmarks_results.json + + - name: Parse results + shell: bash + run: | + COMMIT_DATE="$(git --no-pager show -s --format=%cd --date=iso8601-strict ${{ github.sha }})" + COMMIT_HASH="$(git 
describe --tags --dirty)" + python3 ./ci/benchmark_parser.py compiler/benchmarks_results.json ${{ env.RESULTS_FILENAME }} \ + --database compiler_benchmarks \ + --hardware ${{ env.EC2_INSTANCE_TYPE }} \ + --project-version ${COMMIT_HASH} \ + --branch ${{ github.ref_name }} \ + --commit-date ${COMMIT_DATE} \ + --bench-date "${{ env.BENCH_DATE }}" + + - name: Upload parsed results artifact + uses: actions/upload-artifact@v3 + with: + name: ${{ github.sha }} + path: ${{ env.RESULTS_FILENAME }} + + - name: Checkout Slab repo + uses: actions/checkout@v3 + with: + repository: zama-ai/slab + path: slab + token: ${{ secrets.GH_TOKEN }} + + - name: Send data to Slab + shell: bash + run: | + echo "Computing HMac on downloaded artifact" + SIGNATURE="$(slab/scripts/hmac_calculator.sh ${{ env.RESULTS_FILENAME }} '${{ secrets.JOB_SECRET }}')" + echo "Sending results to Slab..." + curl -v -k \ + -H "Content-Type: application/json" \ + -H "X-Slab-Repository: ${{ github.repository }}" \ + -H "X-Slab-Command: store_data" \ + -H "X-Hub-Signature-256: sha256=${SIGNATURE}" \ + -d @${{ env.RESULTS_FILENAME }} \ + ${{ secrets.SLAB_URL }} + + stop-runner: + name: Stop EC2 runner + needs: + - start-runner + - run-benchmarks + runs-on: ubuntu-20.04 + if: ${{ always() && (needs.start-runner.result != 'skipped') }} + steps: + - name: Sets AWS region for m6i.metal + if: ${{ env.EC2_INSTANCE_TYPE == 'm6i.metal' }} + run: | + echo "AWS_REGION=eu-west-3" >> $GITHUB_ENV + + - name: Sets AWS region for c6a.metal + if: ${{ env.EC2_INSTANCE_TYPE == 'c6a.metal' }} + run: | + echo "AWS_REGION=us-east-1" >> $GITHUB_ENV + + - name: Sets AWS region for p3.2xlarge + if: ${{ env.BACKEND == 'gpu' }} + run: | + echo "AWS_REGION=us-east-1" >> $GITHUB_ENV + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v1 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: ${{ env.AWS_REGION }} + - name: 
Stop EC2 runner + uses: machulav/ec2-github-runner@v2 + with: + github-token: ${{ secrets.CONCRETE_ACTIONS_TOKEN }} + label: ${{ needs.start-runner.outputs.label }} + ec2-instance-id: ${{ needs.start-runner.outputs.ec2-instance-id }} + mode: stop diff --git a/.github/workflows/block_merge.yml b/.github/workflows/block_merge.yml new file mode 100644 index 000000000..d25521f25 --- /dev/null +++ b/.github/workflows/block_merge.yml @@ -0,0 +1,18 @@ +name: Block PR merge + +on: + pull_request: + types: [opened, synchronize, reopened] + +jobs: + BlockMerge: + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Check Commit to Squash + run: | + set -e + git log origin/${{ github.base_ref }}..origin/${{ github.head_ref }} --format=%s | ( ! grep -e "^f [0-9a-f]\+" -q ) diff --git a/.github/workflows/build_on_pr_push.yml b/.github/workflows/build_on_pr_push.yml new file mode 100644 index 000000000..a8c5b398a --- /dev/null +++ b/.github/workflows/build_on_pr_push.yml @@ -0,0 +1,18 @@ +# Trigger an AWS build each time commits are pushed to a pull request. 
+name: PR AWS build trigger + +on: + pull_request: + +jobs: + test: + runs-on: ubuntu-latest + permissions: + pull-requests: write + steps: + - uses: mshick/add-pr-comment@v2 + with: + allow-repeats: true + message: | + @slab-ci cpu-build + @slab-ci gpu-build diff --git a/.github/workflows/format_and_linting.yml b/.github/workflows/format_and_linting.yml new file mode 100644 index 000000000..af782030c --- /dev/null +++ b/.github/workflows/format_and_linting.yml @@ -0,0 +1,45 @@ +name: Check format and run linters + +on: + push: + branches: main + pull_request: + types: [opened, synchronize, reopened] + +jobs: + FormattingAndLinting: + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v3 + with: + submodules: recursive + token: ${{ secrets.GH_TOKEN }} + - name: Format with clang-format (Cpp) + run: sudo apt install moreutils && .github/workflows/scripts/format_cpp.sh + - name: Format with cmake-format (Cmake) + run: pip3 install cmakelang && .github/workflows/scripts/format_cmake.sh + - name: Format with black (Python) + run: | + cd compiler + pip install -r lib/Bindings/Python/requirements_dev.txt + make check-python-format + - name: Lint with pylint (Python) + run: | + cd compiler + # compiler requirements to lint + pip install numpy + make python-lint + - name: Format with rustfmt (Rust) + run: | + cd compiler + make check-rust-format + - name: Linelint + uses: fernandrone/linelint@0.0.4 + id: linelint + + CheckLicense: + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v3 + - name: Check if sources include the license header + run: .github/workflows/scripts/check_for_license.sh diff --git a/.github/workflows/llvm-compatibility.yml b/.github/workflows/llvm-compatibility.yml new file mode 100644 index 000000000..3b4f7dcdc --- /dev/null +++ b/.github/workflows/llvm-compatibility.yml @@ -0,0 +1,76 @@ +name: LLVM Compatibility + +on: + workflow_dispatch: + schedule: + - cron: '0 00 * * 1' # Every Monday @ 00:00 UTC + +jobs: + build_test: + name: 
Build & test the Docker image with latest LLVM + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + with: + submodules: recursive + token: ${{ secrets.GH_TOKEN }} + + - name: Rebase LLVM + run: | + cd llvm-project + git remote add base git@github.com:llvm/llvm-project.git + git fetch base + git rebase base/main + + - name: Log LLVM commit + run: echo "LLVM commit" && cd ${{ github.workspace }}/llvm-project && git log -1 + + - name: login + run: echo "${{ secrets.GHCR_PASSWORD }}" | docker login -u ${{ secrets.GHCR_LOGIN }} --password-stdin ghcr.io + + - name: Build + run: docker image build --no-cache -t compiler-latest-llvm -f builders/Dockerfile.concrete-compiler-env . + + - name: Test compiler with latest LLVM + uses: addnab/docker-run-action@v3 + with: + image: compiler-latest-llvm + run: | + cd /compiler + pip install pytest + make BUILD_DIR=/build run-tests + + - name: Update Custom LLVM + uses: ad-m/github-push-action@master + with: + github_token: ${{ secrets.GH_TOKEN }} + branch: main + force: true + repository: zama-ai/concrete-compiler-internal-llvm-project + + - name: Update LLVM submodule + id: update-llvm + run: | + git submodule update --remote + echo "::set-output name=commit::$(cd llvm-project && git rev-parse --short HEAD)" + + - name: Commit latest LLVM version + uses: EndBug/add-and-commit@v7 + with: + add: ./llvm-project + default_author: github_actions + message: "chore: update LLVM to ${{ steps.update-llvm.outputs.commit }}" + + - name: Send Slack Notification + if: ${{ always() }} + continue-on-error: true + uses: rtCamp/action-slack-notify@12e36fc18b0689399306c2e0b3e0f2978b7f1ee7 + env: + SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }} + SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png + SLACK_COLOR: ${{ job.status }} + SLACK_MESSAGE: "Compatibility check with latest LLVM finished with status ${{ job.status }} \ + (${{ env.ACTION_RUN_URL }})" + SLACK_USERNAME: ${{ secrets.BOT_USERNAME }} + 
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} diff --git a/.github/workflows/macos_build.yml b/.github/workflows/macos_build.yml new file mode 100644 index 000000000..54fbc4fe7 --- /dev/null +++ b/.github/workflows/macos_build.yml @@ -0,0 +1,81 @@ +# Perform a build on MacOS platform with M1 chip. +name: MacOsBuild + +on: + push: + branches: + - main + pull_request: + +jobs: + BuildAndTestMacOS: + runs-on: macos-11 + steps: + # A SSH private key is required as some dependencies are from private repos + - uses: webfactory/ssh-agent@v0.6.0 + with: + ssh-private-key: ${{ secrets.CONCRETE_COMPILER_CI_SSH_PRIVATE }} + + - uses: actions/checkout@v3 + with: + submodules: recursive + token: ${{ secrets.GH_TOKEN }} + + - name: Install rust + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + + - name: Install Deps + run: | + brew install ninja ccache + pip3.10 install numpy pybind11==2.8 wheel delocate + pip3.10 install pytest + + - name: Cache compilation (push) + if: github.event_name == 'push' + uses: actions/cache@v3 + with: + path: /Users/runner/Library/Caches/ccache + key: ${{ runner.os }}-compilation-cache-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-compilation-cache- + + - name: Cache compilation (pull_request) + if: github.event_name == 'pull_request' + uses: actions/cache@v3 + with: + path: /Users/runner/Library/Caches/ccache + key: ${{ runner.os }}-compilation-cache-${{ github.event.pull_request.base.sha }} + restore-keys: | + ${{ runner.os }}-compilation-cache- + + - name: Get tmpdir path + if: github.event_name == 'push' + id: tmpdir-path + run: echo "::set-output name=TMPDIR_PATH::$TMPDIR" + + # We do run run-check-tests as part of the build, as they aren't that costly + # and will at least give minimum confidence that the compiler works in PRs + - name: Build + run: | + set -e + cd compiler + echo "Debug: ccache statistics (prior to the build):" + ccache -s + make Python3_EXECUTABLE=$(which python3.10) all run-check-tests python-package + 
echo "Debug: ccache statistics (after the build):" + ccache -s + + - name: Test + if: github.event_name == 'push' + run: | + set -e + cd compiler + echo "Debug: ccache statistics (prior to the tests):" + ccache -s + export CONCRETE_COMPILER_DATAFLOW_EXECUTION_ENABLED=OFF + pip3.10 install build/wheels/*macosx*.whl + make Python3_EXECUTABLE=$(which python3.10) run-tests + echo "Debug: ccache statistics (after the tests):" + ccache -s diff --git a/.github/workflows/ml_benchmark_subset.yml b/.github/workflows/ml_benchmark_subset.yml new file mode 100644 index 000000000..7c3f2c8bb --- /dev/null +++ b/.github/workflows/ml_benchmark_subset.yml @@ -0,0 +1,135 @@ +# Run one of the ML benchmarks on an AWS instance and return parsed results to Slab CI bot. +name: Application benchmarks + +on: + workflow_dispatch: + inputs: + instance_id: + description: 'Instance ID' + type: string + instance_image_id: + description: 'Instance AMI ID' + type: string + instance_type: + description: 'Instance product type' + type: string + runner_name: + description: 'Action runner name' + type: string + request_id: + description: 'Slab request ID' + type: string + matrix_item: + description: 'Build matrix item' + type: string + + +env: + CARGO_TERM_COLOR: always + RESULTS_FILENAME: parsed_benchmark_results_${{ github.sha }}.json + +jobs: + run-ml-benchmarks: + name: Execute ML benchmarks subset in EC2 + runs-on: ${{ github.event.inputs.runner_name }} + if: ${{ !cancelled() }} + steps: + - name: Instance configuration used + run: | + echo "IDs: ${{ inputs.instance_id }}" + echo "AMI: ${{ inputs.instance_image_id }}" + echo "Type: ${{ inputs.instance_type }}" + echo "Request ID: ${{ inputs.request_id }}" + echo "Matrix item: ${{ inputs.matrix_item }}" + + - name: Get benchmark date + run: | + echo "BENCH_DATE=$(date --iso-8601=seconds)" >> "${GITHUB_ENV}" + + # SSH private key is required as some dependencies are from private repos + - uses: webfactory/ssh-agent@v0.5.2 + with: + ssh-private-key: 
${{ secrets.CONCRETE_COMPILER_CI_SSH_PRIVATE }} + + - name: Fetch submodules + uses: actions/checkout@v3 + with: + fetch-depth: 0 + submodules: recursive + token: ${{ secrets.GH_TOKEN }} + + - name: Set up home + # "Install rust" step require root user to have a HOME directory which is not set. + run: | + echo "HOME=/home/ubuntu" >> "${GITHUB_ENV}" + + - name: Install rust + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + override: true + + - name: Build compiler and ML benchmarks + run: | + set -e + cd compiler + make BINDINGS_PYTHON_ENABLED=OFF build-mlbench + + - name: Download KeySetCache + if: ${{ !contains(github.head_ref, 'newkeysetcache') }} + continue-on-error: true + run: | + cd compiler + GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }} make keysetcache_ci_populated + + - name: Run ML benchmarks + run: | + set -e + cd compiler + make BINDINGS_PYTHON_ENABLED=OFF ML_BENCH_SUBSET_ID=${{ inputs.matrix_item }} run-mlbench-subset + + - name: Upload raw results artifact + uses: actions/upload-artifact@v3 + with: + name: ${{ github.sha }}_raw + path: compiler/benchmarks_results.json + + - name: Parse results + shell: bash + run: | + COMMIT_DATE="$(git --no-pager show -s --format=%cd --date=iso8601-strict ${{ github.sha }})" + COMMIT_HASH="$(git describe --tags --dirty)" + python3 ./ci/benchmark_parser.py compiler/benchmarks_results.json ${{ env.RESULTS_FILENAME }} \ + --database compiler_benchmarks \ + --hardware ${{ inputs.instance_type }} \ + --project-version ${COMMIT_HASH} \ + --branch ${{ github.ref_name }} \ + --commit-date ${COMMIT_DATE} \ + --bench-date "${{ env.BENCH_DATE }}" + + - name: Upload parsed results artifact + uses: actions/upload-artifact@v3 + with: + name: ${{ github.sha }} + path: ${{ env.RESULTS_FILENAME }} + + - name: Checkout Slab repo + uses: actions/checkout@v3 + with: + repository: zama-ai/slab + path: slab + token: ${{ secrets.GH_TOKEN }} + + - name: Send data to Slab + shell: bash + run: | + echo "Computing HMac on downloaded 
artifact" + SIGNATURE="$(slab/scripts/hmac_calculator.sh ${{ env.RESULTS_FILENAME }} '${{ secrets.JOB_SECRET }}')" + echo "Sending results to Slab..." + curl -v -k \ + -H "Content-Type: application/json" \ + -H "X-Slab-Repository: ${{ github.repository }}" \ + -H "X-Slab-Command: store_data" \ + -H "X-Hub-Signature-256: sha256=${SIGNATURE}" \ + -d @${{ env.RESULTS_FILENAME }} \ + ${{ secrets.SLAB_URL }} diff --git a/.github/workflows/prepare_release.yml b/.github/workflows/prepare_release.yml new file mode 100644 index 000000000..d344a55ef --- /dev/null +++ b/.github/workflows/prepare_release.yml @@ -0,0 +1,371 @@ +# Prepare release packages for GNU/Linux and MacOS on private stores. +name: Prepare Release + +on: + push: + tags: + - 'v[0-9]+.*' + +env: + DOCKER_IMAGE_TEST: ghcr.io/zama-ai/concrete-compiler + GLIB_VER: 2_28 + +jobs: + WaitOnAllWorkflows: + runs-on: ubuntu-latest + if: ${{ startsWith(github.ref_name, 'v') }} + steps: + - uses: actions/checkout@v2 + - uses: ahmadnassri/action-workflow-run-wait@v1 + + CreateRelease: + runs-on: ubuntu-20.04 + needs: WaitOnAllWorkflows + outputs: + upload_url: ${{ steps.release.outputs.upload_url }} + release_id: ${{ steps.release.outputs.id }} + steps: + - name: Release + id: release + uses: softprops/action-gh-release@v1 + with: + token: ${{ secrets.GH_TOKEN_RELEASE }} + draft: true + prerelease: true + generate_release_notes: true + + BuildAndPushPythonPackagesLinux: + runs-on: ubuntu-20.04 + strategy: + matrix: + include: + - python: 37 + python_dir: "cp37-cp37m" + - python: 38 + python_dir: "cp38-cp38" + - python: 39 + python_dir: "cp39-cp39" + - python: 310 + python_dir: "cp310-cp310" + outputs: + python-package-name-linux-py37: ${{ steps.set-output-wheel-linux.outputs.ASSET_NAME_PY37 }} + python-package-name-linux-py38: ${{ steps.set-output-wheel-linux.outputs.ASSET_NAME_PY38 }} + python-package-name-linux-py39: ${{ steps.set-output-wheel-linux.outputs.ASSET_NAME_PY39 }} + python-package-name-linux-py310: ${{ 
steps.set-output-wheel-linux.outputs.ASSET_NAME_PY310 }} + needs: CreateRelease + steps: + # A SSH private key is required as some dependencies are from private repos + - uses: webfactory/ssh-agent@v0.5.2 + with: + ssh-private-key: ${{ secrets.CONCRETE_COMPILER_CI_SSH_PRIVATE }} + + - uses: actions/checkout@v3 + with: + submodules: recursive + token: ${{ secrets.GH_TOKEN }} + + - name: Update Python Version + run: cd compiler && make update-python-version + + - name: Login to Github Container Registry + run: echo "${{ secrets.GHCR_PASSWORD }}" | docker login -u ${{ secrets.GHCR_LOGIN }} --password-stdin ghcr.io + + - name: Set up ssh auth in docker + run: | + echo "SSH_AUTH_SOCK_DIR=$(dirname $SSH_AUTH_SOCK)" >> "${GITHUB_ENV}" + + - name: Build Wheel + uses: addnab/docker-run-action@v3 + with: + registry: ghcr.io + image: ${{ env.DOCKER_IMAGE_TEST }} + username: ${{ secrets.GHCR_LOGIN }} + password: ${{ secrets.GHCR_PASSWORD }} + options: >- + -v ${{ github.workspace }}/llvm-project:/llvm-project + -v ${{ github.workspace }}/compiler:/compiler + -v ${{ github.workspace }}/wheels:/wheels + -v ${{ env.SSH_AUTH_SOCK }}:/ssh.socket + -e SSH_AUTH_SOCK=/ssh.socket + shell: bash + run: | + set -e + cd /compiler + rm -rf /build + export PYTHON_EXEC=/opt/python/${{ matrix.python_dir }}/bin/python + $PYTHON_EXEC -m pip install -r /llvm-project/mlir/python/requirements.txt + # setup env variable for wheel building + export CONCRETE_COMPILER_Python3_EXECUTABLE=$PYTHON_EXEC + export CONCRETE_COMPILER_BUILD_DIR=/build + export CONCRETE_COMPILER_CUDA_SUPPORT=ON + export PATH=/opt/python/${{ matrix.python_dir }}/bin:$PATH + make python-package + cp build/wheels/*manylinux_${{ env.GLIB_VER }}_x86_64.whl /wheels + echo "Debug: ccache statistics (after the build):" + ccache -s + + - name: Set Outputs + id: set-output-wheel-linux + run: | + echo "::set-output name=ASSET_NAME::$(find ${{ github.workspace }}/wheels/ -name '*manylinux*.whl' | rev |cut -d "/" -f 1 |rev )" + # used later 
for python package test + echo "::set-output name=ASSET_NAME_PY${{ matrix.python }}::$(find ${{ github.workspace }}/wheels/ -name '*manylinux*.whl' | rev |cut -d "/" -f 1 |rev )" + + - name: Upload Python Package + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GH_TOKEN_RELEASE }} + with: + upload_url: ${{ needs.CreateRelease.outputs.upload_url }} + asset_path: ${{ github.workspace }}/wheels/${{ steps.set-output-wheel-linux.outputs.ASSET_NAME }} + asset_name: ${{ steps.set-output-wheel-linux.outputs.ASSET_NAME }} + asset_content_type: application/zip + + BuildAndPushTarballLinux: + runs-on: ubuntu-20.04 + needs: CreateRelease + steps: + # A SSH private key is required as some dependencies are from private repos + - uses: webfactory/ssh-agent@v0.6.0 + with: + ssh-private-key: ${{ secrets.CONCRETE_COMPILER_CI_SSH_PRIVATE }} + + - uses: actions/checkout@v3 + with: + submodules: recursive + token: ${{ secrets.GH_TOKEN }} + + - name: Login to Github Container Registry + run: echo "${{ secrets.GHCR_PASSWORD }}" | docker login -u ${{ secrets.GHCR_LOGIN }} --password-stdin ghcr.io + + - name: Build Tarball + uses: addnab/docker-run-action@v3 + with: + registry: ghcr.io + image: ${{ env.DOCKER_IMAGE_TEST }} + username: ${{ secrets.GHCR_LOGIN }} + password: ${{ secrets.GHCR_PASSWORD }} + options: >- + -v ${{ github.workspace }}/llvm-project:/llvm-project + -v ${{ github.workspace }}/compiler:/compiler + -v ${{ github.workspace }}/tarballs:/tarballs + -v ${{ github.workspace }}/.github/workflows/assets/Installation.md:/Installation.md + shell: bash + run: | + set -e + cd /compiler + rm -rf /build + make BINDINGS_PYTHON_ENABLED=OFF BUILD_DIR=/build INSTALL_PREFIX=/tarballs/ install + echo "Debug: ccache statistics (after the build):" + ccache -s + # package installation file and make tarball + cp /Installation.md /tarballs/concretecompiler/ + cd /tarballs && tar -czvf concretecompiler.tar.gz concretecompiler + + - name: Tag Tarball + id: tag-tarball + 
run: | + TAG="$(git describe --tags --abbrev=0)" + + sudo cp "${{ github.workspace }}/tarballs/concretecompiler.tar.gz" "${{ github.workspace }}/tarballs/concretecompiler-${TAG}-x86_64-linux-gnu.tar.gz" + echo "::set-output name=ASSET_NAME::concretecompiler-${TAG}-x86_64-linux-gnu.tar.gz" + + - name: Upload Tarball + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GH_TOKEN_RELEASE }} + with: + upload_url: ${{ needs.CreateRelease.outputs.upload_url }} + asset_path: ${{ github.workspace }}/tarballs/${{ steps.tag-tarball.outputs.ASSET_NAME }} + asset_name: ${{ steps.tag-tarball.outputs.ASSET_NAME }} + asset_content_type: application/tar+gzip + + BuildAndPushPackagesMacOS: + needs: CreateRelease + runs-on: macos-11 + strategy: + matrix: + python: [ '3.8', '3.9', '3.10' ] + outputs: + python-package-name-macos-py38: ${{ steps.build-wheel-macos.outputs.ASSET_NAME_PY38 }} + python-package-name-macos-py39: ${{ steps.build-wheel-macos.outputs.ASSET_NAME_PY39 }} + python-package-name-macos-py310: ${{ steps.build-wheel-macos.outputs.ASSET_NAME_PY310 }} + steps: + # A SSH private key is required as some dependencies are from private repos + - uses: webfactory/ssh-agent@v0.5.0 + with: + ssh-private-key: ${{ secrets.CONCRETE_COMPILER_CI_SSH_PRIVATE }} + + - uses: actions/checkout@v3 + with: + submodules: recursive + token: ${{ secrets.GH_TOKEN }} + + - name: Install Rust + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + + - name: Concrete-Optimizer + run: | + cd compiler + make concrete-optimizer-lib + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python }} + + - name: Install Deps + run: | + brew install ninja ccache + pip install numpy pybind11==2.8 wheel delocate + + - name: Update Python Version + run: cd compiler && make update-python-version + + - name: Use Compilation Cache + uses: actions/cache@v3 + with: + path: /Users/runner/Library/Caches/ccache + key: ${{ runner.os 
}}-compilation-cache-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-compilation-cache- + + - name: Build + id: build-wheel-macos + run: | + cd compiler + make Python3_EXECUTABLE=$(which python) DATAFLOW_EXECUTION_ENABLED=OFF python-bindings + export CONCRETE_COMPILER_DATAFLOW_EXECUTION_ENABLED=OFF + make python-package + cp -R build/wheels ${{ github.workspace }}/wheels + cd ${{ github.workspace }}/wheels/ + WHEEL=$(ls *macosx*.whl) + echo "::set-output name=ASSET_NAME::$WHEEL" + # used later for python package test + echo "::set-output name=ASSET_NAME_PY$(echo ${{ matrix.python }} |tr -d '.')::$WHEEL" + + - name: Upload Python Package + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GH_TOKEN_RELEASE }} + with: + upload_url: ${{ needs.CreateRelease.outputs.upload_url }} + asset_path: ${{ github.workspace }}/wheels/${{ steps.build-wheel-macos.outputs.ASSET_NAME }} + asset_name: ${{ steps.build-wheel-macos.outputs.ASSET_NAME }} + asset_content_type: application/zip + + - name: Build tarball + if: matrix.python == '3.8' + id: build-mac-tarball + run: | + cd compiler + make INSTALL_PREFIX=./tarballs/ install + cp ../.github/workflows/assets/Installation.md ./tarballs/concretecompiler/ + TAG=$(git describe --tags --abbrev=0) + cd ./tarballs && tar -czvf "concretecompiler-${TAG}-x86_64-macos-catalina.tar.gz" concretecompiler + echo "::set-output name=ASSET_NAME::concretecompiler-${TAG}-x86_64-macos-catalina.tar.gz" + + - name: Upload Tarball + if: matrix.python == '3.8' + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GH_TOKEN_RELEASE }} + with: + upload_url: ${{ needs.CreateRelease.outputs.upload_url }} + asset_path: ${{ github.workspace }}/compiler/tarballs/${{ steps.build-mac-tarball.outputs.ASSET_NAME }} + asset_name: ${{ steps.build-mac-tarball.outputs.ASSET_NAME }} + asset_content_type: application/tar+gzip + + TestPythonPackageLinux: + runs-on: ubuntu-20.04 + needs: BuildAndPushPythonPackagesLinux + 
strategy: + matrix: + include: + - python: '3.7' + filename-index: 'python-package-name-linux-py37' + - python: '3.8' + filename-index: 'python-package-name-linux-py38' + - python: '3.9' + filename-index: 'python-package-name-linux-py39' + - python: '3.10' + filename-index: 'python-package-name-linux-py310' + steps: + - uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python }} + + - name: Extract Package Filename + id: extract-filename + run: echo "::set-output name=FILE_NAME::$(echo '${{ toJson(needs.BuildAndPushPythonPackagesLinux.outputs) }}' | jq '.["${{ matrix.filename-index }}"]' | tr -d '\"' )" + + - name: Download and Install Package + run: | + FILE_NAME=$(curl -s -u "zama-bot:${{ secrets.GH_TOKEN_RELEASE }}" \ + https://api.github.com/repos/${{ github.repository }}/releases | \ + jq 'map(select(.tag_name == "${{ github.ref_name }}"))' | \ + jq '.[0].assets' | \ + jq 'map(select(.name == "${{ steps.extract-filename.outputs.FILE_NAME }}" ))' | \ + jq '.[].id') + + wget --auth-no-challenge --header='Accept:application/octet-stream' \ + "https://${{ secrets.GH_TOKEN_RELEASE }}:@api.github.com/repos/${{ github.repository }}/releases/assets/${FILE_NAME}" \ + -O ${{ steps.extract-filename.outputs.FILE_NAME }} + pip install ${{ steps.extract-filename.outputs.FILE_NAME }} + + - name: Test + run: | + cd compiler + pip install pytest + pytest -vs tests/python + + TestPythonPackageMacOS: + runs-on: macos-11 + needs: BuildAndPushPackagesMacOS + env: + SYSTEM_VERSION_COMPAT: 0 + strategy: + matrix: + include: + - python: '3.8' + filename-index: 'python-package-name-macos-py38' + - python: '3.9' + filename-index: 'python-package-name-macos-py39' + - python: '3.10' + filename-index: 'python-package-name-macos-py310' + steps: + - uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python }} + + - name: Extract Package Filename + id: 
extract-filename + run: echo "::set-output name=FILE_NAME::$(echo '${{ toJson(needs.BuildAndPushPackagesMacOS.outputs) }}' | jq '.["${{ matrix.filename-index }}"]' | tr -d '\"' )" + + - name: Download and Install Package + run: | + FILE_NAME=$(curl -s -u "zama-bot:${{ secrets.GH_TOKEN_RELEASE }}" \ + https://api.github.com/repos/${{ github.repository }}/releases | \ + jq 'map(select(.tag_name == "${{ github.ref_name }}"))' | \ + jq '.[0].assets' | \ + jq 'map(select(.name == "${{ steps.extract-filename.outputs.FILE_NAME }}" ))' | \ + jq '.[].id') + + wget --auth-no-challenge --header='Accept:application/octet-stream' \ + "https://${{ secrets.GH_TOKEN_RELEASE }}:@api.github.com/repos/${{ github.repository }}/releases/assets/${FILE_NAME}" \ + -O ${{ steps.extract-filename.outputs.FILE_NAME }} + pip install ${{ steps.extract-filename.outputs.FILE_NAME }} + + - name: Test + run: | + cd compiler + pip install pytest + pytest -vs -m "not parallel" tests/python diff --git a/.github/workflows/publish_docker_images.yml b/.github/workflows/publish_docker_images.yml new file mode 100644 index 000000000..1f2b37bca --- /dev/null +++ b/.github/workflows/publish_docker_images.yml @@ -0,0 +1,127 @@ +# Build and publish Docker images for different applications using AWS EC2. 
+name: Publish Docker Images + +on: + workflow_dispatch: + inputs: + instance_id: + description: 'Instance ID' + type: string + instance_image_id: + description: 'Instance AMI ID' + type: string + instance_type: + description: 'Instance product type' + type: string + runner_name: + description: 'Action runner name' + type: string + request_id: + description: 'Slab request ID' + type: string + matrix_item: + description: 'Build matrix item' + type: string + +env: + THIS_FILE: .github/workflows/continuous-integration.yml + +jobs: + BuildAndPushDockerImages: + needs: [BuildAndPublishHPXDockerImage, BuildAndPublishCUDADockerImage] + name: Build & Publish Docker Images + runs-on: ${{ github.event.inputs.runner_name }} + strategy: + matrix: + include: + - name: test-env + image: ghcr.io/zama-ai/concrete-compiler + dockerfile: builders/Dockerfile.concrete-compiler-env + + steps: + # SSH private key is required as some dependencies are from private repos + - uses: webfactory/ssh-agent@v0.6.0 + with: + ssh-private-key: ${{ secrets.CONCRETE_COMPILER_CI_SSH_PRIVATE }} + + - uses: actions/checkout@v3 + with: + submodules: recursive + token: ${{ secrets.GH_TOKEN }} + + - name: Login to Registry + run: echo "${{ secrets.GHCR_PASSWORD }}" | docker login -u ${{ secrets.GHCR_LOGIN }} --password-stdin ghcr.io + + # label was initially a need from the frontend CI + - name: Build Image + run: | + DOCKER_BUILDKIT=1 docker build --no-cache \ + --ssh default=${{ env.SSH_AUTH_SOCK }} \ + --label "commit-sha=${{ github.sha }}" -t ${{ matrix.image }} -f ${{ matrix.dockerfile }} . 
+ + - name: Tag and Publish Image + run: | + docker image tag ${{ matrix.image }} ${{ matrix.image }}:${{ github.sha }} + docker image push ${{ matrix.image }}:latest + docker image push ${{ matrix.image }}:${{ github.sha }} + + - name: Tag and Publish Release Image + if: startsWith(github.ref, 'refs/tags/v') + run: | + docker image tag ${{ matrix.image }} ${{ matrix.image }}:${{ github.ref_name }} + docker image push ${{ matrix.image }}:${{ github.ref_name }} + + BuildAndPublishHPXDockerImage: + name: Build & Publish HPX Docker Image + runs-on: ubuntu-20.04 + env: + IMAGE: ghcr.io/zama-ai/hpx + + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Get changed files + id: changed-files + uses: tj-actions/changed-files@v32 + + - name: Login + id: login + if: contains(steps.changed-files.outputs.modified_files, 'builders/Dockerfile.hpx-env') || contains(steps.changed-files.outputs.modified_files, env.THIS_FILE) + run: echo "${{ secrets.GHCR_PASSWORD }}" | docker login -u ${{ secrets.GHCR_LOGIN }} --password-stdin ghcr.io + + - name: Build Tag and Publish + if: ${{ steps.login.conclusion != 'skipped' }} + run: | + docker build -t "${IMAGE}" -f builders/Dockerfile.hpx-env . 
+ docker push "${IMAGE}:latest" + + BuildAndPublishCUDADockerImage: + name: Build & Publish CUDA Docker Image + runs-on: ubuntu-20.04 + env: + IMAGE: ghcr.io/zama-ai/cuda + TAG: "11-7" + + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Get changed files + id: changed-files + uses: tj-actions/changed-files@v32 + + - name: Login + id: login + if: contains(steps.changed-files.outputs.modified_files, 'builders/Dockerfile.cuda-env') || contains(steps.changed-files.outputs.modified_files, env.THIS_FILE) + run: echo "${{ secrets.GHCR_PASSWORD }}" | docker login -u ${{ secrets.GHCR_LOGIN }} --password-stdin ghcr.io + + - name: Build Tag and Publish + if: ${{ steps.login.conclusion != 'skipped' }} + run: | + docker build -t "${IMAGE}" -f builders/Dockerfile.cuda-env . + docker image tag "${IMAGE}" "${IMAGE}:${TAG}" + docker push "${IMAGE}:latest" + docker push "${IMAGE}:${TAG}" diff --git a/.github/workflows/push-python-packages.yml b/.github/workflows/push-python-packages.yml new file mode 100644 index 000000000..1f176fafc --- /dev/null +++ b/.github/workflows/push-python-packages.yml @@ -0,0 +1,46 @@ +name: Push Python Packages + +on: + workflow_dispatch: + inputs: + internal_pypi: + description: 'Whether to push to internal pypi' + default: 'false' + required: true + type: boolean + public_pypi: + description: 'Whether to push to public pypi' + default: 'false' + required: true + type: boolean + tag: + description: 'Release tag to push' + required: true + +jobs: + PushPackages: + runs-on: ubuntu-latest + + steps: + - name: Download release assets + uses: duhow/download-github-release-assets@v1 + with: + token: ${{ secrets.GH_TOKEN_RELEASE }} + repository: ${{ github.repository }} + tag: ${{ github.event.inputs.tag }} + files: '*' + target: ${{ github.workspace }}/release/ + + - name: Install dependencies + if: ${{ github.event.inputs.internal_pypi || github.event.inputs.public_pypi }} + run: | + python -m pip install --upgrade pip + pip install 
twine
+
+      - name: Push packages to internal pypi
+        if: ${{ github.event.inputs.internal_pypi == 'true' }}
+        run: twine upload -u ${{ secrets.INTERNAL_PYPI_USER }} -p "${{ secrets.INTERNAL_PYPI_PASSWORD }}" --repository-url ${{ secrets.INTERNAL_PYPI_URL }} ${{ github.workspace }}/release/*.whl
+
+      - name: Push packages to public pypi
+        if: ${{ github.event.inputs.public_pypi == 'true' }}
+        run: twine upload -u ${{ secrets.PUBLIC_PYPI_USER }} -p "${{ secrets.PUBLIC_PYPI_PASSWORD }}" -r pypi ${{ github.workspace }}/release/*.whl
diff --git a/.github/workflows/scripts/check_for_license.sh b/.github/workflows/scripts/check_for_license.sh
new file mode 100755
index 000000000..660a19df7
--- /dev/null
+++ b/.github/workflows/scripts/check_for_license.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+
+print_and_exit() {
+    echo "Need to add license header to file $1"
+    exit 1
+}
+
+EXCLUDE_DIRS="-path ./compiler/include/boost-single-header -prune -o"
+
+files=$(find ./compiler/{include,lib,src} $EXCLUDE_DIRS -iregex '^.*\.\(cpp\|cc\|h\|hpp\)$' -print)
+
+for file in $files
+do
+    cmp <(head -n 4 $file) <(echo "// Part of the Concrete Compiler Project, under the BSD3 License with Zama
+// Exceptions. See
+// https://github.com/zama-ai/concrete-compiler-internal/blob/main/LICENSE.txt
+// for license information.") || print_and_exit $file
+done
+
+# Ignore python package namespace init file
+files=$(find ./compiler/{include,lib,src} -iregex '^.*\.\(py\)$' ! -path ./compiler/lib/Bindings/Python/concrete/__init__.py)
+
+for file in $files
+do
+    cmp <(head -n 2 $file) <(echo "# Part of the Concrete Compiler Project, under the BSD3 License with Zama Exceptions.
+# See https://github.com/zama-ai/concrete-compiler-internal/blob/main/LICENSE.txt for license information.") || print_and_exit $file +done diff --git a/.github/workflows/scripts/container_timestamp_check.sh b/.github/workflows/scripts/container_timestamp_check.sh new file mode 100755 index 000000000..45c1dc69c --- /dev/null +++ b/.github/workflows/scripts/container_timestamp_check.sh @@ -0,0 +1,67 @@ +#!/bin/bash -e + +set -e + +BASE_IMG_ENDPOINT_URL= +ENV_IMG_ENDPOINT_URL= +TOKEN= + +while [ -n "$1" ] +do + case "$1" in + "--base_img_url" ) + shift + BASE_IMG_ENDPOINT_URL="$1" + ;; + + "--env_img_url" ) + shift + ENV_IMG_ENDPOINT_URL="$1" + ;; + + "--token" ) + shift + TOKEN="$1" + ;; + + *) + echo "Unknown param : $1" + exit 1 + ;; + esac + shift +done + +BASE_JSON=$(curl \ +-X GET \ +-H "Accept: application/vnd.github.v3+json" \ +-H "Authorization: token ${TOKEN}" \ +"${BASE_IMG_ENDPOINT_URL}") + +BASE_IMG_TIMESTAMP=$(echo "${BASE_JSON}" | jq -r 'sort_by(.updated_at)[-1].updated_at') + +ENV_JSON=$(curl \ +-X GET \ +-H "Accept: application/vnd.github.v3+json" \ +-H "Authorization: token ${TOKEN}" \ +"${ENV_IMG_ENDPOINT_URL}") + +ENV_IMG_TIMESTAMP=$(echo "${ENV_JSON}" | \ +jq -rc '.[] | select(.metadata.container.tags[] | contains("latest")).updated_at') + +echo "Base timestamp: ${BASE_IMG_TIMESTAMP}" +echo "Env timestamp: ${ENV_IMG_TIMESTAMP}" + +BASE_IMG_DATE=$(date -d "${BASE_IMG_TIMESTAMP}" +%s) +ENV_IMG_DATE=$(date -d "${ENV_IMG_TIMESTAMP}" +%s) + +echo "Base epoch: ${BASE_IMG_DATE}" +echo "Env epoch: ${ENV_IMG_DATE}" + +if [[ "${BASE_IMG_DATE}" -ge "${ENV_IMG_DATE}" ]]; then + echo "Env image out of date, sending rebuild request." + exit 0 +else + echo "Image up to date, nothing to do." 
+ exit 1 +fi diff --git a/.github/workflows/scripts/format_cmake.sh b/.github/workflows/scripts/format_cmake.sh new file mode 100755 index 000000000..14c992205 --- /dev/null +++ b/.github/workflows/scripts/format_cmake.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +set -e -o pipefail + +cmake-format -i compiler/CMakeLists.txt -c compiler/.cmake-format-config.py + +find ./compiler/{include,lib,src,tests} -type f -name "CMakeLists.txt" | xargs -I % sh -c 'cmake-format -i % -c compiler/.cmake-format-config.py' + +# show changes if any +git --no-pager diff +# fail if there is a diff, success otherwise +git diff | ifne exit 1 diff --git a/.github/workflows/scripts/format_cpp.sh b/.github/workflows/scripts/format_cpp.sh new file mode 100755 index 000000000..94ac93cb3 --- /dev/null +++ b/.github/workflows/scripts/format_cpp.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +set -e -o pipefail + +EXCLUDE_DIRS="-path ./compiler/include/boost-single-header -prune -o" + +find ./compiler/{include,lib,src,tests} $EXCLUDE_DIRS -iregex '^.*\.\(cpp\|cc\|h\|hpp\)$' -print | xargs clang-format -i -style='file' + +# show changes if any +git --no-pager diff +# fail if there is a diff, success otherwise +git diff | ifne exit 1 diff --git a/.github/workflows/scripts/lint_cpp.sh b/.github/workflows/scripts/lint_cpp.sh new file mode 100755 index 000000000..0ba57f8e7 --- /dev/null +++ b/.github/workflows/scripts/lint_cpp.sh @@ -0,0 +1,66 @@ +#!/bin/bash + +print_usage() { + local FD=$1 + echo "Usage: $0 [OPTION]" >&$FD + echo "Check if the sources comply with the checks from .clang-tidy" >&$FD + echo "" >&$FD + echo "Options:" >&$FD + echo " -f, --fix Advise clang-tidy to fix any issue" >&$FD + echo " found." >&$FD + echo " -h, --help Print this help." >&$FD +} + +die() { + echo "$@" >&2 + exit 1 +} + +check_buildfile() { + local FILE="$1" + + [ -f "$FILE" ] || + die "$FILE not found. Please run this script from within your build " \ + "directory." 
+}
+
+CLANG_TIDY_EXTRA_ARGS=()
+
+# Parse arguments
+while [ $# -gt 0 ]
+do
+    case $1 in
+        -f|--fix)
+            CLANG_TIDY_EXTRA_ARGS+=("--fix")
+            ;;
+        -h|--help)
+            print_usage 1
+            exit 0
+            ;;
+        *)
+            print_usage 2
+            exit 1
+            ;;
+    esac
+
+    shift
+done
+
+check_buildfile "CMakeFiles/CMakeDirectoryInformation.cmake"
+check_buildfile "compile_commands.json"
+
+# Extract toplevel source directory from CMakeDirectoryInformation.cmake
+# containing a line:
+#
+#   set(CMAKE_RELATIVE_PATH_TOP_SOURCE "...")
+TOP_SRCDIR=$(grep -o 'set\s*(\s*CMAKE_RELATIVE_PATH_TOP_SOURCE\s\+"[^"]\+")' \
+    CMakeFiles/CMakeDirectoryInformation.cmake | \
+    sed 's/set\s*(\s*CMAKE_RELATIVE_PATH_TOP_SOURCE\s\+"\([^"]\+\)")/\1/g')
+
+[ $? -eq 0 -a ! -z "$TOP_SRCDIR" ] ||
+    die "Could not extract CMAKE_RELATIVE_PATH_TOP_SOURCE from CMake files."
+
+find "$TOP_SRCDIR/"{include,lib,src} \
+    \( -iname "*.h" -o -iname "*.cpp" -o -iname "*.cc" \) | \
+    xargs clang-tidy -p . -header-filter="$TOP_SRCDIR/include/.*\.h" \
+    "${CLANG_TIDY_EXTRA_ARGS[@]}"
diff --git a/.github/workflows/start_aws_builds.yml b/.github/workflows/start_aws_builds.yml
new file mode 100644
index 000000000..a63e936cc
--- /dev/null
+++ b/.github/workflows/start_aws_builds.yml
@@ -0,0 +1,36 @@
+# Start all AWS build jobs on Slab CI bot.
+name: Start all AWS builds
+
+on:
+  push:
+    # 'branches' (plural) is the valid filter key; 'branch' is ignored by
+    # Actions and the workflow would run on pushes to every branch.
+    branches:
+      - 'main'
+  workflow_dispatch:
+
+jobs:
+  start-builds:
+    strategy:
+      matrix:
+        command: [cpu-build, gpu-build, docker-images-build]
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout Slab repo
+        uses: actions/checkout@v3
+        with:
+          repository: zama-ai/slab
+          path: slab
+          token: ${{ secrets.CONCRETE_ACTIONS_TOKEN }}
+
+      - name: Start AWS job in Slab
+        shell: bash
+        # TODO: step result must be correlated to HTTP return code.
+        run: |
+          echo -n '{"command": "${{ matrix.command }}", "git_ref": "${{ github.ref }}", "sha": "${{ github.sha }}"}' > command.json
+          SIGNATURE="$(slab/scripts/hmac_calculator.sh command.json '${{ secrets.JOB_SECRET }}')"
+          curl -v -k \
+          -H "Content-Type: application/json" \
+          -H "X-Slab-Repository: ${{ github.repository }}" \
+          -H "X-Slab-Command: start_aws" \
+          -H "X-Hub-Signature-256: sha256=${SIGNATURE}" \
+          -d @command.json \
+          ${{ secrets.SLAB_URL }}
diff --git a/.github/workflows/start_ml_benchmarks.yml b/.github/workflows/start_ml_benchmarks.yml
new file mode 100644
index 000000000..9c7616b7f
--- /dev/null
+++ b/.github/workflows/start_ml_benchmarks.yml
@@ -0,0 +1,38 @@
+# Start application benchmarks job on Slab CI bot.
+name: Start ML benchmarks
+
+on:
+  workflow_dispatch:
+
+  # Have a weekly benchmark run on main branch to be available on Monday morning (Paris time)
+# TODO: uncomment this section once MLBenchmarks are implemented
+# schedule:
+#   # * is a special character in YAML so you have to quote this string
+#   # At 1:00 every Thursday
+#   # Timezone is UTC, so Paris time is +2 during the summer and +1 during winter
+#   - cron: '0 1 * * THU'
+
+jobs:
+  start-ml-benchmarks:
+    # runs-on is mandatory for every job; the workflow fails validation without it.
+    runs-on: ubuntu-latest
+    steps:
+      # The run step below invokes slab/scripts/hmac_calculator.sh, so the Slab
+      # repo must be checked out first (same pattern as start_aws_builds.yml).
+      - name: Checkout Slab repo
+        uses: actions/checkout@v3
+        with:
+          repository: zama-ai/slab
+          path: slab
+          token: ${{ secrets.CONCRETE_ACTIONS_TOKEN }}
+
+      - name: Start AWS job in Slab
+        shell: bash
+        # TODO: step result must be correlated to HTTP return code.
+        run: |
+          echo -n '{"command": "ml-bench", "git_ref": "${{ github.ref }}", "sha": "${{ github.sha }}"}' > command.json
+          SIGNATURE="$(slab/scripts/hmac_calculator.sh command.json '${{ secrets.JOB_SECRET }}')"
+          curl -v -k \
+          -H "Content-Type: application/json" \
+          -H "X-Slab-Repository: ${{ github.repository }}" \
+          -H "X-Slab-Command: start_data" \
+          -H "X-Hub-Signature-256: sha256=${SIGNATURE}" \
+          -d @command.json \
+          ${{ secrets.SLAB_URL }}
diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 000000000..853c8011e
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,20 @@
+[submodule "llvm-project"]
+	path = compilers/concrete-compiler/llvm-project
+	url = git@github.com:zama-ai/concrete-compiler-internal-llvm-project.git
+	shallow = true
+[submodule "compiler/concrete-optimizer"]
+	path = compilers/concrete-compiler/compiler/concrete-optimizer
+	url = git@github.com:zama-ai/concrete-optimizer.git
+	shallow = false
+[submodule "compiler/concrete-core"]
+	path = compilers/concrete-compiler/compiler/concrete-core
+	url = git@github.com:zama-ai/concrete-core.git
+	shallow = true
+[submodule "compiler/parameter-curves"]
+	path = compilers/concrete-compiler/compiler/parameter-curves
+	url = git@github.com:zama-ai/parameter-curves.git
+	shallow = true
+[submodule "compiler/concrete-cpu"]
+	path = compilers/concrete-compiler/compiler/concrete-cpu
+	url = git@github.com:zama-ai/concrete-cpu.git
+	shallow = true
diff --git a/.linelint.yml b/.linelint.yml
new file mode 100644
index 000000000..4e58c4302
--- /dev/null
+++ b/.linelint.yml
@@ -0,0 +1,23 @@
+# 'true' will fix files
+autofix: false
+
+# list of paths to ignore, uses gitignore syntaxes (executes before any rule)
+ignore:
+  - .git/
+  - llvm-project/
+  - compiler/concrete-optimizer
+  - compiler/concrete-core
+  - compiler/parameter-curves
+  - google-benchmark
+
+rules:
+  # checks if file ends in a newline character
+  end-of-file:
+    # set to true to enable this rule
+    enable: true
+
+    # set to true to disable autofix (if enabled
globally) + disable-autofix: false + + # if true also checks if file ends in a single newline character + single-new-line: false diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 000000000..62fdc0b51 --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,28 @@ +BSD 3-Clause Clear License + +Copyright © 2022 ZAMA. +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this +list of conditions and the following disclaimer in the documentation and/or other +materials provided with the distribution. + +3. Neither the name of ZAMA nor the names of its contributors may be used to endorse +or promote products derived from this software without specific prior written permission. + +NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. +THIS SOFTWARE IS PROVIDED BY THE ZAMA AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL +ZAMA OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, +OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF +ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/compilers/concrete-compiler/.gitignore b/compilers/concrete-compiler/.gitignore new file mode 100644 index 000000000..e7fa8f484 --- /dev/null +++ b/compilers/concrete-compiler/.gitignore @@ -0,0 +1,55 @@ +## C++ + +# Prerequisites +*.d + +# Compiled Object files +*.slo +*.lo +*.o +*.obj + +# Precompiled Headers +*.gch +*.pch + +# Compiled Dynamic libraries +*.so +*.dylib +*.dll + +# Fortran module files +*.mod +*.smod + +# Compiled Static libraries +*.lai +*.la +*.a +*.lib + +# Executables +*.exe +*.out +*.app + +# VSCODE +.vscode/ + +# Jetbrains tools +.idea/ + +# Python cache +__pycache__/ + +# Sphinx +_build/ +.venv + +# macOS +.DS_Store + + +compiler/tests/TestLib/out/ +compiler/lib/Bindings/Rust/target/ +compiler/lib/Bindings/Rust/Cargo.lock diff --git a/compilers/concrete-compiler/README.md b/compilers/concrete-compiler/README.md new file mode 120000 index 000000000..15569d1e5 --- /dev/null +++ b/compilers/concrete-compiler/README.md @@ -0,0 +1 @@ +./compiler/README.md \ No newline at end of file diff --git a/compilers/concrete-compiler/builders/Dockerfile.concrete-compiler-env b/compilers/concrete-compiler/builders/Dockerfile.concrete-compiler-env new file mode 100644 index 000000000..130f3511b --- /dev/null +++ b/compilers/concrete-compiler/builders/Dockerfile.concrete-compiler-env @@ -0,0 +1,49 @@ +FROM quay.io/pypa/manylinux_2_28_x86_64:2022-11-19-1b19e81 + +# epel-release is for install ccache +# clang is needed for rust bindings +RUN dnf install -y epel-release +RUN dnf update -y +RUN dnf install -y ninja-build hwloc-devel ccache clang ncurses-devel +RUN dnf install -y openssh-clients +RUN dnf clean all +RUN mkdir -p ~/.ssh/ && ssh-keyscan -t ecdsa github.com >> ~/.ssh/known_hosts +# setup ccache with an unlimited amount of files and storage +RUN ccache -M 0 +RUN ccache -F 0 +# Install Rust +RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y +ENV PATH=/root/.cargo/bin:$PATH +SHELL ["/bin/bash", "-c"] +# Install 
boost +ADD https://boostorg.jfrog.io/artifactory/main/release/1.71.0/source/boost_1_71_0.tar.gz /boost_1_71_0.tar.gz +RUN tar -xzvf /boost_1_71_0.tar.gz +WORKDIR /boost_1_71_0 +RUN ./bootstrap.sh && ./b2 --with-filesystem install +# Setup HPX +COPY --from=ghcr.io/zama-ai/hpx:latest /hpx /hpx +ENV HPX_INSTALL_DIR=/hpx/build +# Setup CUDA +COPY --from=ghcr.io/zama-ai/cuda:11-7 /usr/local/cuda-11.7/ /usr/local/cuda-11.7/ +COPY --from=ghcr.io/zama-ai/cuda:11-7 /usr/lib64/libcuda.so* /usr/lib64/ +ENV PATH "$PATH:/usr/local/cuda-11.7/bin" +# Set the python path. Options: [cp37-cp37m, cp38-cp38, cp39-cp39, cp310-cp310] +# Links and env would be available to use the appropriate python version +ARG python_tag=cp38-cp38 +RUN ln -s /opt/python/${python_tag}/bin/pip /bin/pip +RUN ln -s /opt/python/${python_tag}/bin/python /bin/python +ENV PYTHON_EXEC=/opt/python/${python_tag}/bin/python +# Install python deps +RUN pip install numpy pybind11==2.8 PyYAML pytest wheel auditwheel +# Setup LLVM +COPY /llvm-project /llvm-project +# Setup and build compiler +COPY /compiler /compiler +WORKDIR /compiler +RUN mkdir -p /build +RUN --mount=type=ssh make DATAFLOW_EXECUTION_ENABLED=ON BUILD_DIR=/build CCACHE=ON \ + Python3_EXECUTABLE=${PYTHON_EXEC} \ + concretecompiler python-bindings rust-bindings +ENV PYTHONPATH "$PYTHONPATH:/build/tools/concretelang/python_packages/concretelang_core" +ENV PATH "$PATH:/build/bin" +RUN ccache -z diff --git a/compilers/concrete-compiler/builders/Dockerfile.cuda-env b/compilers/concrete-compiler/builders/Dockerfile.cuda-env new file mode 100644 index 000000000..8a2ba3334 --- /dev/null +++ b/compilers/concrete-compiler/builders/Dockerfile.cuda-env @@ -0,0 +1,14 @@ +FROM quay.io/pypa/manylinux_2_28_x86_64:2022-11-19-1b19e81 + +RUN dnf install -y kernel-devel kernel-headers +RUN curl https://developer.download.nvidia.com/compute/cuda/11.7.1/local_installers/cuda-repo-rhel8-11-7-local-11.7.1_515.65.01-1.x86_64.rpm -o 
cuda-repo-rhel8-11-7-local-11.7.1_515.65.01-1.x86_64.rpm +RUN rpm -i cuda-repo-rhel8-11-7-local-11.7.1_515.65.01-1.x86_64.rpm +RUN dnf clean all +RUN dnf install -y epel-release +RUN dnf update -y +RUN dnf -y module install nvidia-driver:latest-dkms +RUN dnf -y install cuda + +FROM scratch +COPY --from=0 /usr/local/cuda-11.7/ /usr/local/cuda-11.7/ +COPY --from=0 /usr/lib64/libcuda.so* /usr/lib64/ diff --git a/compilers/concrete-compiler/builders/Dockerfile.hpx-env b/compilers/concrete-compiler/builders/Dockerfile.hpx-env new file mode 100644 index 000000000..98c35aaa4 --- /dev/null +++ b/compilers/concrete-compiler/builders/Dockerfile.hpx-env @@ -0,0 +1,25 @@ +FROM quay.io/pypa/manylinux_2_28_x86_64:2022-11-19-1b19e81 + +RUN dnf update -y +RUN dnf install -y ninja-build hwloc-devel +# Install boost +ADD https://boostorg.jfrog.io/artifactory/main/release/1.71.0/source/boost_1_71_0.tar.gz /boost_1_71_0.tar.gz +RUN tar -xzvf /boost_1_71_0.tar.gz +WORKDIR /boost_1_71_0 +RUN ./bootstrap.sh && ./b2 --with-filesystem install +# Build HPX +RUN git clone https://github.com/STEllAR-GROUP/hpx.git /hpx +WORKDIR /hpx +RUN git checkout 1.7.1 +RUN mkdir build +# empty HPX_WITH_MAX_CPU_COUNT = dynamic +# ref https://github.com/STEllAR-GROUP/hpx/blob/1.7.1/CMakeLists.txt#L759 +RUN cd build && cmake \ + -DHPX_WITH_MAX_CPU_COUNT="" \ + -DHPX_WITH_FETCH_ASIO=on \ + -DHPX_FILESYSTEM_WITH_BOOST_FILESYSTEM_COMPATIBILITY=ON \ + -DHPX_WITH_MALLOC=system .. 
+RUN cd build && make -j2 + +FROM scratch +COPY --from=0 /hpx/ /hpx/ diff --git a/compilers/concrete-compiler/builders/Dockerfile.keysetcache-env b/compilers/concrete-compiler/builders/Dockerfile.keysetcache-env new file mode 100644 index 000000000..2b1b8d5ec --- /dev/null +++ b/compilers/concrete-compiler/builders/Dockerfile.keysetcache-env @@ -0,0 +1,2 @@ +FROM alpine:latest +COPY KeySetCache /KeySetCache diff --git a/compilers/concrete-compiler/ci/benchmark_parser.py b/compilers/concrete-compiler/ci/benchmark_parser.py new file mode 100644 index 000000000..47c0fca59 --- /dev/null +++ b/compilers/concrete-compiler/ci/benchmark_parser.py @@ -0,0 +1,103 @@ +""" +benchmark_parser +---------------- + +Parse benchmark raw results. +""" +import argparse +import pathlib +import json + + +parser = argparse.ArgumentParser() +parser.add_argument('results_path', + help=('Location of raw benchmark results,' + ' could be either a file or a directory.' + 'In a case of a directory, this script will attempt to parse all the' + 'files containing a .json extension')) +parser.add_argument('output_file', help='File storing parsed results') +parser.add_argument('-d', '--database', dest='database', required=True, + help='Name of the database used to store results') +parser.add_argument('-w', '--hardware', dest='hardware', required=True, + help='Hardware reference used to perform benchmark') +parser.add_argument('-V', '--project-version', dest='project_version', required=True, + help='Commit hash reference') +parser.add_argument('-b', '--branch', dest='branch', required=True, + help='Git branch name on which benchmark was performed') +parser.add_argument('--commit-date', dest='commit_date', required=True, + help='Timestamp of commit hash used in project_version') +parser.add_argument('--bench-date', dest='bench_date', required=True, + help='Timestamp when benchmark was run') + + +def parse_results(raw_results): + """ + Parse raw benchmark results. 
+ + :param raw_results: path to file that contains raw results as :class:`pathlib.Path` + + :return: :class:`list` of data points + """ + raw_results = json.loads(raw_results.read_text()) + return [ + {"value": res["cpu_time"], "test": res["name"]} + for res in raw_results["benchmarks"] + ] + + +def recursive_parse(directory): + """ + Parse all the benchmark results in a directory. It will attempt to parse all the files having a + .json extension at the top-level of this directory. + + :param directory: path to directory that contains raw results as :class:`pathlib.Path` + + :return: :class:`list` of data points + """ + result_values = [] + for file in directory.glob('*.json'): + try: + result_values.extend(parse_results(file)) + except KeyError as err: + print(f"Failed to parse '{file.resolve()}': {repr(err)}") + + return result_values + + +def dump_results(parsed_results, filename, input_args): + """ + Dump parsed results formatted as JSON to file. + + :param parsed_results: :class:`list` of data points + :param filename: filename for dump file as :class:`pathlib.Path` + :param input_args: CLI input arguments + """ + filename.parent.mkdir(parents=True, exist_ok=True) + series = { + "database": input_args.database, + "hardware": input_args.hardware, + "project_version": input_args.project_version, + "branch": input_args.branch, + "insert_date": input_args.bench_date, + "commit_date": input_args.commit_date, + "points": parsed_results, + } + filename.write_text(json.dumps(series)) + + +if __name__ == "__main__": + args = parser.parse_args() + + results_path = pathlib.Path(args.results_path) + print("Parsing benchmark results... ") + if results_path.is_dir(): + results = recursive_parse(results_path) + else: + results = parse_results(results_path) + print("Parsing results done") + + output_file = pathlib.Path(args.output_file) + print(f"Dump parsed results into '{output_file.resolve()}' ... 
", end="") + dump_results(results, output_file, args) + + print("Done") diff --git a/compilers/concrete-compiler/ci/slab.toml b/compilers/concrete-compiler/ci/slab.toml new file mode 100644 index 000000000..5addcbeb4 --- /dev/null +++ b/compilers/concrete-compiler/ci/slab.toml @@ -0,0 +1,45 @@ +[profile.m6i] +region = "eu-west-3" +image_id = "ami-0a24aaee029d1295c" # Based on Ubuntu 22.4 +instance_type = "m6i.metal" +subnet_id = "subnet-a886b4c1" +security_group= ["sg-0bf1c1d79c97bc88f", ] + +[profile.m6i-old] +region = "eu-west-3" +image_id = "ami-05e4c0e628378ad6d" # Based on Ubuntu 20.4 +instance_type = "m6i.metal" +subnet_id = "subnet-a886b4c1" +security_group= ["sg-0bf1c1d79c97bc88f", ] + +[profile.gpu] +region = "us-east-1" +image_id = "ami-0c4773f5626d919b6" +instance_type = "p3.2xlarge" +subnet_id = "subnet-8123c9e7" +security_group= ["sg-0f8b52622a2669491", ] + +# Trigger CPU build +[command.cpu-build] +workflow = "aws_build_cpu.yml" +profile = "m6i-old" +check_run_name = "AWS CPU build (Slab)" + +# Trigger GPU build +[command.gpu-build] +workflow = "aws_build_gpu.yml" +profile = "gpu" +check_run_name = "AWS GPU build (Slab)" + +# Trigger Docker images build +[command.docker-images-build] +workflow = "publish_docker_images.yml" +profile = "m6i-old" +check_run_name = "AWS Docker images build & publish (Slab)" + +# Trigger ML benchmarks by running each use cases subset in parallel. 
+[command.ml-bench] +workflow = "ml_benchmark_subset.yml" +profile = "m6i" +matrix = [0,1,2,3,4,5,6,7,8,9,10] +max_parallel_jobs = 2 diff --git a/compilers/concrete-compiler/compiler/.clang-format b/compilers/concrete-compiler/compiler/.clang-format new file mode 100644 index 000000000..9b3aa8b72 --- /dev/null +++ b/compilers/concrete-compiler/compiler/.clang-format @@ -0,0 +1 @@ +BasedOnStyle: LLVM diff --git a/compilers/concrete-compiler/compiler/.clang-tidy b/compilers/concrete-compiler/compiler/.clang-tidy new file mode 100644 index 000000000..2d8cca6b5 --- /dev/null +++ b/compilers/concrete-compiler/compiler/.clang-tidy @@ -0,0 +1,18 @@ +Checks: '-*,clang-diagnostic-*,llvm-*,misc-*,-misc-unused-parameters,-misc-non-private-member-variables-in-classes,readability-identifier-naming' +CheckOptions: + - key: readability-identifier-naming.ClassCase + value: CamelCase + - key: readability-identifier-naming.EnumCase + value: CamelCase + - key: readability-identifier-naming.FunctionCase + value: camelBack + - key: readability-identifier-naming.MemberCase + value: camelBack + - key: readability-identifier-naming.ParameterCase + value: camelBack + - key: readability-identifier-naming.UnionCase + value: CamelCase + - key: readability-identifier-naming.VariableCase + value: camelBack + - key: readability-identifier-naming.IgnoreMainLikeFunctions + value: 1 diff --git a/compilers/concrete-compiler/compiler/.cmake-format-config.py b/compilers/concrete-compiler/compiler/.cmake-format-config.py new file mode 100644 index 000000000..23d1ae80c --- /dev/null +++ b/compilers/concrete-compiler/compiler/.cmake-format-config.py @@ -0,0 +1,11 @@ +# ----------------------------- +# Options effecting formatting. 
+# ----------------------------- +with section("format"): + + # How wide to allow formatted cmake files + line_width = 120 + + # How many spaces to tab for indent + tab_size = 2 + diff --git a/compilers/concrete-compiler/compiler/.gitignore b/compilers/concrete-compiler/compiler/.gitignore new file mode 100644 index 000000000..7ae8b1d18 --- /dev/null +++ b/compilers/concrete-compiler/compiler/.gitignore @@ -0,0 +1,13 @@ +# Build dirs +build*/ + +*.mlir.script +*.lit_test_times.txt + +# Test-generated artifacts +concrete-compiler_compilation_artifacts/ +py_test_lib_compile_and_run_custom_perror/ +tests/end_to_end_fixture/end_to_end_linalg_apply_lookup_table.yaml +tests/end_to_end_fixture/end_to_end_linalg_leveled.yaml +tests/end_to_end_fixture/end_to_end_linalg_2_apply_lookup_table.yaml +tests/end_to_end_fixture/bug_report.yaml diff --git a/compilers/concrete-compiler/compiler/CMakeLists.txt b/compilers/concrete-compiler/compiler/CMakeLists.txt new file mode 100644 index 000000000..774751b3d --- /dev/null +++ b/compilers/concrete-compiler/compiler/CMakeLists.txt @@ -0,0 +1,162 @@ +cmake_minimum_required(VERSION 3.17) + +project(concretecompiler LANGUAGES C CXX) + +set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin) + +set(CMAKE_CXX_STANDARD 14) +set(CMAKE_EXPORT_COMPILE_COMMANDS ON) + +# Needed on linux with clang 15 and on MacOS because cxx emits dollars in the optimizer C++ API +add_definitions("-Wno-dollar-in-identifier-extension") + +add_definitions("-Wall ") +add_definitions("-Werror ") +add_definitions("-Wfatal-errors") + +# If we are trying to build the compiler with LLVM/MLIR as libraries +if(NOT DEFINED LLVM_EXTERNAL_CONCRETELANG_SOURCE_DIR) + message(FATAL_ERROR "Concrete compiler requires a unified build with LLVM/MLIR") +endif() + +# CMake library generation settings. 
+set(BUILD_SHARED_LIBS + OFF + CACHE BOOL "Default to building a static mondo-lib") +set(CMAKE_PLATFORM_NO_VERSIONED_SONAME + ON + CACHE BOOL "Python soname linked libraries are bad") +set(CMAKE_VISIBILITY_INLINES_HIDDEN + ON + CACHE BOOL "Hide inlines") + +# The -fvisibility=hidden option only works for static builds. +if(BUILD_SHARED_LIBS AND (CMAKE_CXX_VISIBILITY_PRESET STREQUAL "hidden")) + message(FATAL_ERROR "CMAKE_CXX_VISIBILITY_PRESET=hidden is incompatible \ + with BUILD_SHARED_LIBS.") +endif() + +set(MLIR_MAIN_SRC_DIR ${LLVM_MAIN_SRC_DIR}/../mlir) # --src-root +set(MLIR_INCLUDE_DIR ${MLIR_MAIN_SRC_DIR}/include) # --includedir +set(MLIR_TABLEGEN_OUTPUT_DIR ${LLVM_BINARY_DIR}/tools/mlir/include) +set(MLIR_TABLEGEN_EXE $) +include_directories(SYSTEM ${MLIR_INCLUDE_DIR}) +include_directories(SYSTEM ${MLIR_TABLEGEN_OUTPUT_DIR}) + +list(APPEND CMAKE_MODULE_PATH "${MLIR_MAIN_SRC_DIR}/cmake/modules") + +include_directories(${PROJECT_SOURCE_DIR}/include) +include_directories(${PROJECT_BINARY_DIR}/include) +link_directories(${LLVM_BUILD_LIBRARY_DIR}) +add_definitions(${LLVM_DEFINITIONS}) + +# Custom doc generation function +list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake/modules") +include(AddConcretelangDoc) +set(CONCRETELANG_BINARY_DIR ${CMAKE_CURRENT_BINARY_DIR}) + +# ------------------------------------------------------------------------------- +# Concrete Security curves Configuration +# ------------------------------------------------------------------------------- +include_directories(${PROJECT_SOURCE_DIR}/parameter-curves/concrete-security-curves-cpp/include) + +# ------------------------------------------------------------------------------- +# Concrete CPU Configuration +# ------------------------------------------------------------------------------- +set(CONCRETE_CPU_STATIC_LIB "${PROJECT_SOURCE_DIR}/concrete-cpu/target/release/libconcrete_cpu.a") +ExternalProject_Add( + concrete_cpu_rust + DOWNLOAD_COMMAND "" + CONFIGURE_COMMAND "" 
OUTPUT "${CONCRETE_CPU_STATIC_LIB}" + BUILD_COMMAND cargo build + COMMAND cargo build --release + BINARY_DIR "${PROJECT_SOURCE_DIR}/concrete-cpu" + INSTALL_COMMAND "" + LOG_BUILD ON) +add_library(concrete_cpu STATIC IMPORTED) +# TODO - Change that to a location in the release dir +set(CONCRETE_CPU_INCLUDE_DIR "${PROJECT_SOURCE_DIR}/concrete-cpu/concrete-cpu") +set_target_properties(concrete_cpu PROPERTIES IMPORTED_LOCATION "${CONCRETE_CPU_STATIC_LIB}") +add_dependencies(concrete_cpu concrete_cpu_rust) + +# -------------------------------------------------------------------------------- +# Concrete Cuda Configuration +# -------------------------------------------------------------------------------- +option(CONCRETELANG_CUDA_SUPPORT "Support Concrete CUDA Execution." OFF) + +if(CONCRETELANG_CUDA_SUPPORT) + if(NOT DEFINED CONCRETE_CORE_PATH) + message(FATAL_ERROR "Compiling with CUDA support requires setting CONCRETE_CORE_PATH") + endif() + remove_definitions("-Werror ") + message(STATUS "Building with Concrete CUDA execution support") + find_package(CUDAToolkit REQUIRED) + message(STATUS "Found CUDA version: ${CUDAToolkit_VERSION}") + message(STATUS "Found CUDA library dir: ${CUDAToolkit_LIBRARY_DIR}") + link_directories(${CUDAToolkit_LIBRARY_DIR}) + add_subdirectory(${CONCRETE_CORE_PATH}/concrete-cuda/cuda) + include_directories(${CONCRETE_CORE_PATH}/concrete-cuda/cuda/include) + include_directories(${CUDAToolkit_INCLUDE_DIRS}) + add_compile_options(-DCONCRETELANG_CUDA_SUPPORT) +endif() + +# -------------------------------------------------------------------------------- +# Python Configuration +# ------------------------------------------------------------------------------- +option(CONCRETELANG_BINDINGS_PYTHON_ENABLED "Enables ConcreteLang Python bindings." 
ON) + +if(CONCRETELANG_BINDINGS_PYTHON_ENABLED) + message(STATUS "ConcreteLang Python bindings are enabled.") + + include(MLIRDetectPythonEnv) + mlir_configure_python_dev_packages() + set(CONCRETELANG_PYTHON_PACKAGES_DIR ${CMAKE_CURRENT_BINARY_DIR}/python_packages) +else() + message(STATUS "ConcreteLang Python bindings are disabled.") +endif() + +# ------------------------------------------------------------------------------- +# DFR - parallel execution configuration +# ------------------------------------------------------------------------------- +option(CONCRETELANG_DATAFLOW_EXECUTION_ENABLED "Enables dataflow execution for ConcreteLang." ON) +option(CONCRETELANG_TIMING_ENABLED "Enables execution timing." ON) + +if(CONCRETELANG_DATAFLOW_EXECUTION_ENABLED) + message(STATUS "ConcreteLang dataflow execution enabled.") + + find_package(HPX REQUIRED CONFIG) + list(APPEND CMAKE_MODULE_PATH "${HPX_CMAKE_DIR}") + add_compile_options(-DCONCRETELANG_DATAFLOW_EXECUTION_ENABLED + -DHPX_DEFAULT_CONFIG_FILE="${PROJECT_SOURCE_DIR}/hpx.ini") + +else() + message(STATUS "ConcreteLang dataflow execution disabled.") +endif() + +if(CONCRETELANG_TIMING_ENABLED) + add_compile_options(-DCONCRETELANG_TIMING_ENABLED) +else() + message(STATUS "ConcreteLang execution timing disabled.") +endif() + +# ------------------------------------------------------------------------------- +# Unit tests +# ------------------------------------------------------------------------------- +option(CONCRETELANG_UNIT_TESTS "Enables the build of unittests" ON) + +# ------------------------------------------------------------------------------- +# Benchmarks +# ------------------------------------------------------------------------------- +option(CONCRETELANG_BENCHMARK "Enables the build of benchmarks" ON) + +# ------------------------------------------------------------------------------- +# Handling sub dirs +# ------------------------------------------------------------------------------- 
+include_directories(${CONCRETE_OPTIMIZER_DIR}/concrete-optimizer-cpp/src/cpp) + +add_subdirectory(include) +add_subdirectory(lib) +add_subdirectory(src) +add_subdirectory(tests) + +add_subdirectory(${CONCRETE_OPTIMIZER_DIR}/concrete-optimizer-cpp/cmake-utils) diff --git a/compilers/concrete-compiler/compiler/Makefile b/compilers/concrete-compiler/compiler/Makefile new file mode 100644 index 000000000..8a61f2b09 --- /dev/null +++ b/compilers/concrete-compiler/compiler/Makefile @@ -0,0 +1,523 @@ +BUILD_TYPE?=Release +BUILD_DIR?=./build +Python3_EXECUTABLE?=$(shell which python3) +BINDINGS_PYTHON_ENABLED=ON +DATAFLOW_EXECUTION_ENABLED=OFF +TIMING_ENABLED=OFF +CC_COMPILER= +CXX_COMPILER= +CUDA_SUPPORT?=OFF +CONCRETE_CORE_PATH?= $(shell pwd)/concrete-core +INSTALL_PREFIX?=$(abspath $(BUILD_DIR))/install +INSTALL_PATH=$(abspath $(INSTALL_PREFIX))/concretecompiler/ +MAKEFILE_ROOT_DIR=$(shell pwd) + +CONCRETE_OPTIMIZER_DIR ?= $(shell pwd)/concrete-optimizer + +KEYSETCACHEDEV=/tmp/KeySetCache +KEYSETCACHECI ?= ../KeySetCache +KEYSETCACHENAME ?= KeySetCacheV4 + +HPX_VERSION?=1.7.1 +HPX_URL=https://github.com/STEllAR-GROUP/hpx/archive/refs/tags/$(HPX_VERSION).tar.gz +HPX_TARBALL=$(shell pwd)/hpx-$(HPX_VERSION).tar.gz +HPX_LOCAL_DIR=$(shell pwd)/hpx-$(HPX_VERSION) +HPX_INSTALL_DIR?=$(HPX_LOCAL_DIR)/build + +ML_BENCH_SUBSET_ID= + +# Find OS +OS=undefined +ifeq ($(shell uname), Linux) + OS=linux +else ifeq ($(shell uname), Darwin) + OS=darwin +endif + +# Setup find arguments for MacOS +ifeq ($(OS), darwin) +FIND_EXECUTABLE_ARG=-perm +111 +else +FIND_EXECUTABLE_ARG=-executable +endif + +ARCHITECTURE=undefined +ifeq ($(shell uname -m), arm64) + ARCHITECTURE=aarch64 +else + ARCHITECTURE=amd64 +endif + +export PATH := $(abspath $(BUILD_DIR))/bin:$(PATH) + +ifeq ($(shell which ccache),) + CCACHE=OFF +else + CCACHE=ON +endif + +ifeq ($(CCACHE),ON) + CMAKE_CCACHE_OPTIONS=-DCMAKE_CXX_COMPILER_LAUNCHER=ccache +else + CMAKE_CCACHE_OPTIONS= +endif + +ifneq ($(CC_COMPILER),) + 
CC_COMPILER_OPTION=-DCMAKE_C_COMPILER=$(CC_COMPILER) +else + CC_COMPILER_OPTION= +endif + +ifneq ($(CXX_COMPILER),) + CXX_COMPILER_OPTION=-DCMAKE_CXX_COMPILER=$(CXX_COMPILER) +else + CXX_COMPILER_OPTION= +endif + +# don't run parallel python tests if compiler doesn't support it +ifeq ($(DATAFLOW_EXECUTION_ENABLED),ON) + PYTHON_TESTS_MARKER="" +else + PYTHON_TESTS_MARKER="not parallel" +endif + +all: concretecompiler python-bindings build-tests build-benchmarks build-mlbench doc rust-bindings + +# concrete-optimizer ###################################### + +LIB_CONCRETE_OPTIMIZER_CPP = $(CONCRETE_OPTIMIZER_DIR)/target/libconcrete_optimizer_cpp.a + +concrete-optimizer-lib: + make -C $(CONCRETE_OPTIMIZER_DIR)/concrete-optimizer-cpp $(LIB_CONCRETE_OPTIMIZER_CPP) + +# HPX ##################################################### + +install-hpx-from-source: $(HPX_LOCAL_DIR) + mkdir -p $(HPX_LOCAL_DIR)/build + cd $(HPX_LOCAL_DIR)/build && cmake \ + -DHPX_WITH_MAX_CPU_COUNT="" \ + -DHPX_WITH_FETCH_ASIO=on \ + -DHPX_FILESYSTEM_WITH_BOOST_FILESYSTEM_COMPATIBILITY=ON \ + -DHPX_WITH_MALLOC=system .. 
+ cd $(HPX_LOCAL_DIR)/build && make -j2 + +$(HPX_TARBALL): + curl -L $(HPX_URL) -o $(HPX_TARBALL) + +$(HPX_LOCAL_DIR): $(HPX_TARBALL) + tar xzvf $(HPX_TARBALL) + +$(BUILD_DIR)/configured.stamp: + mkdir -p $(BUILD_DIR) + cmake -B $(BUILD_DIR) -GNinja ../llvm-project/llvm/ \ + $(CMAKE_CCACHE_OPTIONS) \ + $(CC_COMPILER_OPTION) \ + $(CXX_COMPILER_OPTION) \ + -DLLVM_ENABLE_PROJECTS="mlir;clang;openmp" \ + -DLLVM_BUILD_EXAMPLES=OFF \ + -DLLVM_TARGETS_TO_BUILD="host" \ + -DCMAKE_BUILD_TYPE=$(BUILD_TYPE) \ + -DLLVM_ENABLE_ASSERTIONS=ON \ + -DMLIR_ENABLE_BINDINGS_PYTHON=$(BINDINGS_PYTHON_ENABLED) \ + -DCONCRETELANG_BINDINGS_PYTHON_ENABLED=$(BINDINGS_PYTHON_ENABLED) \ + -DCONCRETELANG_DATAFLOW_EXECUTION_ENABLED=$(DATAFLOW_EXECUTION_ENABLED) \ + -DCONCRETELANG_TIMING_ENABLED=$(TIMING_ENABLED) \ + -DHPX_DIR=${HPX_INSTALL_DIR}/lib/cmake/HPX \ + -DLLVM_EXTERNAL_PROJECTS=concretelang \ + -DLLVM_EXTERNAL_CONCRETELANG_SOURCE_DIR=. \ + -DPython3_EXECUTABLE=${Python3_EXECUTABLE} \ + -DCONCRETE_OPTIMIZER_DIR=${CONCRETE_OPTIMIZER_DIR} \ + -DCONCRETE_CORE_PATH=$(CONCRETE_CORE_PATH) \ + -DCONCRETELANG_CUDA_SUPPORT=${CUDA_SUPPORT} \ + -DCUDAToolkit_ROOT=$(CUDA_PATH) + touch $@ + +build-initialized: concrete-optimizer-lib $(BUILD_DIR)/configured.stamp + +doc: build-initialized + cmake --build $(BUILD_DIR) --target mlir-doc + +concretecompiler: build-initialized + cmake --build $(BUILD_DIR) --target concretecompiler + +python-bindings: build-initialized + cmake --build $(BUILD_DIR) --target ConcretelangMLIRPythonModules + cmake --build $(BUILD_DIR) --target ConcretelangPythonModules + +rust-bindings: install + cd lib/Bindings/Rust && \ + CONCRETE_COMPILER_INSTALL_DIR=$(INSTALL_PATH) \ + cargo build --release + +CAPI: + cmake --build $(BUILD_DIR) --target CONCRETELANGCAPIFHE CONCRETELANGCAPIFHELINALG CONCRETELANGCAPISupport + +clientlib: build-initialized + cmake --build $(BUILD_DIR) --target ConcretelangClientLib + +serverlib: build-initialized + cmake --build $(BUILD_DIR) --target 
ConcretelangServerLib + + + +GITHUB_URL=https://api.github.com/repos/zama-ai/concrete-compiler-internal +GITHUB_URL_LIST_ARTIFACTS="${GITHUB_URL}/actions/artifacts?name=${KEYSETCACHENAME}&per_page=1" +CURL=curl -H"Accept: application/vnd.github.v3+json" -H"authorization: Bearer ${GITHUB_TOKEN}" +keysetcache.zip: REDIRECT_URL = $(shell ${CURL} -s ${GITHUB_URL_LIST_ARTIFACTS} | grep archive_download_url | grep -o 'http[^"]\+') +keysetcache.zip: + ${CURL} --location -o keysetcache.zip ${REDIRECT_URL} + du -h keysetcache.zip + +keysetcache_ci_populated: keysetcache.zip + unzip keysetcache.zip -d ${KEYSETCACHECI} + du -sh ${KEYSETCACHECI} + rm keysetcache.zip + +keysetcache_populated: keysetcache.zip + unzip keysetcache.zip -d ${KEYSETCACHEDEV} + du -sh ${KEYSETCACHEDEV} + rm keysetcache.zip + + +# test + +build-tests: build-unit-tests build-end-to-end-tests + +run-tests: run-check-tests run-unit-tests run-end-to-end-tests run-python-tests + +## check-tests + +run-check-tests: concretecompiler file-check not + $(BUILD_DIR)/bin/llvm-lit -v tests/check_tests + +## unit-tests + +build-unit-tests: build-initialized + cmake --build $(BUILD_DIR) --target ConcretelangUnitTests + +run-unit-tests: build-unit-tests + find $(BUILD_DIR)/tools/concretelang/tests/unit_tests -name unit_tests_concretelang* $(FIND_EXECUTABLE_ARG) -type f | xargs -n1 ./run_test_bin.sh + +## python-tests + +run-python-tests: python-bindings concretecompiler + PYTHONPATH=${PYTHONPATH}:$(BUILD_DIR)/tools/concretelang/python_packages/concretelang_core LD_PRELOAD=$(BUILD_DIR)/lib/libConcretelangRuntime.so pytest -vs -m $(PYTHON_TESTS_MARKER) tests/python + +test-compiler-file-output: concretecompiler + pytest -vs tests/test_compiler_file_output + + +## rust-tests +run-rust-tests: rust-bindings + cd lib/Bindings/Rust && \ + CONCRETE_COMPILER_INSTALL_DIR=$(INSTALL_PATH) \ + LD_LIBRARY_PATH=$(INSTALL_PATH)/lib \ + cargo test --release + +## end-to-end-tests + +build-end-to-end-jit-chunked-int: build-initialized 
+ cmake --build $(BUILD_DIR) --target end_to_end_jit_chunked_int + +build-end-to-end-jit-test: build-initialized + cmake --build $(BUILD_DIR) --target end_to_end_jit_test + +build-end-to-end-test: build-initialized + cmake --build $(BUILD_DIR) --target end_to_end_test + +build-end-to-end-jit-encrypted-tensor: build-initialized + cmake --build $(BUILD_DIR) --target end_to_end_jit_encrypted_tensor + +build-end-to-end-jit-fhelinalg: build-initialized + cmake --build $(BUILD_DIR) --target end_to_end_jit_fhelinalg + +build-end-to-end-jit-lambda: build-initialized + cmake --build $(BUILD_DIR) --target end_to_end_jit_lambda + +build-end-to-end-tests: build-end-to-end-jit-chunked-int build-end-to-end-jit-test build-end-to-end-test build-end-to-end-jit-encrypted-tensor build-end-to-end-jit-fhelinalg build-end-to-end-jit-lambda + +### end-to-end-tests CPU + +FIXTURE_CPU_DIR=tests/end_to_end_fixture/tests_cpu + +$(FIXTURE_CPU_DIR)/%.yaml: tests/end_to_end_fixture/%_gen.py + mkdir -p $(FIXTURE_CPU_DIR) + $(Python3_EXECUTABLE) $< > $@ + +$(FIXTURE_CPU_DIR)/bug_report.yaml: + unzip -o $(FIXTURE_CPU_DIR)/bug_report.zip -d $(FIXTURE_CPU_DIR) + +generate-cpu-tests: $(FIXTURE_CPU_DIR)/end_to_end_leveled.yaml $(FIXTURE_CPU_DIR)/end_to_end_apply_lookup_table.yaml $(FIXTURE_CPU_DIR)/end_to_end_linalg_apply_lookup_table.yaml $(FIXTURE_CPU_DIR)/bug_report.yaml $(FIXTURE_CPU_DIR)/end_to_end_round.yaml + +SECURITY_TO_TEST=80 128 +run-end-to-end-tests: build-end-to-end-tests generate-cpu-tests + $(BUILD_DIR)/tools/concretelang/tests/end_to_end_tests/end_to_end_jit_test + $(BUILD_DIR)/tools/concretelang/tests/end_to_end_tests/end_to_end_jit_encrypted_tensor + $(BUILD_DIR)/tools/concretelang/tests/end_to_end_tests/end_to_end_jit_fhelinalg + $(BUILD_DIR)/tools/concretelang/tests/end_to_end_tests/end_to_end_jit_lambda + $(foreach security,$(SECURITY_TO_TEST),$(BUILD_DIR)/tools/concretelang/tests/end_to_end_tests/end_to_end_test \ + --backend=cpu --security-level=$(security) --jit 
$(FIXTURE_CPU_DIR)/*.yaml;) + +### end-to-end-tests GPU + +FIXTURE_GPU_DIR=tests/end_to_end_fixture/tests_gpu + +$(FIXTURE_GPU_DIR): + mkdir -p $(FIXTURE_GPU_DIR) + +$(FIXTURE_GPU_DIR)/end_to_end_apply_lookup_table.yaml: tests/end_to_end_fixture/end_to_end_apply_lookup_table_gen.py + $(Python3_EXECUTABLE) $< --bitwidth 1 2 3 4 5 6 7 > $@ + +$(FIXTURE_GPU_DIR)/end_to_end_linalg_apply_lookup_table.yaml: tests/end_to_end_fixture/end_to_end_linalg_apply_lookup_table_gen.py + $(Python3_EXECUTABLE) $< --bitwidth 1 2 3 4 5 6 7 > $@ + + +generate-gpu-tests: $(FIXTURE_GPU_DIR) $(FIXTURE_GPU_DIR)/end_to_end_apply_lookup_table.yaml $(FIXTURE_GPU_DIR)/end_to_end_linalg_apply_lookup_table.yaml + +run-end-to-end-tests-gpu: build-end-to-end-test generate-gpu-tests + $(BUILD_DIR)/tools/concretelang/tests/end_to_end_tests/end_to_end_test \ + --backend=gpu --library /tmp/concrete_compiler/gpu_tests/ \ + $(FIXTURE_GPU_DIR)/*.yaml + +## end-to-end-dataflow-tests + +build-end-to-end-dataflow-tests: build-initialized + cmake --build $(BUILD_DIR) --target end_to_end_jit_auto_parallelization + cmake --build $(BUILD_DIR) --target end_to_end_jit_distributed + cmake --build $(BUILD_DIR) --target end_to_end_jit_aes_short + +run-end-to-end-dataflow-tests: build-end-to-end-dataflow-tests + $(BUILD_DIR)/tools/concretelang/tests/end_to_end_tests/end_to_end_jit_auto_parallelization + $(BUILD_DIR)/tools/concretelang/tests/end_to_end_tests/end_to_end_jit_distributed + +# benchmark + +build-benchmarks: build-initialized + cmake --build $(BUILD_DIR) --target end_to_end_benchmark + +## benchmark CPU + +BENCHMARK_CPU_DIR=tests/end_to_end_fixture/benchmarks_cpu + +$(BENCHMARK_CPU_DIR): + mkdir -p $@ + +$(BENCHMARK_CPU_DIR)/end_to_end_linalg_apply_lookup_table.yaml: tests/end_to_end_fixture/end_to_end_linalg_apply_lookup_table_gen.py + $(Python3_EXECUTABLE) $< --n-ct 64 128 1024 > $@ + +$(BENCHMARK_CPU_DIR)/%.yaml: tests/end_to_end_fixture/%_gen.py + $(Python3_EXECUTABLE) $< > $@ + 
+generate-cpu-benchmarks: $(BENCHMARK_CPU_DIR) $(BENCHMARK_CPU_DIR)/end_to_end_linalg_apply_lookup_table.yaml $(BENCHMARK_CPU_DIR)/end_to_end_apply_lookup_table.yaml + +SECURITY_TO_BENCH=128 +run-cpu-benchmarks: build-benchmarks generate-cpu-benchmarks + $(foreach security,$(SECURITY_TO_BENCH),$(BUILD_DIR)/bin/end_to_end_benchmark \ + --backend=cpu --security-level=$(security)\ + --benchmark_out=benchmarks_results.json --benchmark_out_format=json \ + $(BENCHMARK_CPU_DIR)/*.yaml;) + +FIXTURE_APPLICATION_DIR=tests/end_to_end_fixture/application/ + +run-cpu-benchmarks-application: + unzip $(FIXTURE_APPLICATION_DIR)/*.zip -d $(FIXTURE_APPLICATION_DIR) + $(BUILD_DIR)/bin/end_to_end_benchmark \ + --backend=cpu --benchmark_out=benchmarks_results.json --benchmark_out_format=json \ + $(FIXTURE_APPLICATION_DIR)*.yaml + +## benchmark GPU + +BENCHMARK_GPU_DIR=tests/end_to_end_fixture/benchmarks_gpu + +$(BENCHMARK_GPU_DIR): + mkdir -p $@ + +$(BENCHMARK_GPU_DIR)/end_to_end_linalg_apply_lookup_table.yaml: tests/end_to_end_fixture/end_to_end_linalg_apply_lookup_table_gen.py + $(Python3_EXECUTABLE) $< \ + --bitwidth 1 2 3 4 5 6 7 --n-ct 1 128 1024 2048 8192 + + +generate-gpu-benchmarks: $(BENCHMARK_GPU_DIR) $(BENCHMARK_GPU_DIR)/end_to_end_linalg_apply_lookup_table.yaml + +run-gpu-benchmarks: build-benchmarks generate-cpu-benchmarks + $(BUILD_DIR)/bin/end_to_end_benchmark \ + --backend=gpu \ + --benchmark_out=benchmarks_results.json --benchmark_out_format=json \ + $(BENCHMARK_CPU_DIR)/*.yaml + + + +build-mlbench: build-initialized + cmake --build $(BUILD_DIR) --target end_to_end_mlbench + +generate-mlbench: + mkdir -p tests/end_to_end_benchmarks/mlbench + rm -rf tests/end_to_end_benchmarks/mlbench/* + unzip tests/end_to_end_benchmarks/mlbench.zip -d tests/end_to_end_benchmarks/mlbench + rm -f tests/end_to_end_benchmarks/mlbench/**/*\=* + find tests/end_to_end_benchmarks/mlbench -name "*.mlir" -exec sed -e '1d' -e 's/ func / func.func /g' -e 's/ linalg.tensor_/ tensor./g' -e '$$d' -i 
{} \; + $(Python3_EXECUTABLE) tests/end_to_end_benchmarks/generate_bench_yaml.py tests/end_to_end_benchmarks/mlbench tests/end_to_end_benchmarks/mlbench/end_to_end_mlbench + +run-mlbench: build-mlbench generate-mlbench + tests/end_to_end_benchmarks/end_to_end_mlbench.sh tests/end_to_end_benchmarks/mlbench/ $(BUILD_DIR)/bin/end_to_end_mlbench + +run-mlbench-subset: build-mlbench generate-mlbench + @[ "${ML_BENCH_SUBSET_ID}" ] || ( echo "ML_BENCH_SUBSET_ID is not set"; exit 1 ) + tests/end_to_end_benchmarks/end_to_end_mlbench.sh tests/end_to_end_benchmarks/mlbench/end_to_end_mlbench_$(ML_BENCH_SUBSET_ID).yaml $(BUILD_DIR)/bin/end_to_end_mlbench + +show-stress-tests-summary: + @echo '------ Stress tests summary ------' + @echo + @echo 'Rates:' + @cd tests/stress_tests/trace && grep success_rate -R + @echo + @echo 'Parameters issues:' + @cd tests/stress_tests/trace && grep BAD -R || echo 'No issues' + +stress-tests: concretecompiler + pytest -vs tests/stress_tests + +# useful for faster cache generation, need pytest-parallel +stress-tests-fast-cache: concretecompiler + pytest --workers auto -vs tests/stress_tests + +# LLVM/MLIR dependencies + +all-deps: file-check not + +file-check: build-initialized + cmake --build $(BUILD_DIR) --target FileCheck + +not: build-initialized + cmake --build $(BUILD_DIR) --target not + +mlir-cpu-runner: build-initialized + cmake --build $(BUILD_DIR) --target mlir-cpu-runner + +opt: build-initialized + cmake --build $(BUILD_DIR) --target opt + +mlir-opt: build-initialized + cmake --build $(BUILD_DIR) --target mlir-opt + +mlir-translate: build-initialized + cmake --build $(BUILD_DIR) --target mlir-translate + +update-python-version: + echo "__version__ = \"`git describe --tags --abbrev=0 | grep -e '[0-9].*' -o`\"" > lib/Bindings/Python/version.txt + +check-python-format: + black --check tests/python/ lib/Bindings/Python/concrete/ + +python-format: + black tests/python/ lib/Bindings/Python/concrete/ + +python-lint: + pylint 
--rcfile=../pylintrc lib/Bindings/Python/concrete/compiler + +check-rust-format: + cd lib/Bindings/Rust && cargo fmt --check + +rust-format: + cd lib/Bindings/Rust && cargo fmt + +# libraries we want to have in the installation that aren't already a deps of other targets +install-deps: + cmake --build $(BUILD_DIR) --target MLIRCAPIRegistration + +ifeq ($(OS), darwin) +# rsync should normally come pre-installed on macOS +# and the --parents only exists for GNU's cp not BSD's cp +HIERARCHY_PRESERVING_COPY=rsync -R +else +HIERARCHY_PRESERVING_COPY=cp --parents +endif + +ifeq ($(OS),Windows_NT) + detected_OS := Windows +else + detected_OS := $(shell sh -c 'uname 2>/dev/null || echo Unknown') +endif + +PIP=$(Python3_EXECUTABLE) -m pip +PIP_WHEEL=$(PIP) wheel --no-deps -w $(BUILD_DIR)/wheels . +AUDIT_WHEEL_REPAIR=$(Python3_EXECUTABLE) -m auditwheel repair -w $(BUILD_DIR)/wheels + +linux-python-package: + $(PIP) install wheel auditwheel + # We need to run it twice: the first will generate the directories, so that + # the second run can find the packages via find_namespace_packages + $(PIP_WHEEL) + $(PIP_WHEEL) + GLIBC_VER=$(shell ldd --version | head -n 1 | grep -o '[^ ]*$$'|head|tr '.' 
'_'); \ + for PLATFORM in manylinux_$${GLIBC_VER}_x86_64 linux_x86_64; do \ + if $(AUDIT_WHEEL_REPAIR) $(BUILD_DIR)/wheels/*.whl --plat $$PLATFORM; then \ + echo Success for $$PLATFORM; \ + break; \ + else \ + echo No repair with $$PLATFORM; \ + fi \ + done + +darwin-python-package: + $(PIP) install wheel delocate + $(PIP_WHEEL) + delocate-wheel -v $(BUILD_DIR)/wheels/*macosx*.whl + +python-package: python-bindings $(OS)-python-package + @echo The python package is: $(BUILD_DIR)/wheels/*.whl + +install: concretecompiler concrete-optimizer-lib CAPI install-deps + $(info Install prefix set to $(INSTALL_PREFIX)) + $(info Installing under $(INSTALL_PATH)) + mkdir -p $(INSTALL_PATH)/include + cp -R $(abspath $(BUILD_DIR))/bin $(INSTALL_PATH) + cp -R $(abspath $(BUILD_DIR))/lib $(INSTALL_PATH) + cp $(LIB_CONCRETE_OPTIMIZER_CPP) $(INSTALL_PATH)/lib/ + cp $(CONCRETE_OPTIMIZER_DIR)/concrete-optimizer-cpp/src/cpp/concrete-optimizer.hpp $(INSTALL_PATH)/include + + # Doing find + grep + while loop is a way to have portable behaviour between macOS and GNU/Linux + # as with `find . -regex "regex"`, the regex language is not the same / to have the same language, the + # command changes (macOs: `find -E . -regex`, GNU: `find . -regextype posix-extended "regex") + cd $(MAKEFILE_ROOT_DIR)/include && \ + find . | \ + grep "^.*\.\(h\|hpp\|td\)$$" | \ + while read filepath; do $(HIERARCHY_PRESERVING_COPY) $$filepath $(INSTALL_PATH)/include; done + cd $(MAKEFILE_ROOT_DIR)/../llvm-project/llvm/include && \ + find . | \ + grep "^.*\.\(h\|hpp\|td\)$$" | \ + while read filepath; do $(HIERARCHY_PRESERVING_COPY) $$filepath $(INSTALL_PATH)/include; done + cd $(MAKEFILE_ROOT_DIR)/../llvm-project/mlir/include && \ + find . | \ + grep "^.*\.\(h\|hpp\|td\)$$" | \ + while read filepath; do $(HIERARCHY_PRESERVING_COPY) $$filepath $(INSTALL_PATH)/include; done + + cd $(abspath $(BUILD_DIR))/include && find . 
-iname '*.inc' -exec $(HIERARCHY_PRESERVING_COPY) {} $(INSTALL_PATH)/include \; + cd $(abspath $(BUILD_DIR))/tools/concretelang/include && find . -iname '*.inc' -exec $(HIERARCHY_PRESERVING_COPY) {} $(INSTALL_PATH)/include \; + cd $(abspath $(BUILD_DIR))/tools/mlir/include && find . -iname '*.inc' -exec $(HIERARCHY_PRESERVING_COPY) {} $(INSTALL_PATH)/include \; + +.PHONY: build-initialized \ + build-end-to-end-jit \ + concretecompiler \ + python-bindings \ + add-deps \ + file-check \ + not \ + update-python-version \ + python-lint \ + python-format \ + check-python-format \ + concrete-optimizer-lib \ + build-tests \ + run-tests \ + run-check-tests \ + build-unit-tests \ + run-unit-tests \ + run-python-tests \ + build-end-to-end-tests \ + build-end-to-end-dataflow-tests \ + run-end-to-end-dataflow-tests \ + opt \ + mlir-opt \ + mlir-cpu-runner \ + mlir-translate diff --git a/compilers/concrete-compiler/compiler/README.md b/compilers/concrete-compiler/compiler/README.md new file mode 100644 index 000000000..0ca6da9c8 --- /dev/null +++ b/compilers/concrete-compiler/compiler/README.md @@ -0,0 +1,142 @@ +# Concrete Compiler + +The Concrete Compiler is a set of tools that allows the compilation of a high-level, crypto-free representation of an arithmetic circuit of operations on encrypted integers. +This compiler is based on the [MLIR project](https://mlir.llvm.org/): it uses the framework and the standard dialects exposed by MLIR, and defines new FHE-specific dialects and passes to lower the high-level FHE dialects to standard MLIR dialects. + +## Getting started + +The source of the project is located in the `compiler` directory. + +```sh +cd compiler +``` + +### Prerequisite: Building HPX and enabling dataflow parallelism (optional) + +In order to implement the dataflow parallelism and the distribution of the computation, we use the [HPX Standard Library](https://hpx-docs.stellar-group.org/). 
You can either use your own HPX installation by setting the `HPX_INSTALL_DIR` environment variable, or you can install HPX in the default path of our build system with the following command: + +```sh +make install-hpx-from-source +``` + +This may fail on some systems when dependencies are missing. Some required recent packages are CMake, HWLOC and Boost. For full details see [HPX Quickstart guide](https://hpx-docs.stellar-group.org/tags/1.7.1/html/quickstart.html). +Once you have a proper installation of HPX, enable the dataflow parallelism by setting `DATAFLOW_EXECUTION_ENABLED=ON`. + +### Prerequisite: Fetch git submodules + +This project relies on `llvm-project` and `concrete-optimizer` as git submodules, so you need to initialize and update the git submodules. + +```sh +git submodule init +git submodule update +``` + +### Prerequisite: python packages + +Install MLIR python requirements in your dev python environment: + +```bash +# From repo root +pip install -r ./llvm-project/mlir/python/requirements.txt +# From compiler dir +pip install -r ../llvm-project/mlir/python/requirements.txt +``` + +### Build from source + +We use CMake as the main build system, with a Makefile on top to initialize the build system and define straightforward targets for the main artifacts of the project. You can initialize and build all the main artifacts with the following command: + +```sh +make all +``` + +or in several steps: + +Generate the compiler build system, in a `build-*` directory + +```sh +make build-initialized +``` + +Build the compiler + +```sh +make concretecompiler +``` + +Run the compiler + +```sh +./build-Release/bin/concretecompiler +``` + +### Installation from source + +You can install libs, bins, and include files into a specific directory by running: + +```sh +make INSTALL_PREFIX=/your/directory install +``` + +You will then find `lib`, `bin`, and `include` under `/your/directory/concretecompiler`. 
+ +### Tests + +You can build all the tests with the following command: + +```sh +make build-tests +``` + +and run them with: + +```sh +make run-tests +``` + +### Benchmarks + +You can build all the benchmarks with the following command: + +```sh +make build-benchmarks +``` + +and run them with: + +```sh +make run-benchmarks +``` + +## Build releases + +### Build tarball + +You can create a tarball containing libs, bins, and include files for the tools of the compiler, by following previous steps of [installation from source](#installation-from-source), then creating a tar archive from the installation directory. + +### Build the Python Package + +Currently supported platforms: +- Linux x86_64 for python 3.7, 3.8, 3.9, and 3.10 + +pybind11 is required to build the python package; you can install it in your current environment with: + +```bash +$ pip install pybind11 +``` + +To specify which python executable to target, set the `Python3_EXECUTABLE` environment variable. + +#### Build wheels in your environment + +Building the wheels is actually simple. + +```bash +$ pip wheel --no-deps -w ../wheels . +``` + +Depending on the platform you are using (especially Linux), you might need to use `auditwheel` to specify the platform this wheel is targeting. For example, in our build of the package for Linux x86_64 and GLIBC 2.24, we also run: + +```bash +$ auditwheel repair ../wheels/*.whl --plat manylinux_2_24_x86_64 -w ../wheels +``` diff --git a/compilers/concrete-compiler/compiler/RELEASE_README.md b/compilers/concrete-compiler/compiler/RELEASE_README.md new file mode 100644 index 000000000..f6e3ef1cf --- /dev/null +++ b/compilers/concrete-compiler/compiler/RELEASE_README.md @@ -0,0 +1,3 @@ +# Concrete Compiler + +The Concrete Compiler takes a high-level computation model and produces a program that evaluates the model in a homomorphic way. 
diff --git a/compilers/concrete-compiler/compiler/cmake/modules/AddConcretelangDoc.cmake b/compilers/concrete-compiler/compiler/cmake/modules/AddConcretelangDoc.cmake new file mode 100644 index 000000000..4fb0ea823 --- /dev/null +++ b/compilers/concrete-compiler/compiler/cmake/modules/AddConcretelangDoc.cmake @@ -0,0 +1,9 @@ +include(AddMLIR) + +function(add_concretelang_doc doc_filename output_file output_directory command) + set(SAVED_MLIR_BINARY_DIR ${MLIR_BINARY_DIR}) + set(MLIR_BINARY_DIR ${CONCRETELANG_BINARY_DIR}) + add_mlir_doc(${doc_filename} ${output_file} ${output_directory} ${command} ${ARGN}) + set(MLIR_BINARY_DIR ${SAVED_MLIR_BINARY_DIR}) + unset(SAVED_MLIR_BINARY_DIR) +endfunction() diff --git a/compilers/concrete-compiler/compiler/concrete-core b/compilers/concrete-compiler/compiler/concrete-core new file mode 160000 index 000000000..bf79f5db6 --- /dev/null +++ b/compilers/concrete-compiler/compiler/concrete-core @@ -0,0 +1 @@ +Subproject commit bf79f5db635cff7a224a44d01918aa6cf59b5493 diff --git a/compilers/concrete-compiler/compiler/concrete-cpu b/compilers/concrete-compiler/compiler/concrete-cpu new file mode 160000 index 000000000..db262714c --- /dev/null +++ b/compilers/concrete-compiler/compiler/concrete-cpu @@ -0,0 +1 @@ +Subproject commit db262714cde546344d25f0a81e7974fd0277a55f diff --git a/compilers/concrete-compiler/compiler/concrete-optimizer b/compilers/concrete-compiler/compiler/concrete-optimizer new file mode 160000 index 000000000..85abbeada --- /dev/null +++ b/compilers/concrete-compiler/compiler/concrete-optimizer @@ -0,0 +1 @@ +Subproject commit 85abbeadaed27dcc709969cb3f0d1b2afcaf5491 diff --git a/compilers/concrete-compiler/compiler/hpx.ini b/compilers/concrete-compiler/compiler/hpx.ini new file mode 100644 index 000000000..03605df68 --- /dev/null +++ b/compilers/concrete-compiler/compiler/hpx.ini @@ -0,0 +1,27 @@ +[hpx] +location = ${HPX_LOCATION:$[system.prefix]} +component_path = 
$[hpx.location]/lib/hpx:$[system.executable_prefix]/lib/hpx:$[system.executable_prefix]/../lib/hpx +master_ini_path = $[hpx.location]/share/hpx-:$[system.executable_prefix]/share/hpx-:$[system.executable_prefix]/../share/hpx- +ini_path = $[hpx.master_ini_path]/ini +os_threads = 2 +localities = 1 +program_name = +cmd_line = +lock_detection = ${HPX_LOCK_DETECTION:0} +throw_on_held_lock = ${HPX_THROW_ON_HELD_LOCK:1} +minimal_deadlock_detection = +spinlock_deadlock_detection = +spinlock_deadlock_detection_limit = ${HPX_SPINLOCK_DEADLOCK_DETECTION_LIMIT:1000000} +max_background_threads = ${HPX_MAX_BACKGROUND_THREADS:$[hpx.os_threads]} +max_idle_loop_count = ${HPX_MAX_IDLE_LOOP_COUNT:} +max_busy_loop_count = ${HPX_MAX_BUSY_LOOP_COUNT:} +max_idle_backoff_time = ${HPX_MAX_IDLE_BACKOFF_TIME:} +exception_verbosity = ${HPX_EXCEPTION_VERBOSITY:1} +default_stack_size = 0x20000000 + +[hpx.stacks] +small_size = 0x8000000 +medium_size = 0x10000000 +large_size = 0x20000000 +huge_size = 0x40000000 +use_guard_pages = ${HPX_THREAD_GUARD_PAGE:3} diff --git a/compilers/concrete-compiler/compiler/include/CMakeLists.txt b/compilers/concrete-compiler/compiler/include/CMakeLists.txt new file mode 100644 index 000000000..be358a95b --- /dev/null +++ b/compilers/concrete-compiler/compiler/include/CMakeLists.txt @@ -0,0 +1 @@ +add_subdirectory(concretelang) diff --git a/compilers/concrete-compiler/compiler/include/boost-single-header/outcome.hpp b/compilers/concrete-compiler/compiler/include/boost-single-header/outcome.hpp new file mode 100644 index 000000000..45fa48f96 --- /dev/null +++ b/compilers/concrete-compiler/compiler/include/boost-single-header/outcome.hpp @@ -0,0 +1,7939 @@ +/* Include the default amount of outcome +(C) 2018-2021 Niall Douglas (4 commits) +File Created: Mar 2018 + + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License in the accompanying file +Licence.txt or at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Distributed under the Boost Software License, Version 1.0. + (See accompanying file Licence.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ +#if !OUTCOME_ENABLE_CXX_MODULES || !0 || defined(GENERATING_OUTCOME_MODULE_INTERFACE) || OUTCOME_DISABLE_CXX_MODULES +/* Tells C++ coroutines about Outcome's result +(C) 2019 Niall Douglas (12 commits) +File Created: Oct 2019 + + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License in the accompanying file +Licence.txt or at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Distributed under the Boost Software License, Version 1.0. + (See accompanying file Licence.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ +#ifndef OUTCOME_COROUTINE_SUPPORT_HPP +#define OUTCOME_COROUTINE_SUPPORT_HPP +/* Configure Outcome with QuickCppLib +(C) 2015-2021 Niall Douglas (24 commits) +File Created: August 2015 + + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License in the accompanying file +Licence.txt or at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Distributed under the Boost Software License, Version 1.0. + (See accompanying file Licence.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ +#ifndef OUTCOME_V2_CONFIG_HPP +#define OUTCOME_V2_CONFIG_HPP +/* Sets Outcome version +(C) 2017-2019 Niall Douglas (4 commits) + + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License in the accompanying file +Licence.txt or at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Distributed under the Boost Software License, Version 1.0. + (See accompanying file Licence.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ +/*! AWAITING HUGO JSON CONVERSION TOOL */ +#define OUTCOME_VERSION_MAJOR 2 +/*! AWAITING HUGO JSON CONVERSION TOOL */ +#define OUTCOME_VERSION_MINOR 2 +/*! AWAITING HUGO JSON CONVERSION TOOL */ +#define OUTCOME_VERSION_PATCH 0 +/*! AWAITING HUGO JSON CONVERSION TOOL */ +#define OUTCOME_VERSION_REVISION 0 // Revision version for cmake and DLL version stamping +/*! 
AWAITING HUGO JSON CONVERSION TOOL */ +#ifndef OUTCOME_DISABLE_ABI_PERMUTATION +#define OUTCOME_UNSTABLE_VERSION +#endif +// Pull in detection of __MINGW64_VERSION_MAJOR +#if defined(__MINGW32__) && !0 +#include <_mingw.h> +#endif +/* Configure QuickCppLib +(C) 2016-2021 Niall Douglas (8 commits) + + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License in the accompanying file +Licence.txt or at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Distributed under the Boost Software License, Version 1.0. + (See accompanying file Licence.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ +#ifndef QUICKCPPLIB_CONFIG_HPP +#define QUICKCPPLIB_CONFIG_HPP +/* Provides SG-10 feature checking for all C++ compilers +(C) 2014-2017 Niall Douglas (13 commits) +File Created: Nov 2014 + + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License in the accompanying file +Licence.txt or at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Distributed under the Boost Software License, Version 1.0. 
+ (See accompanying file Licence.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ +#ifndef QUICKCPPLIB_HAS_FEATURE_H +#define QUICKCPPLIB_HAS_FEATURE_H +#if __cplusplus >= 201103 +// Some of these macros ended up getting removed by ISO standards, +// they are prefixed with //// +////#if !defined(__cpp_alignas) +////#define __cpp_alignas 190000 +////#endif +////#if !defined(__cpp_default_function_template_args) +////#define __cpp_default_function_template_args 190000 +////#endif +////#if !defined(__cpp_defaulted_functions) +////#define __cpp_defaulted_functions 190000 +////#endif +////#if !defined(__cpp_deleted_functions) +////#define __cpp_deleted_functions 190000 +////#endif +////#if !defined(__cpp_generalized_initializers) +////#define __cpp_generalized_initializers 190000 +////#endif +////#if !defined(__cpp_implicit_moves) +////#define __cpp_implicit_moves 190000 +////#endif +////#if !defined(__cpp_inline_namespaces) +////#define __cpp_inline_namespaces 190000 +////#endif +////#if !defined(__cpp_local_type_template_args) +////#define __cpp_local_type_template_args 190000 +////#endif +////#if !defined(__cpp_noexcept) +////#define __cpp_noexcept 190000 +////#endif +////#if !defined(__cpp_nonstatic_member_init) +////#define __cpp_nonstatic_member_init 190000 +////#endif +////#if !defined(__cpp_nullptr) +////#define __cpp_nullptr 190000 +////#endif +////#if !defined(__cpp_override_control) +////#define __cpp_override_control 190000 +////#endif +////#if !defined(__cpp_thread_local) +////#define __cpp_thread_local 190000 +////#endif +////#if !defined(__cpp_auto_type) +////#define __cpp_auto_type 190000 +////#endif +////#if !defined(__cpp_strong_enums) +////#define __cpp_strong_enums 190000 +////#endif +////#if !defined(__cpp_trailing_return) +////#define __cpp_trailing_return 190000 +////#endif +////#if !defined(__cpp_unrestricted_unions) +////#define __cpp_unrestricted_unions 190000 +////#endif +#if !defined(__cpp_alias_templates) +#define 
__cpp_alias_templates 190000 +#endif +#if !defined(__cpp_attributes) +#define __cpp_attributes 190000 +#endif +#if !defined(__cpp_constexpr) +#if __cplusplus >= 201402 +#define __cpp_constexpr 201304 // relaxed constexpr +#else +#define __cpp_constexpr 190000 +#endif +#endif +#if !defined(__cpp_decltype) +#define __cpp_decltype 190000 +#endif +#if !defined(__cpp_delegating_constructors) +#define __cpp_delegating_constructors 190000 +#endif +#if !defined(__cpp_explicit_conversion) //// renamed from __cpp_explicit_conversions +#define __cpp_explicit_conversion 190000 +#endif +#if !defined(__cpp_inheriting_constructors) +#define __cpp_inheriting_constructors 190000 +#endif +#if !defined(__cpp_initializer_lists) //// NEW +#define __cpp_initializer_lists 190000 +#endif +#if !defined(__cpp_lambdas) +#define __cpp_lambdas 190000 +#endif +#if !defined(__cpp_nsdmi) +#define __cpp_nsdmi 190000 //// NEW +#endif +#if !defined(__cpp_range_based_for) //// renamed from __cpp_range_for +#define __cpp_range_based_for 190000 +#endif +#if !defined(__cpp_raw_strings) +#define __cpp_raw_strings 190000 +#endif +#if !defined(__cpp_ref_qualifiers) //// renamed from __cpp_reference_qualified_functions +#define __cpp_ref_qualifiers 190000 +#endif +#if !defined(__cpp_rvalue_references) +#define __cpp_rvalue_references 190000 +#endif +#if !defined(__cpp_static_assert) +#define __cpp_static_assert 190000 +#endif +#if !defined(__cpp_unicode_characters) //// NEW +#define __cpp_unicode_characters 190000 +#endif +#if !defined(__cpp_unicode_literals) +#define __cpp_unicode_literals 190000 +#endif +#if !defined(__cpp_user_defined_literals) +#define __cpp_user_defined_literals 190000 +#endif +#if !defined(__cpp_variadic_templates) +#define __cpp_variadic_templates 190000 +#endif +#endif +#if __cplusplus >= 201402 +// Some of these macros ended up getting removed by ISO standards, +// they are prefixed with //// +////#if !defined(__cpp_contextual_conversions) +////#define __cpp_contextual_conversions 
190000 +////#endif +////#if !defined(__cpp_digit_separators) +////#define __cpp_digit_separators 190000 +////#endif +////#if !defined(__cpp_relaxed_constexpr) +////#define __cpp_relaxed_constexpr 190000 +////#endif +////#if !defined(__cpp_runtime_arrays) +////# define __cpp_runtime_arrays 190000 +////#endif +#if !defined(__cpp_aggregate_nsdmi) +#define __cpp_aggregate_nsdmi 190000 +#endif +#if !defined(__cpp_binary_literals) +#define __cpp_binary_literals 190000 +#endif +#if !defined(__cpp_decltype_auto) +#define __cpp_decltype_auto 190000 +#endif +#if !defined(__cpp_generic_lambdas) +#define __cpp_generic_lambdas 190000 +#endif +#if !defined(__cpp_init_captures) +#define __cpp_init_captures 190000 +#endif +#if !defined(__cpp_return_type_deduction) +#define __cpp_return_type_deduction 190000 +#endif +#if !defined(__cpp_sized_deallocation) +#define __cpp_sized_deallocation 190000 +#endif +#if !defined(__cpp_variable_templates) +#define __cpp_variable_templates 190000 +#endif +#endif +// VS2010: _MSC_VER=1600 +// VS2012: _MSC_VER=1700 +// VS2013: _MSC_VER=1800 +// VS2015: _MSC_VER=1900 +// VS2017: _MSC_VER=1910 +#if defined(_MSC_VER) && !defined(__clang__) +#if !defined(__cpp_exceptions) && defined(_CPPUNWIND) +#define __cpp_exceptions 190000 +#endif +#if !defined(__cpp_rtti) && defined(_CPPRTTI) +#define __cpp_rtti 190000 +#endif +// C++ 11 +#if !defined(__cpp_alias_templates) && _MSC_VER >= 1800 +#define __cpp_alias_templates 190000 +#endif +#if !defined(__cpp_attributes) +#define __cpp_attributes 190000 +#endif +#if !defined(__cpp_constexpr) && _MSC_FULL_VER >= 190023506 /* VS2015 */ +#define __cpp_constexpr 190000 +#endif +#if !defined(__cpp_decltype) && _MSC_VER >= 1600 +#define __cpp_decltype 190000 +#endif +#if !defined(__cpp_delegating_constructors) && _MSC_VER >= 1800 +#define __cpp_delegating_constructors 190000 +#endif +#if !defined(__cpp_explicit_conversion) && _MSC_VER >= 1800 +#define __cpp_explicit_conversion 190000 +#endif +#if 
!defined(__cpp_inheriting_constructors) && _MSC_VER >= 1900 +#define __cpp_inheriting_constructors 190000 +#endif +#if !defined(__cpp_initializer_lists) && _MSC_VER >= 1900 +#define __cpp_initializer_lists 190000 +#endif +#if !defined(__cpp_lambdas) && _MSC_VER >= 1600 +#define __cpp_lambdas 190000 +#endif +#if !defined(__cpp_nsdmi) && _MSC_VER >= 1900 +#define __cpp_nsdmi 190000 +#endif +#if !defined(__cpp_range_based_for) && _MSC_VER >= 1700 +#define __cpp_range_based_for 190000 +#endif +#if !defined(__cpp_raw_strings) && _MSC_VER >= 1800 +#define __cpp_raw_strings 190000 +#endif +#if !defined(__cpp_ref_qualifiers) && _MSC_VER >= 1900 +#define __cpp_ref_qualifiers 190000 +#endif +#if !defined(__cpp_rvalue_references) && _MSC_VER >= 1600 +#define __cpp_rvalue_references 190000 +#endif +#if !defined(__cpp_static_assert) && _MSC_VER >= 1600 +#define __cpp_static_assert 190000 +#endif +//#if !defined(__cpp_unicode_literals) +//# define __cpp_unicode_literals 190000 +//#endif +#if !defined(__cpp_user_defined_literals) && _MSC_VER >= 1900 +#define __cpp_user_defined_literals 190000 +#endif +#if !defined(__cpp_variadic_templates) && _MSC_VER >= 1800 +#define __cpp_variadic_templates 190000 +#endif +// C++ 14 +//#if !defined(__cpp_aggregate_nsdmi) +//#define __cpp_aggregate_nsdmi 190000 +//#endif +#if !defined(__cpp_binary_literals) && _MSC_VER >= 1900 +#define __cpp_binary_literals 190000 +#endif +#if !defined(__cpp_decltype_auto) && _MSC_VER >= 1900 +#define __cpp_decltype_auto 190000 +#endif +#if !defined(__cpp_generic_lambdas) && _MSC_VER >= 1900 +#define __cpp_generic_lambdas 190000 +#endif +#if !defined(__cpp_init_captures) && _MSC_VER >= 1900 +#define __cpp_init_captures 190000 +#endif +#if !defined(__cpp_return_type_deduction) && _MSC_VER >= 1900 +#define __cpp_return_type_deduction 190000 +#endif +#if !defined(__cpp_sized_deallocation) && _MSC_VER >= 1900 +#define __cpp_sized_deallocation 190000 +#endif +#if !defined(__cpp_variable_templates) && _MSC_FULL_VER >= 
190023506 +#define __cpp_variable_templates 190000 +#endif +#endif // _MSC_VER +// Much to my surprise, GCC's support of these is actually incomplete, so fill in the gaps +#if (defined(__GNUC__) && !defined(__clang__)) +#define QUICKCPPLIB_GCC (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) +#if !defined(__cpp_exceptions) && defined(__EXCEPTIONS) +#define __cpp_exceptions 190000 +#endif +#if !defined(__cpp_rtti) && defined(__GXX_RTTI) +#define __cpp_rtti 190000 +#endif +// C++ 11 +#if defined(__GXX_EXPERIMENTAL_CXX0X__) +#if !defined(__cpp_alias_templates) && (QUICKCPPLIB_GCC >= 40700) +#define __cpp_alias_templates 190000 +#endif +#if !defined(__cpp_attributes) && (QUICKCPPLIB_GCC >= 40800) +#define __cpp_attributes 190000 +#endif +#if !defined(__cpp_constexpr) && (QUICKCPPLIB_GCC >= 40600) +#define __cpp_constexpr 190000 +#endif +#if !defined(__cpp_decltype) && (QUICKCPPLIB_GCC >= 40300) +#define __cpp_decltype 190000 +#endif +#if !defined(__cpp_delegating_constructors) && (QUICKCPPLIB_GCC >= 40700) +#define __cpp_delegating_constructors 190000 +#endif +#if !defined(__cpp_explicit_conversion) && (QUICKCPPLIB_GCC >= 40500) +#define __cpp_explicit_conversion 190000 +#endif +#if !defined(__cpp_inheriting_constructors) && (QUICKCPPLIB_GCC >= 40800) +#define __cpp_inheriting_constructors 190000 +#endif +#if !defined(__cpp_initializer_lists) && (QUICKCPPLIB_GCC >= 40800) +#define __cpp_initializer_lists 190000 +#endif +#if !defined(__cpp_lambdas) && (QUICKCPPLIB_GCC >= 40500) +#define __cpp_lambdas 190000 +#endif +#if !defined(__cpp_nsdmi) && (QUICKCPPLIB_GCC >= 40700) +#define __cpp_nsdmi 190000 +#endif +#if !defined(__cpp_range_based_for) && (QUICKCPPLIB_GCC >= 40600) +#define __cpp_range_based_for 190000 +#endif +#if !defined(__cpp_raw_strings) && (QUICKCPPLIB_GCC >= 40500) +#define __cpp_raw_strings 190000 +#endif +#if !defined(__cpp_ref_qualifiers) && (QUICKCPPLIB_GCC >= 40801) +#define __cpp_ref_qualifiers 190000 +#endif +// __cpp_rvalue_reference 
deviation +#if !defined(__cpp_rvalue_references) && defined(__cpp_rvalue_reference) +#define __cpp_rvalue_references __cpp_rvalue_reference +#endif +#if !defined(__cpp_static_assert) && (QUICKCPPLIB_GCC >= 40300) +#define __cpp_static_assert 190000 +#endif +#if !defined(__cpp_unicode_characters) && (QUICKCPPLIB_GCC >= 40500) +#define __cpp_unicode_characters 190000 +#endif +#if !defined(__cpp_unicode_literals) && (QUICKCPPLIB_GCC >= 40500) +#define __cpp_unicode_literals 190000 +#endif +#if !defined(__cpp_user_defined_literals) && (QUICKCPPLIB_GCC >= 40700) +#define __cpp_user_defined_literals 190000 +#endif +#if !defined(__cpp_variadic_templates) && (QUICKCPPLIB_GCC >= 40400) +#define __cpp_variadic_templates 190000 +#endif +// C++ 14 +// Every C++ 14 supporting GCC does the right thing here +#endif // __GXX_EXPERIMENTAL_CXX0X__ +#endif // GCC +// clang deviates in some places from the present SG-10 draft, plus older +// clangs are quite incomplete +#if defined(__clang__) +#define QUICKCPPLIB_CLANG (__clang_major__ * 10000 + __clang_minor__ * 100 + __clang_patchlevel__) +#if !defined(__cpp_exceptions) && (defined(__EXCEPTIONS) || defined(_CPPUNWIND)) +#define __cpp_exceptions 190000 +#endif +#if !defined(__cpp_rtti) && (defined(__GXX_RTTI) || defined(_CPPRTTI)) +#define __cpp_rtti 190000 +#endif +// C++ 11 +#if defined(__GXX_EXPERIMENTAL_CXX0X__) +#if !defined(__cpp_alias_templates) && (QUICKCPPLIB_CLANG >= 30000) +#define __cpp_alias_templates 190000 +#endif +#if !defined(__cpp_attributes) && (QUICKCPPLIB_CLANG >= 30300) +#define __cpp_attributes 190000 +#endif +#if !defined(__cpp_constexpr) && (QUICKCPPLIB_CLANG >= 30100) +#define __cpp_constexpr 190000 +#endif +#if !defined(__cpp_decltype) && (QUICKCPPLIB_CLANG >= 20900) +#define __cpp_decltype 190000 +#endif +#if !defined(__cpp_delegating_constructors) && (QUICKCPPLIB_CLANG >= 30000) +#define __cpp_delegating_constructors 190000 +#endif +#if !defined(__cpp_explicit_conversion) && (QUICKCPPLIB_CLANG >= 30000) 
+#define __cpp_explicit_conversion 190000 +#endif +#if !defined(__cpp_inheriting_constructors) && (QUICKCPPLIB_CLANG >= 30300) +#define __cpp_inheriting_constructors 190000 +#endif +#if !defined(__cpp_initializer_lists) && (QUICKCPPLIB_CLANG >= 30100) +#define __cpp_initializer_lists 190000 +#endif +#if !defined(__cpp_lambdas) && (QUICKCPPLIB_CLANG >= 30100) +#define __cpp_lambdas 190000 +#endif +#if !defined(__cpp_nsdmi) && (QUICKCPPLIB_CLANG >= 30000) +#define __cpp_nsdmi 190000 +#endif +#if !defined(__cpp_range_based_for) && (QUICKCPPLIB_CLANG >= 30000) +#define __cpp_range_based_for 190000 +#endif +// __cpp_raw_string_literals deviation +#if !defined(__cpp_raw_strings) && defined(__cpp_raw_string_literals) +#define __cpp_raw_strings __cpp_raw_string_literals +#endif +#if !defined(__cpp_raw_strings) && (QUICKCPPLIB_CLANG >= 30000) +#define __cpp_raw_strings 190000 +#endif +#if !defined(__cpp_ref_qualifiers) && (QUICKCPPLIB_CLANG >= 20900) +#define __cpp_ref_qualifiers 190000 +#endif +// __cpp_rvalue_reference deviation +#if !defined(__cpp_rvalue_references) && defined(__cpp_rvalue_reference) +#define __cpp_rvalue_references __cpp_rvalue_reference +#endif +#if !defined(__cpp_rvalue_references) && (QUICKCPPLIB_CLANG >= 20900) +#define __cpp_rvalue_references 190000 +#endif +#if !defined(__cpp_static_assert) && (QUICKCPPLIB_CLANG >= 20900) +#define __cpp_static_assert 190000 +#endif +#if !defined(__cpp_unicode_characters) && (QUICKCPPLIB_CLANG >= 30000) +#define __cpp_unicode_characters 190000 +#endif +#if !defined(__cpp_unicode_literals) && (QUICKCPPLIB_CLANG >= 30000) +#define __cpp_unicode_literals 190000 +#endif +// __cpp_user_literals deviation +#if !defined(__cpp_user_defined_literals) && defined(__cpp_user_literals) +#define __cpp_user_defined_literals __cpp_user_literals +#endif +#if !defined(__cpp_user_defined_literals) && (QUICKCPPLIB_CLANG >= 30100) +#define __cpp_user_defined_literals 190000 +#endif +#if !defined(__cpp_variadic_templates) && 
(QUICKCPPLIB_CLANG >= 20900) +#define __cpp_variadic_templates 190000 +#endif +// C++ 14 +// Every C++ 14 supporting clang does the right thing here +#endif // __GXX_EXPERIMENTAL_CXX0X__ +#endif // clang +#endif +#ifndef QUICKCPPLIB_DISABLE_ABI_PERMUTATION +// Note the second line of this file must ALWAYS be the git SHA, third line ALWAYS the git SHA update time +#define QUICKCPPLIB_PREVIOUS_COMMIT_REF e691a6dc0358c1091d59022af06a97d68fcc074d +#define QUICKCPPLIB_PREVIOUS_COMMIT_DATE "2021-09-15 10:28:22 +00:00" +#define QUICKCPPLIB_PREVIOUS_COMMIT_UNIQUE e691a6dc +#endif +#define QUICKCPPLIB_VERSION_GLUE2(a, b) a##b +#define QUICKCPPLIB_VERSION_GLUE(a, b) QUICKCPPLIB_VERSION_GLUE2(a, b) +// clang-format off +#if defined(QUICKCPPLIB_DISABLE_ABI_PERMUTATION) +#define QUICKCPPLIB_NAMESPACE quickcpplib +#define QUICKCPPLIB_NAMESPACE_BEGIN namespace quickcpplib { +#define QUICKCPPLIB_NAMESPACE_END } +#else +#define QUICKCPPLIB_NAMESPACE quickcpplib::QUICKCPPLIB_VERSION_GLUE(_, QUICKCPPLIB_PREVIOUS_COMMIT_UNIQUE) +#define QUICKCPPLIB_NAMESPACE_BEGIN namespace quickcpplib { namespace QUICKCPPLIB_VERSION_GLUE(_, QUICKCPPLIB_PREVIOUS_COMMIT_UNIQUE) { +#define QUICKCPPLIB_NAMESPACE_END } } +#endif +// clang-format on +#ifdef _MSC_VER +#define QUICKCPPLIB_BIND_MESSAGE_PRAGMA2(x) __pragma(message(x)) +#define QUICKCPPLIB_BIND_MESSAGE_PRAGMA(x) QUICKCPPLIB_BIND_MESSAGE_PRAGMA2(x) +#define QUICKCPPLIB_BIND_MESSAGE_PREFIX(type) __FILE__ "(" QUICKCPPLIB_BIND_STRINGIZE2(__LINE__) "): " type ": " +#define QUICKCPPLIB_BIND_MESSAGE_(type, prefix, msg) QUICKCPPLIB_BIND_MESSAGE_PRAGMA(prefix msg) +#else +#define QUICKCPPLIB_BIND_MESSAGE_PRAGMA2(x) _Pragma(#x) +#define QUICKCPPLIB_BIND_MESSAGE_PRAGMA(type, x) QUICKCPPLIB_BIND_MESSAGE_PRAGMA2(type x) +#define QUICKCPPLIB_BIND_MESSAGE_(type, prefix, msg) QUICKCPPLIB_BIND_MESSAGE_PRAGMA(type, msg) +#endif +//! 
Have the compiler output a message +#define QUICKCPPLIB_MESSAGE(msg) QUICKCPPLIB_BIND_MESSAGE_(message, QUICKCPPLIB_BIND_MESSAGE_PREFIX("message"), msg) +//! Have the compiler output a note +#define QUICKCPPLIB_NOTE(msg) QUICKCPPLIB_BIND_MESSAGE_(message, QUICKCPPLIB_BIND_MESSAGE_PREFIX("note"), msg) +//! Have the compiler output a warning +#define QUICKCPPLIB_WARNING(msg) QUICKCPPLIB_BIND_MESSAGE_(GCC warning, QUICKCPPLIB_BIND_MESSAGE_PREFIX("warning"), msg) +//! Have the compiler output an error +#define QUICKCPPLIB_ERROR(msg) QUICKCPPLIB_BIND_MESSAGE_(GCC error, QUICKCPPLIB_BIND_MESSAGE_PREFIX("error"), msg) +#define QUICKCPPLIB_ANNOTATE_RWLOCK_CREATE(p) +#define QUICKCPPLIB_ANNOTATE_RWLOCK_DESTROY(p) +#define QUICKCPPLIB_ANNOTATE_RWLOCK_ACQUIRED(p, s) +#define QUICKCPPLIB_ANNOTATE_RWLOCK_RELEASED(p, s) +#define QUICKCPPLIB_ANNOTATE_IGNORE_READS_BEGIN() +#define QUICKCPPLIB_ANNOTATE_IGNORE_READS_END() +#define QUICKCPPLIB_ANNOTATE_IGNORE_WRITES_BEGIN() +#define QUICKCPPLIB_ANNOTATE_IGNORE_WRITES_END() +#define QUICKCPPLIB_DRD_IGNORE_VAR(x) +#define QUICKCPPLIB_DRD_STOP_IGNORING_VAR(x) +#define QUICKCPPLIB_RUNNING_ON_VALGRIND (0) +#ifndef QUICKCPPLIB_IN_THREAD_SANITIZER +#if defined(__has_feature) +#if __has_feature(thread_sanitizer) +#define QUICKCPPLIB_IN_THREAD_SANITIZER 1 +#endif +#elif defined(__SANITIZE_THREAD__) +#define QUICKCPPLIB_IN_THREAD_SANITIZER 1 +#endif +#endif +#ifndef QUICKCPPLIB_IN_THREAD_SANITIZER +#define QUICKCPPLIB_IN_THREAD_SANITIZER 0 +#endif +#if QUICKCPPLIB_IN_THREAD_SANITIZER +#define QUICKCPPLIB_DISABLE_THREAD_SANITIZE __attribute__((no_sanitize_thread)) +#else +#define QUICKCPPLIB_DISABLE_THREAD_SANITIZE +#endif +#ifndef QUICKCPPLIB_SMT_PAUSE +#if !defined(__clang__) && defined(_MSC_VER) && _MSC_VER >= 1310 && (defined(_M_IX86) || defined(_M_X64)) +extern "C" void _mm_pause(); +#pragma intrinsic(_mm_pause) +#define QUICKCPPLIB_SMT_PAUSE _mm_pause(); +#elif !defined(__c2__) && defined(__GNUC__) && (defined(__i386__) || 
defined(__x86_64__)) +#define QUICKCPPLIB_SMT_PAUSE __asm__ __volatile__("rep; nop" : : : "memory"); +#endif +#endif +#ifndef QUICKCPPLIB_FORCEINLINE +#if defined(_MSC_VER) +#define QUICKCPPLIB_FORCEINLINE __forceinline +#elif defined(__GNUC__) +#define QUICKCPPLIB_FORCEINLINE __attribute__((always_inline)) +#else +#define QUICKCPPLIB_FORCEINLINE +#endif +#endif +#ifndef QUICKCPPLIB_NOINLINE +#if defined(_MSC_VER) +#define QUICKCPPLIB_NOINLINE __declspec(noinline) +#elif defined(__GNUC__) +#define QUICKCPPLIB_NOINLINE __attribute__((noinline)) +#else +#define QUICKCPPLIB_NOINLINE +#endif +#endif +#ifdef __has_cpp_attribute +#define QUICKCPPLIB_HAS_CPP_ATTRIBUTE(attr) __has_cpp_attribute(attr) +#else +#define QUICKCPPLIB_HAS_CPP_ATTRIBUTE(attr) (0) +#endif +#if !defined(QUICKCPPLIB_NORETURN) +#if QUICKCPPLIB_HAS_CPP_ATTRIBUTE(noreturn) +#define QUICKCPPLIB_NORETURN [[noreturn]] +#elif defined(_MSC_VER) +#define QUICKCPPLIB_NORETURN __declspec(noreturn) +#elif defined(__GNUC__) +#define QUICKCPPLIB_NORETURN __attribute__((__noreturn__)) +#else +#define QUICKCPPLIB_NORETURN +#endif +#endif +#ifndef QUICKCPPLIB_NODISCARD +#if 0 || (_HAS_CXX17 && _MSC_VER >= 1911 /* VS2017.3 */) +#define QUICKCPPLIB_NODISCARD [[nodiscard]] +#endif +#endif +#ifndef QUICKCPPLIB_NODISCARD +#if defined(__clang__) && !_HAS_CXX17 +#define QUICKCPPLIB_NODISCARD __attribute__((warn_unused_result)) +#elif QUICKCPPLIB_HAS_CPP_ATTRIBUTE(nodiscard) +#define QUICKCPPLIB_NODISCARD [[nodiscard]] +#elif defined(_MSC_VER) +// _Must_inspect_result_ expands into this +#define QUICKCPPLIB_NODISCARD __declspec("SAL_name" "(" "\"_Must_inspect_result_\"" "," "\"\"" "," "\"2\"" ")") __declspec("SAL_begin") __declspec("SAL_post") __declspec("SAL_mustInspect") __declspec("SAL_post") __declspec("SAL_checkReturn") __declspec("SAL_end") +#endif +#endif +#ifndef QUICKCPPLIB_NODISCARD +#define QUICKCPPLIB_NODISCARD +#endif +#ifndef QUICKCPPLIB_SYMBOL_VISIBLE +#if defined(_MSC_VER) +#define QUICKCPPLIB_SYMBOL_VISIBLE 
+#elif defined(__GNUC__) +#define QUICKCPPLIB_SYMBOL_VISIBLE __attribute__((visibility("default"))) +#else +#define QUICKCPPLIB_SYMBOL_VISIBLE +#endif +#endif +#ifndef QUICKCPPLIB_SYMBOL_EXPORT +#if defined(_MSC_VER) +#define QUICKCPPLIB_SYMBOL_EXPORT __declspec(dllexport) +#elif defined(__GNUC__) +#define QUICKCPPLIB_SYMBOL_EXPORT __attribute__((visibility("default"))) +#else +#define QUICKCPPLIB_SYMBOL_EXPORT +#endif +#endif +#ifndef QUICKCPPLIB_SYMBOL_IMPORT +#if defined(_MSC_VER) +#define QUICKCPPLIB_SYMBOL_IMPORT __declspec(dllimport) +#elif defined(__GNUC__) +#define QUICKCPPLIB_SYMBOL_IMPORT +#else +#define QUICKCPPLIB_SYMBOL_IMPORT +#endif +#endif +#ifndef QUICKCPPLIB_THREAD_LOCAL +#if _MSC_VER >= 1800 +#define QUICKCPPLIB_THREAD_LOCAL_IS_CXX11 1 +#elif __cplusplus >= 201103 +#if __GNUC__ >= 5 && !defined(__clang__) +#define QUICKCPPLIB_THREAD_LOCAL_IS_CXX11 1 +#elif defined(__has_feature) +#if __has_feature(cxx_thread_local) +#define QUICKCPPLIB_THREAD_LOCAL_IS_CXX11 1 +#endif +#endif +#endif +#ifdef QUICKCPPLIB_THREAD_LOCAL_IS_CXX11 +#define QUICKCPPLIB_THREAD_LOCAL thread_local +#endif +#ifndef QUICKCPPLIB_THREAD_LOCAL +#if defined(_MSC_VER) +#define QUICKCPPLIB_THREAD_LOCAL __declspec(thread) +#elif defined(__GNUC__) +#define QUICKCPPLIB_THREAD_LOCAL __thread +#else +#error Unknown compiler, cannot set QUICKCPPLIB_THREAD_LOCAL +#endif +#endif +#endif +/* MSVC capable preprocessor macro overloading +(C) 2014-2017 Niall Douglas (3 commits) +File Created: Aug 2014 + + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License in the accompanying file +Licence.txt or at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. + + +Distributed under the Boost Software License, Version 1.0. + (See accompanying file Licence.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ +#ifndef QUICKCPPLIB_PREPROCESSOR_MACRO_OVERLOAD_H +#define QUICKCPPLIB_PREPROCESSOR_MACRO_OVERLOAD_H +#define QUICKCPPLIB_GLUE(x, y) x y +#define QUICKCPPLIB_RETURN_ARG_COUNT(_1_, _2_, _3_, _4_, _5_, _6_, _7_, _8_, count, ...) count +#define QUICKCPPLIB_EXPAND_ARGS(args) QUICKCPPLIB_RETURN_ARG_COUNT args +#define QUICKCPPLIB_COUNT_ARGS_MAX8(...) QUICKCPPLIB_EXPAND_ARGS((__VA_ARGS__, 8, 7, 6, 5, 4, 3, 2, 1, 0)) +#define QUICKCPPLIB_OVERLOAD_MACRO2(name, count) name##count +#define QUICKCPPLIB_OVERLOAD_MACRO1(name, count) QUICKCPPLIB_OVERLOAD_MACRO2(name, count) +#define QUICKCPPLIB_OVERLOAD_MACRO(name, count) QUICKCPPLIB_OVERLOAD_MACRO1(name, count) +#define QUICKCPPLIB_CALL_OVERLOAD(name, ...) QUICKCPPLIB_GLUE(QUICKCPPLIB_OVERLOAD_MACRO(name, QUICKCPPLIB_COUNT_ARGS_MAX8(__VA_ARGS__)), (__VA_ARGS__)) +#define QUICKCPPLIB_GLUE_(x, y) x y +#define QUICKCPPLIB_RETURN_ARG_COUNT_(_1_, _2_, _3_, _4_, _5_, _6_, _7_, _8_, count, ...) count +#define QUICKCPPLIB_EXPAND_ARGS_(args) QUICKCPPLIB_RETURN_ARG_COUNT_ args +#define QUICKCPPLIB_COUNT_ARGS_MAX8_(...) QUICKCPPLIB_EXPAND_ARGS_((__VA_ARGS__, 8, 7, 6, 5, 4, 3, 2, 1, 0)) +#define QUICKCPPLIB_OVERLOAD_MACRO2_(name, count) name##count +#define QUICKCPPLIB_OVERLOAD_MACRO1_(name, count) QUICKCPPLIB_OVERLOAD_MACRO2_(name, count) +#define QUICKCPPLIB_OVERLOAD_MACRO_(name, count) QUICKCPPLIB_OVERLOAD_MACRO1_(name, count) +#define QUICKCPPLIB_CALL_OVERLOAD_(name, ...) 
QUICKCPPLIB_GLUE_(QUICKCPPLIB_OVERLOAD_MACRO_(name, QUICKCPPLIB_COUNT_ARGS_MAX8_(__VA_ARGS__)), (__VA_ARGS__)) +#endif +#if defined(__cpp_concepts) && !defined(QUICKCPPLIB_DISABLE_CONCEPTS_SUPPORT) +#define QUICKCPPLIB_TREQUIRES_EXPAND8(a, b, c, d, e, f, g, h) a &&QUICKCPPLIB_TREQUIRES_EXPAND7(b, c, d, e, f, g, h) +#define QUICKCPPLIB_TREQUIRES_EXPAND7(a, b, c, d, e, f, g) a &&QUICKCPPLIB_TREQUIRES_EXPAND6(b, c, d, e, f, g) +#define QUICKCPPLIB_TREQUIRES_EXPAND6(a, b, c, d, e, f) a &&QUICKCPPLIB_TREQUIRES_EXPAND5(b, c, d, e, f) +#define QUICKCPPLIB_TREQUIRES_EXPAND5(a, b, c, d, e) a &&QUICKCPPLIB_TREQUIRES_EXPAND4(b, c, d, e) +#define QUICKCPPLIB_TREQUIRES_EXPAND4(a, b, c, d) a &&QUICKCPPLIB_TREQUIRES_EXPAND3(b, c, d) +#define QUICKCPPLIB_TREQUIRES_EXPAND3(a, b, c) a &&QUICKCPPLIB_TREQUIRES_EXPAND2(b, c) +#define QUICKCPPLIB_TREQUIRES_EXPAND2(a, b) a &&QUICKCPPLIB_TREQUIRES_EXPAND1(b) +#define QUICKCPPLIB_TREQUIRES_EXPAND1(a) a +//! Expands into a && b && c && ... +#define QUICKCPPLIB_TREQUIRES(...) requires QUICKCPPLIB_CALL_OVERLOAD(QUICKCPPLIB_TREQUIRES_EXPAND, __VA_ARGS__) +#define QUICKCPPLIB_TEMPLATE(...) template <__VA_ARGS__> +#define QUICKCPPLIB_TEXPR(...) requires { (__VA_ARGS__); } +#define QUICKCPPLIB_TPRED(...) (__VA_ARGS__) +#if !defined(_MSC_VER) || _MSC_FULL_VER >= 192400000 // VS 2019 16.3 is broken here +#define QUICKCPPLIB_REQUIRES(...) requires(__VA_ARGS__) +#else +#define QUICKCPPLIB_REQUIRES(...) +#endif +#else +#define QUICKCPPLIB_TEMPLATE(...) template <__VA_ARGS__ +#define QUICKCPPLIB_TREQUIRES(...) , __VA_ARGS__ > +#define QUICKCPPLIB_TEXPR(...) typename = decltype(__VA_ARGS__) +#ifdef _MSC_VER +// MSVC gives an error if every specialisation of a template is always ill-formed, so +// the more powerful SFINAE form below causes pukeage :( +#define QUICKCPPLIB_TPRED(...) typename = typename std::enable_if<(__VA_ARGS__)>::type +#else +#define QUICKCPPLIB_TPRED(...) 
typename std::enable_if<(__VA_ARGS__), bool>::type = true +#endif +#define QUICKCPPLIB_REQUIRES(...) +#endif +#endif +#ifndef __cpp_variadic_templates +#error Outcome needs variadic template support in the compiler +#endif +#if __cpp_constexpr < 201304 && _MSC_FULL_VER < 191100000 +#error Outcome needs constexpr (C++ 14) support in the compiler +#endif +#ifndef __cpp_variable_templates +#error Outcome needs variable template support in the compiler +#endif +#if !defined(__clang__) && defined(__GNUC__) && __GNUC__ < 6 +#error Due to a bug in nested template variables parsing, Outcome does not work on GCCs earlier than v6. +#endif +#ifndef OUTCOME_SYMBOL_VISIBLE +#define OUTCOME_SYMBOL_VISIBLE QUICKCPPLIB_SYMBOL_VISIBLE +#endif +#ifndef OUTCOME_FORCEINLINE +#define OUTCOME_FORCEINLINE QUICKCPPLIB_FORCEINLINE +#endif +#ifndef OUTCOME_NODISCARD +#define OUTCOME_NODISCARD QUICKCPPLIB_NODISCARD +#endif +#ifndef OUTCOME_THREAD_LOCAL +#define OUTCOME_THREAD_LOCAL QUICKCPPLIB_THREAD_LOCAL +#endif +#ifndef OUTCOME_TEMPLATE +#define OUTCOME_TEMPLATE(...) QUICKCPPLIB_TEMPLATE(__VA_ARGS__) +#endif +#ifndef OUTCOME_TREQUIRES +#define OUTCOME_TREQUIRES(...) QUICKCPPLIB_TREQUIRES(__VA_ARGS__) +#endif +#ifndef OUTCOME_TEXPR +#define OUTCOME_TEXPR(...) QUICKCPPLIB_TEXPR(__VA_ARGS__) +#endif +#ifndef OUTCOME_TPRED +#define OUTCOME_TPRED(...) QUICKCPPLIB_TPRED(__VA_ARGS__) +#endif +#ifndef OUTCOME_REQUIRES +#define OUTCOME_REQUIRES(...) QUICKCPPLIB_REQUIRES(__VA_ARGS__) +#endif +/* Convenience macros for importing local namespace binds +(C) 2014-2017 Niall Douglas (9 commits) +File Created: Aug 2014 + + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License in the accompanying file +Licence.txt or at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Distributed under the Boost Software License, Version 1.0. + (See accompanying file Licence.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ +#ifndef QUICKCPPLIB_BIND_IMPORT_HPP +#define QUICKCPPLIB_BIND_IMPORT_HPP +/* 2014-10-9 ned: I lost today figuring out the below. I really hate the C preprocessor now. + * + * Anyway, infinity = 8. It's easy to expand below if needed. + */ +#define QUICKCPPLIB_BIND_STRINGIZE(a) #a +#define QUICKCPPLIB_BIND_STRINGIZE2(a) QUICKCPPLIB_BIND_STRINGIZE(a) +#define QUICKCPPLIB_BIND_NAMESPACE_VERSION8(a, b, c, d, e, f, g, h) a##_##b##_##c##_##d##_##e##_##f##_##g##_##h +#define QUICKCPPLIB_BIND_NAMESPACE_VERSION7(a, b, c, d, e, f, g) a##_##b##_##c##_##d##_##e##_##f##_##g +#define QUICKCPPLIB_BIND_NAMESPACE_VERSION6(a, b, c, d, e, f) a##_##b##_##c##_##d##_##e##_##f +#define QUICKCPPLIB_BIND_NAMESPACE_VERSION5(a, b, c, d, e) a##_##b##_##c##_##d##_##e +#define QUICKCPPLIB_BIND_NAMESPACE_VERSION4(a, b, c, d) a##_##b##_##c##_##d +#define QUICKCPPLIB_BIND_NAMESPACE_VERSION3(a, b, c) a##_##b##_##c +#define QUICKCPPLIB_BIND_NAMESPACE_VERSION2(a, b) a##_##b +#define QUICKCPPLIB_BIND_NAMESPACE_VERSION1(a) a +//! Concatenates each parameter with _ +#define QUICKCPPLIB_BIND_NAMESPACE_VERSION(...) 
QUICKCPPLIB_CALL_OVERLOAD(QUICKCPPLIB_BIND_NAMESPACE_VERSION, __VA_ARGS__) +#define QUICKCPPLIB_BIND_NAMESPACE_SELECT_2(name, modifier) name +#define QUICKCPPLIB_BIND_NAMESPACE_SELECT2(name, modifier) ::name +#define QUICKCPPLIB_BIND_NAMESPACE_SELECT_1(name) name +#define QUICKCPPLIB_BIND_NAMESPACE_SELECT1(name) ::name +#define QUICKCPPLIB_BIND_NAMESPACE_SELECT_(...) QUICKCPPLIB_CALL_OVERLOAD_(QUICKCPPLIB_BIND_NAMESPACE_SELECT_, __VA_ARGS__) +#define QUICKCPPLIB_BIND_NAMESPACE_SELECT(...) QUICKCPPLIB_CALL_OVERLOAD_(QUICKCPPLIB_BIND_NAMESPACE_SELECT, __VA_ARGS__) +#define QUICKCPPLIB_BIND_NAMESPACE_EXPAND8(a, b, c, d, e, f, g, h) QUICKCPPLIB_BIND_NAMESPACE_SELECT_ a QUICKCPPLIB_BIND_NAMESPACE_SELECT b QUICKCPPLIB_BIND_NAMESPACE_SELECT c QUICKCPPLIB_BIND_NAMESPACE_SELECT d QUICKCPPLIB_BIND_NAMESPACE_SELECT e QUICKCPPLIB_BIND_NAMESPACE_SELECT f QUICKCPPLIB_BIND_NAMESPACE_SELECT g QUICKCPPLIB_BIND_NAMESPACE_SELECT h +#define QUICKCPPLIB_BIND_NAMESPACE_EXPAND7(a, b, c, d, e, f, g) QUICKCPPLIB_BIND_NAMESPACE_SELECT_ a QUICKCPPLIB_BIND_NAMESPACE_SELECT b QUICKCPPLIB_BIND_NAMESPACE_SELECT c QUICKCPPLIB_BIND_NAMESPACE_SELECT d QUICKCPPLIB_BIND_NAMESPACE_SELECT e QUICKCPPLIB_BIND_NAMESPACE_SELECT f QUICKCPPLIB_BIND_NAMESPACE_SELECT g +#define QUICKCPPLIB_BIND_NAMESPACE_EXPAND6(a, b, c, d, e, f) QUICKCPPLIB_BIND_NAMESPACE_SELECT_ a QUICKCPPLIB_BIND_NAMESPACE_SELECT b QUICKCPPLIB_BIND_NAMESPACE_SELECT c QUICKCPPLIB_BIND_NAMESPACE_SELECT d QUICKCPPLIB_BIND_NAMESPACE_SELECT e QUICKCPPLIB_BIND_NAMESPACE_SELECT f +#define QUICKCPPLIB_BIND_NAMESPACE_EXPAND5(a, b, c, d, e) QUICKCPPLIB_BIND_NAMESPACE_SELECT_ a QUICKCPPLIB_BIND_NAMESPACE_SELECT b QUICKCPPLIB_BIND_NAMESPACE_SELECT c QUICKCPPLIB_BIND_NAMESPACE_SELECT d QUICKCPPLIB_BIND_NAMESPACE_SELECT e +#define QUICKCPPLIB_BIND_NAMESPACE_EXPAND4(a, b, c, d) QUICKCPPLIB_BIND_NAMESPACE_SELECT_ a QUICKCPPLIB_BIND_NAMESPACE_SELECT b QUICKCPPLIB_BIND_NAMESPACE_SELECT c QUICKCPPLIB_BIND_NAMESPACE_SELECT d +#define 
QUICKCPPLIB_BIND_NAMESPACE_EXPAND3(a, b, c) QUICKCPPLIB_BIND_NAMESPACE_SELECT_ a QUICKCPPLIB_BIND_NAMESPACE_SELECT b QUICKCPPLIB_BIND_NAMESPACE_SELECT c +#define QUICKCPPLIB_BIND_NAMESPACE_EXPAND2(a, b) QUICKCPPLIB_BIND_NAMESPACE_SELECT_ a QUICKCPPLIB_BIND_NAMESPACE_SELECT b +#define QUICKCPPLIB_BIND_NAMESPACE_EXPAND1(a) QUICKCPPLIB_BIND_NAMESPACE_SELECT_ a +//! Expands into a::b::c:: ... +#define QUICKCPPLIB_BIND_NAMESPACE(...) QUICKCPPLIB_CALL_OVERLOAD(QUICKCPPLIB_BIND_NAMESPACE_EXPAND, __VA_ARGS__) +#define QUICKCPPLIB_BIND_NAMESPACE_BEGIN_NAMESPACE_SELECT2(name, modifier) modifier namespace name { +#define QUICKCPPLIB_BIND_NAMESPACE_BEGIN_NAMESPACE_SELECT1(name) namespace name { +#define QUICKCPPLIB_BIND_NAMESPACE_BEGIN_NAMESPACE_SELECT(...) QUICKCPPLIB_CALL_OVERLOAD_(QUICKCPPLIB_BIND_NAMESPACE_BEGIN_NAMESPACE_SELECT, __VA_ARGS__) +#define QUICKCPPLIB_BIND_NAMESPACE_BEGIN_EXPAND8(a, b, c, d, e, f, g, h) QUICKCPPLIB_BIND_NAMESPACE_BEGIN_NAMESPACE_SELECT a QUICKCPPLIB_BIND_NAMESPACE_BEGIN_EXPAND7(b, c, d, e, f, g, h) +#define QUICKCPPLIB_BIND_NAMESPACE_BEGIN_EXPAND7(a, b, c, d, e, f, g) QUICKCPPLIB_BIND_NAMESPACE_BEGIN_NAMESPACE_SELECT a QUICKCPPLIB_BIND_NAMESPACE_BEGIN_EXPAND6(b, c, d, e, f, g) +#define QUICKCPPLIB_BIND_NAMESPACE_BEGIN_EXPAND6(a, b, c, d, e, f) QUICKCPPLIB_BIND_NAMESPACE_BEGIN_NAMESPACE_SELECT a QUICKCPPLIB_BIND_NAMESPACE_BEGIN_EXPAND5(b, c, d, e, f) +#define QUICKCPPLIB_BIND_NAMESPACE_BEGIN_EXPAND5(a, b, c, d, e) QUICKCPPLIB_BIND_NAMESPACE_BEGIN_NAMESPACE_SELECT a QUICKCPPLIB_BIND_NAMESPACE_BEGIN_EXPAND4(b, c, d, e) +#define QUICKCPPLIB_BIND_NAMESPACE_BEGIN_EXPAND4(a, b, c, d) QUICKCPPLIB_BIND_NAMESPACE_BEGIN_NAMESPACE_SELECT a QUICKCPPLIB_BIND_NAMESPACE_BEGIN_EXPAND3(b, c, d) +#define QUICKCPPLIB_BIND_NAMESPACE_BEGIN_EXPAND3(a, b, c) QUICKCPPLIB_BIND_NAMESPACE_BEGIN_NAMESPACE_SELECT a QUICKCPPLIB_BIND_NAMESPACE_BEGIN_EXPAND2(b, c) +#define QUICKCPPLIB_BIND_NAMESPACE_BEGIN_EXPAND2(a, b) QUICKCPPLIB_BIND_NAMESPACE_BEGIN_NAMESPACE_SELECT a 
QUICKCPPLIB_BIND_NAMESPACE_BEGIN_EXPAND1(b) +#define QUICKCPPLIB_BIND_NAMESPACE_BEGIN_EXPAND1(a) QUICKCPPLIB_BIND_NAMESPACE_BEGIN_NAMESPACE_SELECT a +//! Expands into namespace a { namespace b { namespace c ... +#define QUICKCPPLIB_BIND_NAMESPACE_BEGIN(...) QUICKCPPLIB_CALL_OVERLOAD(QUICKCPPLIB_BIND_NAMESPACE_BEGIN_EXPAND, __VA_ARGS__) +#define QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_NAMESPACE_SELECT2(name, modifier) modifier namespace name { +#define QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_NAMESPACE_SELECT1(name) export namespace name { +#define QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_NAMESPACE_SELECT(...) QUICKCPPLIB_CALL_OVERLOAD_(QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_NAMESPACE_SELECT, __VA_ARGS__) +#define QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_EXPAND8(a, b, c, d, e, f, g, h) QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_NAMESPACE_SELECT a QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_EXPAND7(b, c, d, e, f, g, h) +#define QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_EXPAND7(a, b, c, d, e, f, g) QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_NAMESPACE_SELECT a QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_EXPAND6(b, c, d, e, f, g) +#define QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_EXPAND6(a, b, c, d, e, f) QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_NAMESPACE_SELECT a QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_EXPAND5(b, c, d, e, f) +#define QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_EXPAND5(a, b, c, d, e) QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_NAMESPACE_SELECT a QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_EXPAND4(b, c, d, e) +#define QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_EXPAND4(a, b, c, d) QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_NAMESPACE_SELECT a QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_EXPAND3(b, c, d) +#define QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_EXPAND3(a, b, c) QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_NAMESPACE_SELECT a QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_EXPAND2(b, c) +#define QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_EXPAND2(a, b) QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_NAMESPACE_SELECT a 
QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_EXPAND1(b) +#define QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_EXPAND1(a) QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_NAMESPACE_SELECT a +//! Expands into export namespace a { namespace b { namespace c ... +#define QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN(...) QUICKCPPLIB_CALL_OVERLOAD(QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN_EXPAND, __VA_ARGS__) +#define QUICKCPPLIB_BIND_NAMESPACE_END_NAMESPACE_SELECT2(name, modifier) } +#define QUICKCPPLIB_BIND_NAMESPACE_END_NAMESPACE_SELECT1(name) } +#define QUICKCPPLIB_BIND_NAMESPACE_END_NAMESPACE_SELECT(...) QUICKCPPLIB_CALL_OVERLOAD_(QUICKCPPLIB_BIND_NAMESPACE_END_NAMESPACE_SELECT, __VA_ARGS__) +#define QUICKCPPLIB_BIND_NAMESPACE_END_EXPAND8(a, b, c, d, e, f, g, h) QUICKCPPLIB_BIND_NAMESPACE_END_NAMESPACE_SELECT a QUICKCPPLIB_BIND_NAMESPACE_END_EXPAND7(b, c, d, e, f, g, h) +#define QUICKCPPLIB_BIND_NAMESPACE_END_EXPAND7(a, b, c, d, e, f, g) QUICKCPPLIB_BIND_NAMESPACE_END_NAMESPACE_SELECT a QUICKCPPLIB_BIND_NAMESPACE_END_EXPAND6(b, c, d, e, f, g) +#define QUICKCPPLIB_BIND_NAMESPACE_END_EXPAND6(a, b, c, d, e, f) QUICKCPPLIB_BIND_NAMESPACE_END_NAMESPACE_SELECT a QUICKCPPLIB_BIND_NAMESPACE_END_EXPAND5(b, c, d, e, f) +#define QUICKCPPLIB_BIND_NAMESPACE_END_EXPAND5(a, b, c, d, e) QUICKCPPLIB_BIND_NAMESPACE_END_NAMESPACE_SELECT a QUICKCPPLIB_BIND_NAMESPACE_END_EXPAND4(b, c, d, e) +#define QUICKCPPLIB_BIND_NAMESPACE_END_EXPAND4(a, b, c, d) QUICKCPPLIB_BIND_NAMESPACE_END_NAMESPACE_SELECT a QUICKCPPLIB_BIND_NAMESPACE_END_EXPAND3(b, c, d) +#define QUICKCPPLIB_BIND_NAMESPACE_END_EXPAND3(a, b, c) QUICKCPPLIB_BIND_NAMESPACE_END_NAMESPACE_SELECT a QUICKCPPLIB_BIND_NAMESPACE_END_EXPAND2(b, c) +#define QUICKCPPLIB_BIND_NAMESPACE_END_EXPAND2(a, b) QUICKCPPLIB_BIND_NAMESPACE_END_NAMESPACE_SELECT a QUICKCPPLIB_BIND_NAMESPACE_END_EXPAND1(b) +#define QUICKCPPLIB_BIND_NAMESPACE_END_EXPAND1(a) QUICKCPPLIB_BIND_NAMESPACE_END_NAMESPACE_SELECT a +//! Expands into } } ... +#define QUICKCPPLIB_BIND_NAMESPACE_END(...) 
QUICKCPPLIB_CALL_OVERLOAD(QUICKCPPLIB_BIND_NAMESPACE_END_EXPAND, __VA_ARGS__) +//! Expands into a static const char string array used to mark BindLib compatible namespaces +#define QUICKCPPLIB_BIND_DECLARE(decl, desc) static const char *quickcpplib_out[] = {#decl, desc}; +#endif +#ifndef OUTCOME_ENABLE_LEGACY_SUPPORT_FOR +#define OUTCOME_ENABLE_LEGACY_SUPPORT_FOR 220 // the v2.2 Outcome release +#endif +#if defined(OUTCOME_UNSTABLE_VERSION) +/* UPDATED BY SCRIPT +(C) 2017-2019 Niall Douglas (225 commits) + + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License in the accompanying file +Licence.txt or at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Distributed under the Boost Software License, Version 1.0. 
+ (See accompanying file Licence.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ +// Note the second line of this file must ALWAYS be the git SHA, third line ALWAYS the git SHA update time +#define OUTCOME_PREVIOUS_COMMIT_REF e261cebddfd2d5d1229dbf66c6dc0091a9f2a6f8 +#define OUTCOME_PREVIOUS_COMMIT_DATE "2021-10-26 10:23:56 +00:00" +#define OUTCOME_PREVIOUS_COMMIT_UNIQUE e261cebd +#define OUTCOME_V2 (QUICKCPPLIB_BIND_NAMESPACE_VERSION(outcome_v2, OUTCOME_PREVIOUS_COMMIT_UNIQUE)) +#ifdef _DEBUG +#define OUTCOME_V2_CXX_MODULE_NAME QUICKCPPLIB_BIND_NAMESPACE((QUICKCPPLIB_BIND_NAMESPACE_VERSION(outcome_v2d, OUTCOME_PREVIOUS_COMMIT_UNIQUE))) +#else +#define OUTCOME_V2_CXX_MODULE_NAME QUICKCPPLIB_BIND_NAMESPACE((QUICKCPPLIB_BIND_NAMESPACE_VERSION(outcome_v2, OUTCOME_PREVIOUS_COMMIT_UNIQUE))) +#endif +#else +#define OUTCOME_V2 (QUICKCPPLIB_BIND_NAMESPACE_VERSION(outcome_v2)) +#ifdef _DEBUG +#define OUTCOME_V2_CXX_MODULE_NAME QUICKCPPLIB_BIND_NAMESPACE((QUICKCPPLIB_BIND_NAMESPACE_VERSION(outcome_v2d))) +#else +#define OUTCOME_V2_CXX_MODULE_NAME QUICKCPPLIB_BIND_NAMESPACE((QUICKCPPLIB_BIND_NAMESPACE_VERSION(outcome_v2))) +#endif +#endif +#if defined(GENERATING_OUTCOME_MODULE_INTERFACE) +#define OUTCOME_V2_NAMESPACE QUICKCPPLIB_BIND_NAMESPACE(OUTCOME_V2) +#define OUTCOME_V2_NAMESPACE_BEGIN QUICKCPPLIB_BIND_NAMESPACE_BEGIN(OUTCOME_V2) +#define OUTCOME_V2_NAMESPACE_EXPORT_BEGIN QUICKCPPLIB_BIND_NAMESPACE_EXPORT_BEGIN(OUTCOME_V2) +#define OUTCOME_V2_NAMESPACE_END QUICKCPPLIB_BIND_NAMESPACE_END(OUTCOME_V2) +#else +#define OUTCOME_V2_NAMESPACE QUICKCPPLIB_BIND_NAMESPACE(OUTCOME_V2) +#define OUTCOME_V2_NAMESPACE_BEGIN QUICKCPPLIB_BIND_NAMESPACE_BEGIN(OUTCOME_V2) +#define OUTCOME_V2_NAMESPACE_EXPORT_BEGIN QUICKCPPLIB_BIND_NAMESPACE_BEGIN(OUTCOME_V2) +#define OUTCOME_V2_NAMESPACE_END QUICKCPPLIB_BIND_NAMESPACE_END(OUTCOME_V2) +#endif +#include // for uint32_t etc +#include +#include // for future serialisation +#include // for placement in moves etc +#include +#ifndef 
OUTCOME_USE_STD_IN_PLACE_TYPE +#if defined(_MSC_VER) && _HAS_CXX17 +#define OUTCOME_USE_STD_IN_PLACE_TYPE 1 // MSVC always has std::in_place_type +#elif __cplusplus >= 201700 +// libstdc++ before GCC 6 doesn't have it, despite claiming C++ 17 support +#ifdef __has_include +#if !__has_include() +#define OUTCOME_USE_STD_IN_PLACE_TYPE 0 // must have it if is present +#endif +#endif +#ifndef OUTCOME_USE_STD_IN_PLACE_TYPE +#define OUTCOME_USE_STD_IN_PLACE_TYPE 1 +#endif +#else +#define OUTCOME_USE_STD_IN_PLACE_TYPE 0 +#endif +#endif +#if OUTCOME_USE_STD_IN_PLACE_TYPE +#include // for in_place_type_t +OUTCOME_V2_NAMESPACE_BEGIN +template using in_place_type_t = std::in_place_type_t; +using std::in_place_type; +OUTCOME_V2_NAMESPACE_END +#else +OUTCOME_V2_NAMESPACE_BEGIN +/*! AWAITING HUGO JSON CONVERSION TOOL +type definition template in_place_type_t. Potential doc page: `in_place_type_t` +*/ +template struct in_place_type_t +{ + explicit in_place_type_t() = default; +}; +/*! AWAITING HUGO JSON CONVERSION TOOL +SIGNATURE NOT RECOGNISED +*/ +template constexpr in_place_type_t in_place_type{}; +OUTCOME_V2_NAMESPACE_END +#endif +#ifndef OUTCOME_TRIVIAL_ABI +#if 0 || __clang_major__ >= 7 +//! Defined to be `[[clang::trivial_abi]]` when on a new enough clang compiler. Usually automatic, can be overriden. 
+#define OUTCOME_TRIVIAL_ABI [[clang::trivial_abi]] +#else +#define OUTCOME_TRIVIAL_ABI +#endif +#endif +OUTCOME_V2_NAMESPACE_BEGIN +namespace detail +{ + // Test if type is an in_place_type_t + template struct is_in_place_type_t + { + static constexpr bool value = false; + }; + template struct is_in_place_type_t> + { + static constexpr bool value = true; + }; + // Replace void with constructible void_type + struct empty_type + { + }; + struct void_type + { + // We always compare true to another instance of me + constexpr bool operator==(void_type /*unused*/) const noexcept { return true; } + constexpr bool operator!=(void_type /*unused*/) const noexcept { return false; } + }; + template using devoid = std::conditional_t::value, void_type, T>; + template using rebind_type5 = Output; + template + using rebind_type4 = std::conditional_t< // + std::is_volatile::value, // + std::add_volatile_t>>, // + rebind_type5>; + template + using rebind_type3 = std::conditional_t< // + std::is_const::value, // + std::add_const_t>>, // + rebind_type4>; + template + using rebind_type2 = std::conditional_t< // + std::is_lvalue_reference::value, // + std::add_lvalue_reference_t>>, // + rebind_type3>; + template + using rebind_type = std::conditional_t< // + std::is_rvalue_reference::value, // + std::add_rvalue_reference_t>>, // + rebind_type2>; + // static_assert(std::is_same_v, volatile const int &&>, ""); + /* True if type is the same or constructible. Works around a bug where clang + libstdc++ + pukes on std::is_constructible (this bug is fixed upstream). 
+ */ + template struct _is_explicitly_constructible + { + static constexpr bool value = std::is_constructible::value; + }; + template struct _is_explicitly_constructible + { + static constexpr bool value = false; + }; + template <> struct _is_explicitly_constructible + { + static constexpr bool value = false; + }; + template static constexpr bool is_explicitly_constructible = _is_explicitly_constructible::value; + template struct _is_implicitly_constructible + { + static constexpr bool value = std::is_convertible::value; + }; + template struct _is_implicitly_constructible + { + static constexpr bool value = false; + }; + template <> struct _is_implicitly_constructible + { + static constexpr bool value = false; + }; + template static constexpr bool is_implicitly_constructible = _is_implicitly_constructible::value; + template struct _is_nothrow_constructible + { + static constexpr bool value = std::is_nothrow_constructible::value; + }; + template struct _is_nothrow_constructible + { + static constexpr bool value = false; + }; + template <> struct _is_nothrow_constructible + { + static constexpr bool value = false; + }; + template static constexpr bool is_nothrow_constructible = _is_nothrow_constructible::value; + template struct _is_constructible + { + static constexpr bool value = std::is_constructible::value; + }; + template struct _is_constructible + { + static constexpr bool value = false; + }; + template <> struct _is_constructible + { + static constexpr bool value = false; + }; + template static constexpr bool is_constructible = _is_constructible::value; +#ifndef OUTCOME_USE_STD_IS_NOTHROW_SWAPPABLE +#if defined(_MSC_VER) && _HAS_CXX17 +#define OUTCOME_USE_STD_IS_NOTHROW_SWAPPABLE 1 // MSVC always has std::is_nothrow_swappable +#elif __cplusplus >= 201700 +// libstdc++ before GCC 6 doesn't have it, despite claiming C++ 17 support +#ifdef __has_include +#if !__has_include() +#define OUTCOME_USE_STD_IS_NOTHROW_SWAPPABLE 0 +#endif +#endif +#ifndef 
OUTCOME_USE_STD_IS_NOTHROW_SWAPPABLE +#define OUTCOME_USE_STD_IS_NOTHROW_SWAPPABLE 1 +#endif +#else +#define OUTCOME_USE_STD_IS_NOTHROW_SWAPPABLE 0 +#endif +#endif +// True if type is nothrow swappable +#if !0 && OUTCOME_USE_STD_IS_NOTHROW_SWAPPABLE + template using is_nothrow_swappable = std::is_nothrow_swappable; +#else + template struct is_nothrow_swappable + { + static constexpr bool value = std::is_nothrow_move_constructible::value && std::is_nothrow_move_assignable::value; + }; +#endif +} // namespace detail +OUTCOME_V2_NAMESPACE_END +#ifndef OUTCOME_THROW_EXCEPTION +#ifdef __cpp_exceptions +#define OUTCOME_THROW_EXCEPTION(expr) throw expr +#else +#ifdef __ANDROID__ +#define OUTCOME_DISABLE_EXECINFO +#endif +#ifndef OUTCOME_DISABLE_EXECINFO +#ifdef _WIN32 +/* Implements backtrace() et al from glibc on win64 +(C) 2016-2017 Niall Douglas (4 commits) +File Created: Mar 2016 + + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License in the accompanying file +Licence.txt or at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Distributed under the Boost Software License, Version 1.0. 
+ (See accompanying file Licence.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ +#ifndef BOOST_BINDLIB_EXECINFO_WIN64_H +#define BOOST_BINDLIB_EXECINFO_WIN64_H +#ifndef _WIN32 +#error Can only be included on Windows +#endif +#include +#include +#ifdef QUICKCPPLIB_EXPORTS +#define EXECINFO_DECL extern __declspec(dllexport) +#else +#if defined(__cplusplus) && (!defined(QUICKCPPLIB_HEADERS_ONLY) || QUICKCPPLIB_HEADERS_ONLY == 1) && !0 +#define EXECINFO_DECL inline +#elif defined(QUICKCPPLIB_DYN_LINK) && !defined(QUICKCPPLIB_STATIC_LINK) +#define EXECINFO_DECL extern __declspec(dllimport) +#else +#define EXECINFO_DECL extern +#endif +#endif +#ifdef __cplusplus +extern "C" { +#endif +//! Fill the array of void * at bt with up to len entries, returning entries filled. +EXECINFO_DECL _Check_return_ size_t backtrace(_Out_writes_(len) void **bt, _In_ size_t len); +//! Returns a malloced block of string representations of the input backtrace. +EXECINFO_DECL _Check_return_ _Ret_writes_maybenull_(len) char **backtrace_symbols(_In_reads_(len) void *const *bt, _In_ size_t len); +// extern void backtrace_symbols_fd(void *const *bt, size_t len, int fd); +#ifdef __cplusplus +} +#if (!defined(QUICKCPPLIB_HEADERS_ONLY) || QUICKCPPLIB_HEADERS_ONLY == 1) && !0 +#define QUICKCPPLIB_INCLUDED_BY_HEADER 1 +/* Implements backtrace() et al from glibc on win64 +(C) 2016-2017 Niall Douglas (14 commits) +File Created: Mar 2016 + + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License in the accompanying file +Licence.txt or at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. + + +Distributed under the Boost Software License, Version 1.0. + (See accompanying file Licence.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ +/* Implements backtrace() et al from glibc on win64 +(C) 2016-2017 Niall Douglas (4 commits) +File Created: Mar 2016 + + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License in the accompanying file +Licence.txt or at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Distributed under the Boost Software License, Version 1.0. 
+ (See accompanying file Licence.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ +#include +#include // for abort +#include +// To avoid including windows.h, this source has been macro expanded and win32 function shimmed for C++ only +#if defined(__cplusplus) && !defined(__clang__) +namespace win32 +{ + extern _Ret_maybenull_ void *__stdcall LoadLibraryA(_In_ const char *lpLibFileName); + typedef int(__stdcall *GetProcAddress_returntype)(); + extern GetProcAddress_returntype __stdcall GetProcAddress(_In_ void *hModule, _In_ const char *lpProcName); + extern _Success_(return != 0) unsigned short __stdcall RtlCaptureStackBackTrace(_In_ unsigned long FramesToSkip, _In_ unsigned long FramesToCapture, + _Out_writes_to_(FramesToCapture, return ) void **BackTrace, + _Out_opt_ unsigned long *BackTraceHash); + extern _Success_(return != 0) + _When_((cchWideChar == -1) && (cbMultiByte != 0), + _Post_equal_to_(_String_length_(lpMultiByteStr) + + 1)) int __stdcall WideCharToMultiByte(_In_ unsigned int CodePage, _In_ unsigned long dwFlags, const wchar_t *lpWideCharStr, + _In_ int cchWideChar, _Out_writes_bytes_to_opt_(cbMultiByte, return ) char *lpMultiByteStr, + _In_ int cbMultiByte, _In_opt_ const char *lpDefaultChar, _Out_opt_ int *lpUsedDefaultChar); +#pragma comment(lib, "kernel32.lib") +#if (defined(__x86_64__) || defined(_M_X64)) || (defined(__aarch64__) || defined(_M_ARM64)) +#pragma comment(linker, "/alternatename:?LoadLibraryA@win32@@YAPEAXPEBD@Z=LoadLibraryA") +#pragma comment(linker, "/alternatename:?GetProcAddress@win32@@YAP6AHXZPEAXPEBD@Z=GetProcAddress") +#pragma comment(linker, "/alternatename:?RtlCaptureStackBackTrace@win32@@YAGKKPEAPEAXPEAK@Z=RtlCaptureStackBackTrace") +#pragma comment(linker, "/alternatename:?WideCharToMultiByte@win32@@YAHIKPEB_WHPEADHPEBDPEAH@Z=WideCharToMultiByte") +#elif defined(__x86__) || defined(_M_IX86) || defined(__i386__) +#pragma comment(linker, "/alternatename:?LoadLibraryA@win32@@YGPAXPBD@Z=__imp__LoadLibraryA@4") 
+#pragma comment(linker, "/alternatename:?GetProcAddress@win32@@YGP6GHXZPAXPBD@Z=__imp__GetProcAddress@8") +#pragma comment(linker, "/alternatename:?RtlCaptureStackBackTrace@win32@@YGGKKPAPAXPAK@Z=__imp__RtlCaptureStackBackTrace@16") +#pragma comment(linker, "/alternatename:?WideCharToMultiByte@win32@@YGHIKPB_WHPADHPBDPAH@Z=__imp__WideCharToMultiByte@32") +#elif defined(__arm__) || defined(_M_ARM) +#pragma comment(linker, "/alternatename:?LoadLibraryA@win32@@YAPAXPBD@Z=LoadLibraryA") +#pragma comment(linker, "/alternatename:?GetProcAddress@win32@@YAP6AHXZPAXPBD@Z=GetProcAddress") +#pragma comment(linker, "/alternatename:?RtlCaptureStackBackTrace@win32@@YAGKKPAPAXPAK@Z=RtlCaptureStackBackTrace") +#pragma comment(linker, "/alternatename:?WideCharToMultiByte@win32@@YAHIKPB_WHPADHPBDPAH@Z=WideCharToMultiByte") +#else +#error Unknown architecture +#endif +} // namespace win32 +#else +#ifndef WIN32_LEAN_AND_MEAN +#define WIN32_LEAN_AND_MEAN +#endif +#ifndef NOMINMAX +#define NOMINMAX +#endif +#include +#endif +#ifdef __cplusplus +namespace +{ +#endif + typedef struct _IMAGEHLP_LINE64 + { + unsigned long SizeOfStruct; + void *Key; + unsigned long LineNumber; + wchar_t *FileName; + unsigned long long int Address; + } IMAGEHLP_LINE64, *PIMAGEHLP_LINE64; + typedef int(__stdcall *SymInitialize_t)(_In_ void *hProcess, _In_opt_ const wchar_t *UserSearchPath, _In_ int fInvadeProcess); + typedef int(__stdcall *SymGetLineFromAddr64_t)(_In_ void *hProcess, _In_ unsigned long long int dwAddr, _Out_ unsigned long *pdwDisplacement, + _Out_ PIMAGEHLP_LINE64 Line); + static std::atomic dbghelp_init_lock; +#if defined(__cplusplus) && !defined(__clang__) + static void *dbghelp; +#else +static HMODULE dbghelp; +#endif + static SymInitialize_t SymInitialize; + static SymGetLineFromAddr64_t SymGetLineFromAddr64; + static void load_dbghelp() + { +#if defined(__cplusplus) && !defined(__clang__) + using win32::GetProcAddress; + using win32::LoadLibraryA; +#endif + 
while(dbghelp_init_lock.exchange(1, std::memory_order_acq_rel)) + ; + if(dbghelp) + { + dbghelp_init_lock.store(0, std::memory_order_release); + return; + } + dbghelp = LoadLibraryA("DBGHELP.DLL"); + if(dbghelp) + { + SymInitialize = (SymInitialize_t) GetProcAddress(dbghelp, "SymInitializeW"); + if(!SymInitialize) + abort(); + if(!SymInitialize((void *) (size_t) -1 /*GetCurrentProcess()*/, NULL, 1)) + abort(); + SymGetLineFromAddr64 = (SymGetLineFromAddr64_t) GetProcAddress(dbghelp, "SymGetLineFromAddrW64"); + if(!SymGetLineFromAddr64) + abort(); + } + dbghelp_init_lock.store(0, std::memory_order_release); + } +#ifdef __cplusplus +} +#endif +#ifdef __cplusplus +extern "C" +{ +#endif + _Check_return_ size_t backtrace(_Out_writes_(len) void **bt, _In_ size_t len) + { +#if defined(__cplusplus) && !defined(__clang__) + using win32::RtlCaptureStackBackTrace; +#endif + return RtlCaptureStackBackTrace(1, (unsigned long) len, bt, NULL); + } +#ifdef _MSC_VER +#pragma warning(push) +#pragma warning(disable : 6385 6386) // MSVC static analyser can't grok this function. clang's analyser gives it thumbs up. 
+#endif + _Check_return_ _Ret_writes_maybenull_(len) char **backtrace_symbols(_In_reads_(len) void *const *bt, _In_ size_t len) + { +#if defined(__cplusplus) && !defined(__clang__) + using win32::WideCharToMultiByte; +#endif + size_t bytes = (len + 1) * sizeof(void *) + 256, n; + if(!len) + return NULL; + else + { + char **ret = (char **) malloc(bytes); + char *p = (char *) (ret + len + 1), *end = (char *) ret + bytes; + if(!ret) + return NULL; + for(n = 0; n < len + 1; n++) + ret[n] = NULL; + load_dbghelp(); + for(n = 0; n < len; n++) + { + unsigned long displ; + IMAGEHLP_LINE64 ihl; + memset(&ihl, 0, sizeof(ihl)); + ihl.SizeOfStruct = sizeof(IMAGEHLP_LINE64); + int please_realloc = 0; + if(!bt[n]) + { + ret[n] = NULL; + } + else + { + // Keep offset till later + ret[n] = (char *) ((char *) p - (char *) ret); + { + static std::atomic symlock(0); + while(symlock.exchange(1, std::memory_order_acq_rel)) + ; + if(!SymGetLineFromAddr64 || !SymGetLineFromAddr64((void *) (size_t) -1 /*GetCurrentProcess()*/, (size_t) bt[n], &displ, &ihl)) + { + symlock.store(0, std::memory_order_release); + if(n == 0) + { + free(ret); + return NULL; + } + ihl.FileName = (wchar_t *) L"unknown"; + ihl.LineNumber = 0; + } + else + { + symlock.store(0, std::memory_order_release); + } + } + retry: + if(please_realloc) + { + char **temp = (char **) realloc(ret, bytes + 256); + if(!temp) + { + free(ret); + return NULL; + } + p = (char *) temp + (p - (char *) ret); + ret = temp; + bytes += 256; + end = (char *) ret + bytes; + } + if(ihl.FileName && ihl.FileName[0]) + { + int plen = WideCharToMultiByte(65001 /*CP_UTF8*/, 0, ihl.FileName, -1, p, (int) (end - p), NULL, NULL); + if(!plen) + { + please_realloc = 1; + goto retry; + } + p[plen - 1] = 0; + p += plen - 1; + } + else + { + if(end - p < 16) + { + please_realloc = 1; + goto retry; + } + _ui64toa_s((size_t) bt[n], p, end - p, 16); + p = strchr(p, 0); + } + if(end - p < 16) + { + please_realloc = 1; + goto retry; + } + *p++ = ':'; + 
_itoa_s(ihl.LineNumber, p, end - p, 10); + p = strchr(p, 0) + 1; + } + } + for(n = 0; n < len; n++) + { + if(ret[n]) + ret[n] = (char *) ret + (size_t) ret[n]; + } + return ret; + } + } +#ifdef _MSC_VER +#pragma warning(pop) +#endif + // extern void backtrace_symbols_fd(void *const *bt, size_t len, int fd); +#ifdef __cplusplus +} +#endif +#undef QUICKCPPLIB_INCLUDED_BY_HEADER +#endif +#endif +#endif +#else +#include +#endif +#endif // OUTCOME_DISABLE_EXECINFO +#include +#include +OUTCOME_V2_NAMESPACE_BEGIN +namespace detail +{ + QUICKCPPLIB_NORETURN inline void do_fatal_exit(const char *expr) + { +#if !defined(OUTCOME_DISABLE_EXECINFO) + void *bt[16]; + size_t btlen = backtrace(bt, sizeof(bt) / sizeof(bt[0])); // NOLINT +#endif + fprintf(stderr, "FATAL: Outcome throws exception %s with exceptions disabled\n", expr); // NOLINT +#if !defined(OUTCOME_DISABLE_EXECINFO) + char **bts = backtrace_symbols(bt, btlen); // NOLINT + if(bts != nullptr) + { + for(size_t n = 0; n < btlen; n++) + { + fprintf(stderr, " %s\n", bts[n]); // NOLINT + } + free(bts); // NOLINT + } +#endif + abort(); + } +} // namespace detail +OUTCOME_V2_NAMESPACE_END +#define OUTCOME_THROW_EXCEPTION(expr) OUTCOME_V2_NAMESPACE::detail::do_fatal_exit(#expr), (void) (expr) +#endif +#endif +#ifndef BOOST_OUTCOME_AUTO_TEST_CASE +#define BOOST_OUTCOME_AUTO_TEST_CASE(a, b) BOOST_AUTO_TEST_CASE(a, b) +#endif +#endif +#define OUTCOME_COROUTINE_SUPPORT_NAMESPACE_BEGIN OUTCOME_V2_NAMESPACE_BEGIN namespace awaitables { +// +#define OUTCOME_COROUTINE_SUPPORT_NAMESPACE_EXPORT_BEGIN OUTCOME_V2_NAMESPACE_EXPORT_BEGIN namespace awaitables { +// +#define OUTCOME_COROUTINE_SUPPORT_NAMESPACE_END } OUTCOME_V2_NAMESPACE_END +#ifdef __cpp_exceptions +/* Tries to convert an exception ptr into its equivalent error code +(C) 2017-2019 Niall Douglas (11 commits) +File Created: July 2017 + + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License in the accompanying file +Licence.txt or at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Distributed under the Boost Software License, Version 1.0. + (See accompanying file Licence.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ +#ifndef OUTCOME_UTILS_HPP +#define OUTCOME_UTILS_HPP +#include +#include +#include +OUTCOME_V2_NAMESPACE_BEGIN +#ifdef __cpp_exceptions +/*! AWAITING HUGO JSON CONVERSION TOOL +SIGNATURE NOT RECOGNISED +*/ +inline std::error_code error_from_exception(std::exception_ptr &&ep = std::current_exception(), std::error_code not_matched = std::make_error_code(std::errc::resource_unavailable_try_again)) noexcept +{ + if(!ep) + { + return {}; + } + try + { + std::rethrow_exception(ep); + } + catch(const std::invalid_argument & /*unused*/) + { + ep = std::exception_ptr(); + return std::make_error_code(std::errc::invalid_argument); + } + catch(const std::domain_error & /*unused*/) + { + ep = std::exception_ptr(); + return std::make_error_code(std::errc::argument_out_of_domain); + } + catch(const std::length_error & /*unused*/) + { + ep = std::exception_ptr(); + return std::make_error_code(std::errc::argument_list_too_long); + } + catch(const std::out_of_range & /*unused*/) + { + ep = std::exception_ptr(); + return std::make_error_code(std::errc::result_out_of_range); + } + catch(const std::logic_error & /*unused*/) /* base class for this group */ + { + ep = std::exception_ptr(); + return std::make_error_code(std::errc::invalid_argument); + } + catch(const std::system_error &e) /* also catches ios::failure */ + { + ep = std::exception_ptr(); + return e.code(); + } + catch(const 
std::overflow_error & /*unused*/) + { + ep = std::exception_ptr(); + return std::make_error_code(std::errc::value_too_large); + } + catch(const std::range_error & /*unused*/) + { + ep = std::exception_ptr(); + return std::make_error_code(std::errc::result_out_of_range); + } + catch(const std::runtime_error & /*unused*/) /* base class for this group */ + { + ep = std::exception_ptr(); + return std::make_error_code(std::errc::resource_unavailable_try_again); + } + catch(const std::bad_alloc & /*unused*/) + { + ep = std::exception_ptr(); + return std::make_error_code(std::errc::not_enough_memory); + } + catch(...) + { + } + return not_matched; +} +/*! AWAITING HUGO JSON CONVERSION TOOL +SIGNATURE NOT RECOGNISED +*/ +inline void try_throw_std_exception_from_error(std::error_code ec, const std::string &msg = std::string{}) +{ + if(!ec || (ec.category() != std::generic_category() +#ifndef _WIN32 + && ec.category() != std::system_category() +#endif + )) + { + return; + } + switch(ec.value()) + { + case EINVAL: + throw msg.empty() ? std::invalid_argument("invalid argument") : std::invalid_argument(msg); + case EDOM: + throw msg.empty() ? std::domain_error("domain error") : std::domain_error(msg); + case E2BIG: + throw msg.empty() ? std::length_error("length error") : std::length_error(msg); + case ERANGE: + throw msg.empty() ? std::out_of_range("out of range") : std::out_of_range(msg); + case EOVERFLOW: + throw msg.empty() ? 
std::overflow_error("overflow error") : std::overflow_error(msg); + case ENOMEM: + throw std::bad_alloc(); + } +} +#endif +OUTCOME_V2_NAMESPACE_END +#endif +OUTCOME_V2_NAMESPACE_BEGIN +namespace awaitables +{ + namespace detail + { + inline bool error_is_set(std::error_code ec) noexcept { return !!ec; } + inline std::error_code error_from_exception(std::exception_ptr &&ep, std::error_code not_matched) noexcept { return OUTCOME_V2_NAMESPACE::error_from_exception(static_cast(ep), not_matched); } + } // namespace detail +} // namespace awaitables +OUTCOME_V2_NAMESPACE_END +#endif +/* Tells C++ coroutines about Outcome's result +(C) 2019-2020 Niall Douglas (12 commits) +File Created: Oct 2019 + + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License in the accompanying file +Licence.txt or at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Distributed under the Boost Software License, Version 1.0. 
+ (See accompanying file Licence.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ +#ifndef OUTCOME_DETAIL_COROUTINE_SUPPORT_HPP +#define OUTCOME_DETAIL_COROUTINE_SUPPORT_HPP +#include +#include +#if __cpp_impl_coroutine || (defined(_MSC_VER) && __cpp_coroutines) || (defined(__clang__) && __cpp_coroutines) +#ifndef OUTCOME_HAVE_NOOP_COROUTINE +#if defined(__has_builtin) +#if __has_builtin(__builtin_coro_noop) +#define OUTCOME_HAVE_NOOP_COROUTINE 1 +#endif +#endif +#endif +#ifndef OUTCOME_HAVE_NOOP_COROUTINE +#if _MSC_VER >= 1928 +#define OUTCOME_HAVE_NOOP_COROUTINE 1 +#else +#define OUTCOME_HAVE_NOOP_COROUTINE 0 +#endif +#endif +#if __has_include() +#include +OUTCOME_V2_NAMESPACE_BEGIN +namespace awaitables +{ + template using coroutine_handle = std::coroutine_handle; + template using coroutine_traits = std::coroutine_traits; + using std::suspend_always; + using std::suspend_never; +#if OUTCOME_HAVE_NOOP_COROUTINE + using std::noop_coroutine; +#endif +} // namespace awaitables +OUTCOME_V2_NAMESPACE_END +#define OUTCOME_FOUND_COROUTINE_HEADER 1 +#elif __has_include() +#include +OUTCOME_V2_NAMESPACE_BEGIN +namespace awaitables +{ + template using coroutine_handle = std::experimental::coroutine_handle; + template using coroutine_traits = std::experimental::coroutine_traits; + using std::experimental::suspend_always; + using std::experimental::suspend_never; +#if OUTCOME_HAVE_NOOP_COROUTINE + using std::experimental::noop_coroutine; +#endif +} // namespace awaitables +OUTCOME_V2_NAMESPACE_END +#define OUTCOME_FOUND_COROUTINE_HEADER 1 +#endif +#endif +OUTCOME_V2_NAMESPACE_EXPORT_BEGIN +namespace awaitables +{ + namespace detail + { + struct error_type_not_found + { + }; + struct exception_type_not_found + { + }; + template struct type_found + { + using type = T; + }; + template constexpr inline type_found extract_error_type(int /*unused*/) { return {}; } + template constexpr inline type_found extract_error_type(...) 
{ return {}; } + template constexpr inline type_found extract_exception_type(int /*unused*/) { return {}; } + template constexpr inline type_found extract_exception_type(...) { return {}; } + OUTCOME_TEMPLATE(class T, class U) + OUTCOME_TREQUIRES(OUTCOME_TPRED(OUTCOME_V2_NAMESPACE::detail::is_constructible)) + inline bool try_set_error(T &&e, U *result) + { + new(result) U(static_cast(e)); + return true; + } + template inline bool try_set_error(T && /*unused*/, ...) { return false; } + OUTCOME_TEMPLATE(class T, class U) + OUTCOME_TREQUIRES(OUTCOME_TPRED(OUTCOME_V2_NAMESPACE::detail::is_constructible)) + inline void set_or_rethrow(T &e, U *result) { new(result) U(e); } + template inline void set_or_rethrow(T &e, ...) { rethrow_exception(e); } + template class fake_atomic + { + T _v; + public: + constexpr fake_atomic(T v) + : _v(v) + { + } + T load(std::memory_order /*unused*/) { return _v; } + void store(T v, std::memory_order /*unused*/) { _v = v; } + }; +#ifdef OUTCOME_FOUND_COROUTINE_HEADER + template struct outcome_promise_type + { + using container_type = typename Awaitable::container_type; + using result_set_type = std::conditional_t, fake_atomic>; + union + { + OUTCOME_V2_NAMESPACE::detail::empty_type _default{}; + container_type result; + }; + result_set_type result_set{false}; + coroutine_handle<> continuation; + outcome_promise_type() noexcept {} + outcome_promise_type(const outcome_promise_type &) = delete; + outcome_promise_type(outcome_promise_type &&) = delete; + outcome_promise_type &operator=(const outcome_promise_type &) = delete; + outcome_promise_type &operator=(outcome_promise_type &&) = delete; + ~outcome_promise_type() + { + if(result_set.load(std::memory_order_acquire)) + { + result.~container_type(); // could throw + } + } + auto get_return_object() + { + return Awaitable{*this}; // could throw bad_alloc + } + void return_value(container_type &&value) + { + assert(!result_set.load(std::memory_order_acquire)); + 
if(result_set.load(std::memory_order_acquire)) + { + result.~container_type(); // could throw + } + new(&result) container_type(static_cast(value)); // could throw + result_set.store(true, std::memory_order_release); + } + void return_value(const container_type &value) + { + assert(!result_set.load(std::memory_order_acquire)); + if(result_set.load(std::memory_order_acquire)) + { + result.~container_type(); // could throw + } + new(&result) container_type(value); // could throw + result_set.store(true, std::memory_order_release); + } + void unhandled_exception() + { + assert(!result_set.load(std::memory_order_acquire)); + if(result_set.load(std::memory_order_acquire)) + { + result.~container_type(); + } +#ifdef __cpp_exceptions + auto e = std::current_exception(); + auto ec = detail::error_from_exception(static_cast(e), {}); + // Try to set error code first + if(!detail::error_is_set(ec) || !detail::try_set_error(static_cast(ec), &result)) + { + detail::set_or_rethrow(e, &result); // could throw + } +#else + std::terminate(); +#endif + result_set.store(true, std::memory_order_release); + } + auto initial_suspend() noexcept + { + struct awaiter + { + bool await_ready() noexcept { return !suspend_initial; } + void await_resume() noexcept {} + void await_suspend(coroutine_handle<> /*unused*/) noexcept {} + }; + return awaiter{}; + } + auto final_suspend() noexcept + { + struct awaiter + { + bool await_ready() noexcept { return false; } + void await_resume() noexcept {} +#if OUTCOME_HAVE_NOOP_COROUTINE + coroutine_handle<> await_suspend(coroutine_handle self) noexcept + { + return self.promise().continuation ? 
self.promise().continuation : noop_coroutine(); + } +#else + void await_suspend(coroutine_handle self) + { + if(self.promise().continuation) + { + return self.promise().continuation.resume(); + } + } +#endif + }; + return awaiter{}; + } + }; + template struct outcome_promise_type + { + using container_type = void; + using result_set_type = std::conditional_t, fake_atomic>; + result_set_type result_set{false}; + coroutine_handle<> continuation; + outcome_promise_type() {} + outcome_promise_type(const outcome_promise_type &) = delete; + outcome_promise_type(outcome_promise_type &&) = delete; + outcome_promise_type &operator=(const outcome_promise_type &) = delete; + outcome_promise_type &operator=(outcome_promise_type &&) = delete; + ~outcome_promise_type() = default; + auto get_return_object() + { + return Awaitable{*this}; // could throw bad_alloc + } + void return_void() noexcept + { + assert(!result_set.load(std::memory_order_acquire)); + result_set.store(true, std::memory_order_release); + } + void unhandled_exception() + { + assert(!result_set.load(std::memory_order_acquire)); + std::rethrow_exception(std::current_exception()); // throws + } + auto initial_suspend() noexcept + { + struct awaiter + { + bool await_ready() noexcept { return !suspend_initial; } + void await_resume() noexcept {} + void await_suspend(coroutine_handle<> /*unused*/) noexcept {} + }; + return awaiter{}; + } + auto final_suspend() noexcept + { + struct awaiter + { + bool await_ready() noexcept { return false; } + void await_resume() noexcept {} +#if OUTCOME_HAVE_NOOP_COROUTINE + coroutine_handle<> await_suspend(coroutine_handle self) noexcept + { + return self.promise().continuation ? 
self.promise().continuation : noop_coroutine(); + } +#else + void await_suspend(coroutine_handle self) + { + if(self.promise().continuation) + { + return self.promise().continuation.resume(); + } + } +#endif + }; + return awaiter{}; + } + }; + template + constexpr inline auto move_result_from_promise_if_not_void(outcome_promise_type &p) + { + return static_cast(p.result); + } + template + constexpr inline void move_result_from_promise_if_not_void(outcome_promise_type & /*unused*/) + { + } + template struct OUTCOME_NODISCARD awaitable + { + using container_type = Cont; + using promise_type = outcome_promise_type::value>; + coroutine_handle _h; + awaitable(awaitable &&o) noexcept + : _h(static_cast &&>(o._h)) + { + o._h = nullptr; + } + awaitable(const awaitable &o) = delete; + awaitable &operator=(awaitable &&) = delete; // as per P1056 + awaitable &operator=(const awaitable &) = delete; + ~awaitable() + { + if(_h) + { + _h.destroy(); + } + } + explicit awaitable(promise_type &p) // could throw + : _h(coroutine_handle::from_promise(p)) + { + } + bool await_ready() noexcept { return _h.promise().result_set.load(std::memory_order_acquire); } + container_type await_resume() + { + assert(_h.promise().result_set.load(std::memory_order_acquire)); + if(!_h.promise().result_set.load(std::memory_order_acquire)) + { + std::terminate(); + } + return detail::move_result_from_promise_if_not_void(_h.promise()); + } +#if OUTCOME_HAVE_NOOP_COROUTINE + coroutine_handle<> await_suspend(coroutine_handle<> cont) noexcept + { + _h.promise().continuation = cont; + return _h; + } +#else + void await_suspend(coroutine_handle<> cont) + { + _h.promise().continuation = cont; + _h.resume(); + } +#endif + }; +#endif + } // namespace detail +} // namespace awaitables +OUTCOME_V2_NAMESPACE_END +#endif +#ifdef OUTCOME_FOUND_COROUTINE_HEADER +OUTCOME_V2_NAMESPACE_EXPORT_BEGIN namespace awaitables { +/*! 
AWAITING HUGO JSON CONVERSION TOOL +SIGNATURE NOT RECOGNISED +*/ +template using eager = OUTCOME_V2_NAMESPACE::awaitables::detail::awaitable; +/*! AWAITING HUGO JSON CONVERSION TOOL +SIGNATURE NOT RECOGNISED +*/ +template using atomic_eager = OUTCOME_V2_NAMESPACE::awaitables::detail::awaitable; +/*! AWAITING HUGO JSON CONVERSION TOOL +SIGNATURE NOT RECOGNISED +*/ +template using lazy = OUTCOME_V2_NAMESPACE::awaitables::detail::awaitable; +/*! AWAITING HUGO JSON CONVERSION TOOL +SIGNATURE NOT RECOGNISED +*/ +template using atomic_lazy = OUTCOME_V2_NAMESPACE::awaitables::detail::awaitable; +} OUTCOME_V2_NAMESPACE_END +#endif +#undef OUTCOME_COROUTINE_SUPPORT_NAMESPACE_BEGIN +#undef OUTCOME_COROUTINE_SUPPORT_NAMESPACE_EXPORT_BEGIN +#undef OUTCOME_COROUTINE_SUPPORT_NAMESPACE_END +#endif +/* iostream specialisations for result and outcome +(C) 2017-2019 Niall Douglas (21 commits) +File Created: July 2017 + + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License in the accompanying file +Licence.txt or at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Distributed under the Boost Software License, Version 1.0. + (See accompanying file Licence.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ +#ifndef OUTCOME_IOSTREAM_SUPPORT_HPP +#define OUTCOME_IOSTREAM_SUPPORT_HPP +/* A less simple result type +(C) 2017-2019 Niall Douglas (79 commits) +File Created: June 2017 + + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License in the accompanying file +Licence.txt or at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Distributed under the Boost Software License, Version 1.0. + (See accompanying file Licence.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ +#ifndef OUTCOME_OUTCOME_HPP +#define OUTCOME_OUTCOME_HPP +/* A very simple result type +(C) 2017-2019 Niall Douglas (99 commits) +File Created: June 2017 + + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License in the accompanying file +Licence.txt or at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Distributed under the Boost Software License, Version 1.0. + (See accompanying file Licence.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ +#ifndef OUTCOME_RESULT_HPP +#define OUTCOME_RESULT_HPP +/* A very simple result type +(C) 2017-2019 Niall Douglas (8 commits) +File Created: June 2017 + + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License in the accompanying file +Licence.txt or at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Distributed under the Boost Software License, Version 1.0. + (See accompanying file Licence.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ +#ifndef OUTCOME_STD_RESULT_HPP +#define OUTCOME_STD_RESULT_HPP +/* A very simple result type +(C) 2017-2021 Niall Douglas (14 commits) +File Created: June 2017 + + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License in the accompanying file +Licence.txt or at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Distributed under the Boost Software License, Version 1.0. + (See accompanying file Licence.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ +#ifndef OUTCOME_BASIC_RESULT_HPP +#define OUTCOME_BASIC_RESULT_HPP +/* Says how to convert value, error and exception types +(C) 2017-2019 Niall Douglas (12 commits) +File Created: Nov 2017 + + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License in the accompanying file +Licence.txt or at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Distributed under the Boost Software License, Version 1.0. + (See accompanying file Licence.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ +#ifndef OUTCOME_CONVERT_HPP +#define OUTCOME_CONVERT_HPP +/* Storage for a very simple basic_result type +(C) 2017-2019 Niall Douglas (6 commits) +File Created: Oct 2017 + + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License in the accompanying file +Licence.txt or at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Distributed under the Boost Software License, Version 1.0. + (See accompanying file Licence.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ +#ifndef OUTCOME_BASIC_RESULT_STORAGE_HPP +#define OUTCOME_BASIC_RESULT_STORAGE_HPP +/* Type sugar for success and failure +(C) 2017-2019 Niall Douglas (25 commits) +File Created: July 2017 + + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License in the accompanying file +Licence.txt or at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Distributed under the Boost Software License, Version 1.0. + (See accompanying file Licence.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ +#ifndef OUTCOME_SUCCESS_FAILURE_HPP +#define OUTCOME_SUCCESS_FAILURE_HPP +OUTCOME_V2_NAMESPACE_BEGIN +/*! AWAITING HUGO JSON CONVERSION TOOL +type definition template success_type. Potential doc page: `success_type` +*/ +template struct OUTCOME_NODISCARD success_type +{ + using value_type = T; +private: + value_type _value; + uint16_t _spare_storage{0}; +public: + success_type() = default; + success_type(const success_type &) = default; + success_type(success_type &&) = default; // NOLINT + success_type &operator=(const success_type &) = default; + success_type &operator=(success_type &&) = default; // NOLINT + ~success_type() = default; + OUTCOME_TEMPLATE(class U) + OUTCOME_TREQUIRES(OUTCOME_TPRED(!std::is_same>::value)) + constexpr explicit success_type(U &&v, uint16_t spare_storage = 0) + : _value(static_cast(v)) // NOLINT + , _spare_storage(spare_storage) + { + } + constexpr value_type &value() & { return _value; } + constexpr const value_type &value() const & { return _value; } + constexpr value_type &&value() && { return static_cast(_value); } + constexpr const value_type &&value() const && { return static_cast(_value); } + constexpr uint16_t spare_storage() const { return _spare_storage; } +}; +template <> struct OUTCOME_NODISCARD success_type +{ + using value_type = void; + constexpr uint16_t spare_storage() const { return 0; } +}; +/*! 
Returns type sugar for implicitly constructing a `basic_result` with a successful state, +default constructing `T` if necessary. +*/ +inline constexpr success_type success() noexcept +{ + return success_type{}; +} +/*! Returns type sugar for implicitly constructing a `basic_result` with a successful state. +\effects Copies or moves the successful state supplied into the returned type sugar. +*/ +template inline constexpr success_type> success(T &&v, uint16_t spare_storage = 0) +{ + return success_type>{static_cast(v), spare_storage}; +} +/*! AWAITING HUGO JSON CONVERSION TOOL +type definition template failure_type. Potential doc page: `failure_type` +*/ +template struct OUTCOME_NODISCARD failure_type +{ + using error_type = EC; + using exception_type = E; +private: + error_type _error; + exception_type _exception; + bool _have_error{false}, _have_exception{false}; + uint16_t _spare_storage{0}; + struct error_init_tag + { + }; + struct exception_init_tag + { + }; +public: + failure_type() = default; + failure_type(const failure_type &) = default; + failure_type(failure_type &&) = default; // NOLINT + failure_type &operator=(const failure_type &) = default; + failure_type &operator=(failure_type &&) = default; // NOLINT + ~failure_type() = default; + template + constexpr explicit failure_type(U &&u, V &&v, uint16_t spare_storage = 0) + : _error(static_cast(u)) + , _exception(static_cast(v)) + , _have_error(true) + , _have_exception(true) + , _spare_storage(spare_storage) + { + } + template + constexpr explicit failure_type(in_place_type_t /*unused*/, U &&u, uint16_t spare_storage = 0, error_init_tag /*unused*/ = error_init_tag()) + : _error(static_cast(u)) + , _exception() + , _have_error(true) + , _spare_storage(spare_storage) + { + } + template + constexpr explicit failure_type(in_place_type_t /*unused*/, U &&u, uint16_t spare_storage = 0, + exception_init_tag /*unused*/ = exception_init_tag()) + : _error() + , _exception(static_cast(u)) + , _have_exception(true) + 
, _spare_storage(spare_storage) + { + } + constexpr bool has_error() const { return _have_error; } + constexpr bool has_exception() const { return _have_exception; } + constexpr error_type &error() & { return _error; } + constexpr const error_type &error() const & { return _error; } + constexpr error_type &&error() && { return static_cast(_error); } + constexpr const error_type &&error() const && { return static_cast(_error); } + constexpr exception_type &exception() & { return _exception; } + constexpr const exception_type &exception() const & { return _exception; } + constexpr exception_type &&exception() && { return static_cast(_exception); } + constexpr const exception_type &&exception() const && { return static_cast(_exception); } + constexpr uint16_t spare_storage() const { return _spare_storage; } +}; +template struct OUTCOME_NODISCARD failure_type +{ + using error_type = EC; + using exception_type = void; +private: + error_type _error; + uint16_t _spare_storage{0}; +public: + failure_type() = default; + failure_type(const failure_type &) = default; + failure_type(failure_type &&) = default; // NOLINT + failure_type &operator=(const failure_type &) = default; + failure_type &operator=(failure_type &&) = default; // NOLINT + ~failure_type() = default; + OUTCOME_TEMPLATE(class U) + OUTCOME_TREQUIRES(OUTCOME_TPRED(!std::is_same>::value)) + constexpr explicit failure_type(U &&u, uint16_t spare_storage = 0) + : _error(static_cast(u)) // NOLINT + , _spare_storage(spare_storage) + { + } + constexpr error_type &error() & { return _error; } + constexpr const error_type &error() const & { return _error; } + constexpr error_type &&error() && { return static_cast(_error); } + constexpr const error_type &&error() const && { return static_cast(_error); } + constexpr uint16_t spare_storage() const { return _spare_storage; } +}; +template struct OUTCOME_NODISCARD failure_type +{ + using error_type = void; + using exception_type = E; +private: + exception_type _exception; + 
uint16_t _spare_storage{0}; +public: + failure_type() = default; + failure_type(const failure_type &) = default; + failure_type(failure_type &&) = default; // NOLINT + failure_type &operator=(const failure_type &) = default; + failure_type &operator=(failure_type &&) = default; // NOLINT + ~failure_type() = default; + OUTCOME_TEMPLATE(class V) + OUTCOME_TREQUIRES(OUTCOME_TPRED(!std::is_same>::value)) + constexpr explicit failure_type(V &&v, uint16_t spare_storage = 0) + : _exception(static_cast(v)) // NOLINT + , _spare_storage(spare_storage) + { + } + constexpr exception_type &exception() & { return _exception; } + constexpr const exception_type &exception() const & { return _exception; } + constexpr exception_type &&exception() && { return static_cast(_exception); } + constexpr const exception_type &&exception() const && { return static_cast(_exception); } + constexpr uint16_t spare_storage() const { return _spare_storage; } +}; +/*! AWAITING HUGO JSON CONVERSION TOOL +SIGNATURE NOT RECOGNISED +*/ +template inline constexpr failure_type> failure(EC &&v, uint16_t spare_storage = 0) +{ + return failure_type>{static_cast(v), spare_storage}; +} +/*! AWAITING HUGO JSON CONVERSION TOOL +SIGNATURE NOT RECOGNISED +*/ +template inline constexpr failure_type, std::decay_t> failure(EC &&v, E &&w, uint16_t spare_storage = 0) +{ + return failure_type, std::decay_t>{static_cast(v), static_cast(w), spare_storage}; +} +namespace detail +{ + template struct is_success_type + { + static constexpr bool value = false; + }; + template struct is_success_type> + { + static constexpr bool value = true; + }; + template struct is_failure_type + { + static constexpr bool value = false; + }; + template struct is_failure_type> + { + static constexpr bool value = true; + }; +} // namespace detail +/*! AWAITING HUGO JSON CONVERSION TOOL +SIGNATURE NOT RECOGNISED +*/ +template static constexpr bool is_success_type = detail::is_success_type>::value; +/*! 
AWAITING HUGO JSON CONVERSION TOOL +SIGNATURE NOT RECOGNISED +*/ +template static constexpr bool is_failure_type = detail::is_failure_type>::value; +OUTCOME_V2_NAMESPACE_END +#endif +/* Traits for Outcome +(C) 2018-2019 Niall Douglas (8 commits) +File Created: March 2018 + + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License in the accompanying file +Licence.txt or at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +Distributed under the Boost Software License, Version 1.0. + (See accompanying file Licence.txt or copy at + http://www.boost.org/LICENSE_1_0.txt) +*/ +#ifndef OUTCOME_TRAIT_HPP +#define OUTCOME_TRAIT_HPP +OUTCOME_V2_NAMESPACE_EXPORT_BEGIN +namespace trait +{ + /*! AWAITING HUGO JSON CONVERSION TOOL +SIGNATURE NOT RECOGNISED +*/ + template // + static constexpr bool type_can_be_used_in_basic_result = // + (!std::is_reference::value // + && !OUTCOME_V2_NAMESPACE::detail::is_in_place_type_t>::value // + && !is_success_type // + && !is_failure_type // + && !std::is_array::value // + && (std::is_void::value || (std::is_object::value // + && std::is_destructible::value)) // + ); + /*! AWAITING HUGO JSON CONVERSION TOOL +type definition is_error_type. Potential doc page: NOT FOUND +*/ + template struct is_move_bitcopying + { + static constexpr bool value = false; + }; + /*! AWAITING HUGO JSON CONVERSION TOOL +type definition is_error_type. Potential doc page: NOT FOUND +*/ + template struct is_error_type + { + static constexpr bool value = false; + }; + /*! AWAITING HUGO JSON CONVERSION TOOL +type definition is_error_type_enum. 
Potential doc page: NOT FOUND +*/ + template struct is_error_type_enum + { + static constexpr bool value = false; + }; + namespace detail + { + template using devoid = OUTCOME_V2_NAMESPACE::detail::devoid; + template std::add_rvalue_reference_t> declval() noexcept; + // From http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2015/n4436.pdf + namespace detector_impl + { + template using void_t = void; + template class Op, class... Args> struct detector + { + static constexpr bool value = false; + using type = Default; + }; + template class Op, class... Args> struct detector>, Op, Args...> + { + static constexpr bool value = true; + using type = Op; + }; + } // namespace detector_impl + template