Compare commits

...

8 Commits

Author SHA1 Message Date
Guillermo Oyarzun 7e1b77c564 chore(gpu): testing2 a 2025-09-21 14:43:31 +02:00
Guillermo Oyarzun f025c165d8 lala 2025-09-21 14:22:25 +02:00
Guillermo Oyarzun 1fe0bb1727 chore(gpu): test silly comment 2025-09-21 14:19:35 +02:00
Nicolas Sarlin 4a73b7bb4b fix(versionable): use full type path in proc macro 2025-09-19 16:03:56 +02:00
    This avoids name clashes if user re-defines the type
Guillermo Oyarzun 022cb3b18a fix(gpu): avoid out of memory when benchmarking throughput 2025-09-19 14:44:12 +02:00
David Testé c4feabbfa3 chore(ci): revert package-lock.json 2025-09-19 09:30:15 +02:00
David Testé 3c6ed37a18 chore(ci): factorize release workflows by using a sub-workflow 2025-09-18 17:52:34 +02:00
Agnes Leroy fe6e81ff78 chore(gpu): post hackathon cleanup 2025-09-18 16:30:45 +02:00
25 changed files with 428 additions and 879 deletions
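A note on commit 4a73b7bb4b: emitting the full type path in the proc macro's generated code prevents a user-defined item with the same name from shadowing the trait the macro intends. A minimal, self-contained Rust illustration of the clash (the names are hypothetical, not the actual tfhe-versionable output):

```rust
// The problem in miniature: a user-defined `Versionize` struct shadows the
// bare trait name at the macro's call site.
mod tfhe_versionable {
    pub trait Versionize {
        fn versionize(&self) -> String;
    }
}

struct Versionize; // the user re-defines the name in their own crate

struct MyType;

// Generated code spelling `impl Versionize for MyType` would resolve to the
// user's struct and fail to compile; the full path is unambiguous.
impl crate::tfhe_versionable::Versionize for MyType {
    fn versionize(&self) -> String {
        "MyType v0".to_string()
    }
}

fn main() {
    use crate::tfhe_versionable::Versionize as _;
    let _users_type = Versionize; // the user's struct remains usable
    println!("{}", MyType.versionize());
}
```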

View File

@@ -21,20 +21,20 @@ env:
permissions: { }
jobs:
verify-actor:
verify-triggering-actor:
name: benchmark_perf_regression/verify-actor
if: (github.event_name == 'pull_request' &&
(contains(github.event.label.name, 'bench-perfs-cpu') ||
contains(github.event.label.name, 'bench-perfs-gpu'))) ||
(github.event.issue.pull_request && startsWith(github.event.comment.body, '/bench'))
uses: ./.github/workflows/verify_commit_actor.yml
uses: ./.github/workflows/verify_triggering_actor.yml
secrets:
ALLOWED_TEAM: ${{ secrets.RELEASE_TEAM }}
READ_ORG_TOKEN: ${{ secrets.READ_ORG_TOKEN }}
prepare-benchmarks:
name: benchmark_perf_regression/prepare-benchmarks
needs: verify-actor
needs: verify-triggering-actor
runs-on: ubuntu-latest
outputs:
commands: ${{ steps.set_commands.outputs.commands }}

View File

@@ -1,33 +1,31 @@
# Publish new release of tfhe-rs on various platform.
name: make_release
# Common workflow to make crate release
name: make_release_common
on:
workflow_dispatch:
workflow_call:
inputs:
dry_run:
description: "Dry-run"
package-name:
type: string
required: true
dry-run:
type: boolean
default: true
push_to_crates:
description: "Push to crate"
type: boolean
default: true
push_web_package:
description: "Push web js package"
type: boolean
default: true
push_node_package:
description: "Push node js package"
type: boolean
default: true
npm_latest_tag:
description: "Set NPM tag as latest"
type: boolean
default: false
secrets:
REPO_CHECKOUT_TOKEN:
required: true
SLACK_CHANNEL:
required: true
BOT_USERNAME:
required: true
SLACK_WEBHOOK:
required: true
ALLOWED_TEAM:
required: true
READ_ORG_TOKEN:
required: true
env:
ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
NPM_TAG: ""
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
@@ -36,18 +34,18 @@ env:
permissions: {}
jobs:
verify-tag:
name: make_release/verify-tag
verify-triggering-actor:
name: make_release_common/verify-triggering-actor
if: startsWith(github.ref, 'refs/tags/')
uses: ./.github/workflows/verify_commit_actor.yml
uses: ./.github/workflows/verify_triggering_actor.yml
secrets:
ALLOWED_TEAM: ${{ secrets.RELEASE_TEAM }}
ALLOWED_TEAM: ${{ secrets.ALLOWED_TEAM }}
READ_ORG_TOKEN: ${{ secrets.READ_ORG_TOKEN }}
package:
name: make_release/package
name: make_release_common/package
runs-on: ubuntu-latest
needs: verify-tag
needs: verify-triggering-actor
outputs:
hash: ${{ steps.hash.outputs.hash }}
steps:
@@ -58,20 +56,23 @@ jobs:
persist-credentials: 'false'
token: ${{ secrets.REPO_CHECKOUT_TOKEN }}
- name: Prepare package
env:
PACKAGE: ${{ inputs.package-name }}
run: |
cargo package -p tfhe
cargo package -p "${PACKAGE}"
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: crate
name: crate-${{ inputs.package-name }}
path: target/package/*.crate
- name: generate hash
id: hash
run: cd target/package && echo "hash=$(sha256sum ./*.crate | base64 -w0)" >> "${GITHUB_OUTPUT}"
provenance:
name: make_release/provenance
if: ${{ !inputs.dry_run }}
needs: [package]
name: make_release_common/provenance
if: ${{ !inputs.dry-run }}
needs: package
uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v2.1.0
permissions:
# Needed to detect the GitHub Actions environment
@@ -84,14 +85,14 @@ jobs:
# SHA-256 hashes of the Crate package.
base64-subjects: ${{ needs.package.outputs.hash }}
publish_release:
name: make_release/publish_release
needs: [package] # for comparing hashes
name: make_release_common/publish-release
needs: package
runs-on: ubuntu-latest
# For provenance of npmjs publish
permissions:
contents: read
id-token: write # also needed for OIDC token exchange on crates.io
# Needed for OIDC token exchange on crates.io
id-token: write
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
@@ -99,28 +100,27 @@ jobs:
fetch-depth: 0
persist-credentials: 'false'
token: ${{ secrets.REPO_CHECKOUT_TOKEN }}
- name: Create NPM version tag
if: ${{ inputs.npm_latest_tag }}
run: |
echo "NPM_TAG=latest" >> "${GITHUB_ENV}"
- name: Download artifact
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
with:
name: crate
name: crate-${{ inputs.package-name }}
path: target/package
- name: Authenticate on registry
uses: rust-lang/crates-io-auth-action@e919bc7605cde86df457cf5b93c5e103838bd879 # v1.0.1
id: auth
- name: Publish crate.io package
if: ${{ inputs.push_to_crates }}
env:
CARGO_REGISTRY_TOKEN: ${{ steps.auth.outputs.token }}
DRY_RUN: ${{ inputs.dry_run && '--dry-run' || '' }}
PACKAGE: ${{ inputs.package-name }}
DRY_RUN: ${{ inputs.dry-run && '--dry-run' || '' }}
run: |
# DRY_RUN expansion cannot be double quoted when variable contains empty string otherwise cargo publish
# would fail. This is safe since DRY_RUN is handled in the env section above.
# shellcheck disable=SC2086
cargo publish -p tfhe ${DRY_RUN}
cargo publish -p "${PACKAGE}" ${DRY_RUN}
- name: Generate hash
id: published_hash
@@ -132,45 +132,12 @@ jobs:
uses: rtCamp/action-slack-notify@e31e87e03dd19038e411e38ae27cbad084a90661 # v2.3.3
env:
SLACK_COLOR: failure
SLACK_MESSAGE: "SLSA tfhe crate - hash comparison failure: (${{ env.ACTION_RUN_URL }})"
- name: Build web package
if: ${{ inputs.push_web_package }}
run: |
make build_web_js_api_parallel
- name: Publish web package
if: ${{ inputs.push_web_package }}
uses: JS-DevTools/npm-publish@19c28f1ef146469e409470805ea4279d47c3d35c
with:
token: ${{ secrets.NPM_TOKEN }}
package: tfhe/pkg/package.json
dry-run: ${{ inputs.dry_run }}
tag: ${{ env.NPM_TAG }}
provenance: true
- name: Build Node package
if: ${{ inputs.push_node_package }}
run: |
rm -rf tfhe/pkg
make build_node_js_api
sed -i 's/"tfhe"/"node-tfhe"/g' tfhe/pkg/package.json
- name: Publish Node package
if: ${{ inputs.push_node_package }}
uses: JS-DevTools/npm-publish@19c28f1ef146469e409470805ea4279d47c3d35c
with:
token: ${{ secrets.NPM_TOKEN }}
package: tfhe/pkg/package.json
dry-run: ${{ inputs.dry_run }}
tag: ${{ env.NPM_TAG }}
provenance: true
SLACK_MESSAGE: "SLSA ${{ inputs.package-name }} - hash comparison failure: (${{ env.ACTION_RUN_URL }})"
- name: Slack Notification
if: ${{ failure() || (cancelled() && github.event_name != 'pull_request') }}
if: ${{ failure() }}
continue-on-error: true
uses: rtCamp/action-slack-notify@e31e87e03dd19038e411e38ae27cbad084a90661 # v2.3.3
env:
SLACK_COLOR: ${{ job.status }}
SLACK_MESSAGE: "tfhe release failed: (${{ env.ACTION_RUN_URL }})"
SLACK_MESSAGE: "${{ inputs.package-name }} release finished with status: ${{ job.status }}. (${{ env.ACTION_RUN_URL }})"
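The two hash steps in this workflow bracket the publish: `hash` records `sha256sum ./*.crate | base64 -w0` at package time, `published_hash` recomputes it after `cargo publish`, and the first Slack notification fires only when the values differ, i.e. when the published artifact is no longer byte-identical to the one covered by the SLSA provenance. A rough Rust equivalent of that check, assuming the `sha2` and `base64` crates and a placeholder crate path:

```rust
use base64::Engine;
use sha2::{Digest, Sha256};

// Reproduce `sha256sum <file> | base64 -w0` for a single crate file.
fn crate_hash(path: &str) -> std::io::Result<String> {
    let bytes = std::fs::read(path)?;
    let digest = Sha256::digest(&bytes);
    let hex: String = digest.iter().map(|b| format!("{b:02x}")).collect();
    // sha256sum prints "<hex>  <path>\n"; that whole line is what gets encoded.
    let line = format!("{hex}  {path}\n");
    Ok(base64::engine::general_purpose::STANDARD.encode(line))
}

fn main() -> std::io::Result<()> {
    // Placeholder path; the workflow hashes whatever `cargo package` produced.
    let packaged = crate_hash("target/package/example-0.1.0.crate")?;
    let published = crate_hash("target/package/example-0.1.0.crate")?;
    // In CI these values come from two different steps; a mismatch triggers
    // the "hash comparison failure" Slack notification.
    assert_eq!(packaged, published);
    Ok(())
}
```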

View File

@@ -18,17 +18,17 @@ env:
permissions: {}
jobs:
verify-tag:
name: make_release_cuda/verify-tag
verify-triggering-actor:
name: make_release_cuda/verify-triggering-actor
if: startsWith(github.ref, 'refs/tags/')
uses: ./.github/workflows/verify_commit_actor.yml
uses: ./.github/workflows/verify_triggering_actor.yml
secrets:
ALLOWED_TEAM: ${{ secrets.RELEASE_TEAM }}
READ_ORG_TOKEN: ${{ secrets.READ_ORG_TOKEN }}
setup-instance:
name: make_release_cuda/setup-instance
needs: verify-tag
needs: verify-triggering-actor
runs-on: ubuntu-latest
outputs:
runner-name: ${{ steps.start-instance.outputs.label }}

View File

@@ -18,99 +18,16 @@ env:
permissions: {}
jobs:
verify-tag:
name: make_release_hpu/verify-tag
if: startsWith(github.ref, 'refs/tags/')
uses: ./.github/workflows/verify_commit_actor.yml
make-release:
name: make_release_hpu/make-release
uses: ./.github/workflows/make_release_common.yml
with:
package-name: "tfhe-hpu-backend"
dry-run: ${{ inputs.dry_run }}
secrets:
BOT_USERNAME: ${{ secrets.BOT_USERNAME }}
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
REPO_CHECKOUT_TOKEN: ${{ secrets.REPO_CHECKOUT_TOKEN }}
ALLOWED_TEAM: ${{ secrets.RELEASE_TEAM }}
READ_ORG_TOKEN: ${{ secrets.READ_ORG_TOKEN }}
package:
name: make_release_hpu/package
runs-on: ubuntu-latest
needs: verify-tag
outputs:
hash: ${{ steps.hash.outputs.hash }}
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0
persist-credentials: 'false'
token: ${{ secrets.REPO_CHECKOUT_TOKEN }}
- name: Prepare package
run: |
cargo package -p tfhe-hpu-backend
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: crate
path: target/package/*.crate
- name: generate hash
id: hash
run: cd target/package && echo "hash=$(sha256sum ./*.crate | base64 -w0)" >> "${GITHUB_OUTPUT}"
provenance:
name: make_release_hpu/provenance
if: ${{ !inputs.dry_run }}
needs: [package]
uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v2.1.0
permissions:
# Needed to detect the GitHub Actions environment
actions: read
# Needed to create the provenance via GitHub OIDC
id-token: write
# Needed to upload assets/artifacts
contents: write
with:
# SHA-256 hashes of the Crate package.
base64-subjects: ${{ needs.package.outputs.hash }}
publish_release:
name: make_release_hpu/publish-release
runs-on: ubuntu-latest
needs: [verify-tag, package] # for comparing hashes
permissions:
# Needed for OIDC token exchange on crates.io
id-token: write
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0
persist-credentials: 'false'
token: ${{ secrets.REPO_CHECKOUT_TOKEN }}
- name: Authenticate on registry
uses: rust-lang/crates-io-auth-action@e919bc7605cde86df457cf5b93c5e103838bd879 # v1.0.1
id: auth
- name: Publish crate.io package
env:
CARGO_REGISTRY_TOKEN: ${{ steps.auth.outputs.token }}
DRY_RUN: ${{ inputs.dry_run && '--dry-run' || '' }}
run: |
# DRY_RUN expansion cannot be double quoted when variable contains empty string otherwise cargo publish
# would fail. This is safe since DRY_RUN is handled in the env section above.
# shellcheck disable=SC2086
cargo publish -p tfhe-hpu-backend ${DRY_RUN}
- name: Generate hash
id: published_hash
run: cd target/package && echo "pub_hash=$(sha256sum ./*.crate | base64 -w0)" >> "${GITHUB_OUTPUT}"
- name: Slack notification (hashes comparison)
if: ${{ needs.package.outputs.hash != steps.published_hash.outputs.pub_hash }}
continue-on-error: true
uses: rtCamp/action-slack-notify@e31e87e03dd19038e411e38ae27cbad084a90661 # v2.3.3
env:
SLACK_COLOR: failure
SLACK_MESSAGE: "SLSA tfhe-hpu-backend crate - hash comparison failure: (${{ env.ACTION_RUN_URL }})"
- name: Slack Notification
if: ${{ failure() || (cancelled() && github.event_name != 'pull_request') }}
continue-on-error: true
uses: rtCamp/action-slack-notify@e31e87e03dd19038e411e38ae27cbad084a90661 # v2.3.3
env:
SLACK_COLOR: ${{ job.status }}
SLACK_MESSAGE: "tfhe-hpu-backend release failed: (${{ env.ACTION_RUN_URL }})"

.github/workflows/make_release_tfhe.yml (new file, +113 lines)
View File

@@ -0,0 +1,113 @@
# Publish new release of tfhe-rs on various platforms.
name: make_release_tfhe
on:
workflow_dispatch:
inputs:
dry_run:
description: "Dry-run"
type: boolean
default: true
push_to_crates:
description: "Push to crate"
type: boolean
default: true
push_web_package:
description: "Push web js package"
type: boolean
default: true
push_node_package:
description: "Push node js package"
type: boolean
default: true
npm_latest_tag:
description: "Set NPM tag as latest"
type: boolean
default: false
env:
ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
NPM_TAG: ""
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
permissions: {}
jobs:
make-release:
name: make_release_tfhe/make-release
uses: ./.github/workflows/make_release_common.yml
with:
package-name: "tfhe"
dry-run: ${{ inputs.dry_run }}
secrets:
BOT_USERNAME: ${{ secrets.BOT_USERNAME }}
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
REPO_CHECKOUT_TOKEN: ${{ secrets.REPO_CHECKOUT_TOKEN }}
ALLOWED_TEAM: ${{ secrets.RELEASE_TEAM }}
READ_ORG_TOKEN: ${{ secrets.READ_ORG_TOKEN }}
make-release-js:
name: make_release_tfhe/make-release-js
needs: make-release
runs-on: ubuntu-latest
# For provenance of npmjs publish
permissions:
contents: read
id-token: write # also needed for OIDC token exchange on crates.io
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0
persist-credentials: 'false'
token: ${{ secrets.REPO_CHECKOUT_TOKEN }}
- name: Create NPM version tag
if: ${{ inputs.npm_latest_tag }}
run: |
echo "NPM_TAG=latest" >> "${GITHUB_ENV}"
- name: Build web package
if: ${{ inputs.push_web_package }}
run: |
make build_web_js_api_parallel
- name: Publish web package
if: ${{ inputs.push_web_package }}
uses: JS-DevTools/npm-publish@19c28f1ef146469e409470805ea4279d47c3d35c
with:
token: ${{ secrets.NPM_TOKEN }}
package: tfhe/pkg/package.json
dry-run: ${{ inputs.dry_run }}
tag: ${{ env.NPM_TAG }}
provenance: true
- name: Build Node package
if: ${{ inputs.push_node_package }}
run: |
rm -rf tfhe/pkg
make build_node_js_api
sed -i 's/"tfhe"/"node-tfhe"/g' tfhe/pkg/package.json
- name: Publish Node package
if: ${{ inputs.push_node_package }}
uses: JS-DevTools/npm-publish@19c28f1ef146469e409470805ea4279d47c3d35c
with:
token: ${{ secrets.NPM_TOKEN }}
package: tfhe/pkg/package.json
dry-run: ${{ inputs.dry_run }}
tag: ${{ env.NPM_TAG }}
provenance: true
- name: Slack Notification
if: ${{ failure() }}
continue-on-error: true
uses: rtCamp/action-slack-notify@e31e87e03dd19038e411e38ae27cbad084a90661 # v2.3.3
env:
SLACK_COLOR: ${{ job.status }}
SLACK_MESSAGE: "tfhe release finished with status: ${{ job.status }}. (${{ env.ACTION_RUN_URL }})"

View File

@@ -8,110 +8,19 @@ on:
type: boolean
default: true
env:
ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
permissions: {}
jobs:
verify-tag:
name: make_release_tfhe_csprng/verify-tag
if: startsWith(github.ref, 'refs/tags/')
uses: ./.github/workflows/verify_commit_actor.yml
make-release:
name: make_release_tfhe_csprng/make-release
uses: ./.github/workflows/make_release_common.yml
with:
package-name: "tfhe-csprng"
dry-run: ${{ inputs.dry_run }}
secrets:
BOT_USERNAME: ${{ secrets.BOT_USERNAME }}
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
REPO_CHECKOUT_TOKEN: ${{ secrets.REPO_CHECKOUT_TOKEN }}
ALLOWED_TEAM: ${{ secrets.RELEASE_TEAM }}
READ_ORG_TOKEN: ${{ secrets.READ_ORG_TOKEN }}
package:
name: make_release_tfhe_csprng/package
runs-on: ubuntu-latest
outputs:
hash: ${{ steps.hash.outputs.hash }}
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0
persist-credentials: 'false'
token: ${{ secrets.REPO_CHECKOUT_TOKEN }}
- name: Prepare package
run: |
cargo package -p tfhe-csprng
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: crate-tfhe-csprng
path: target/package/*.crate
- name: generate hash
id: hash
run: cd target/package && echo "hash=$(sha256sum ./*.crate | base64 -w0)" >> "${GITHUB_OUTPUT}"
provenance:
name: make_release_tfhe_csprng/provenance
if: ${{ !inputs.dry_run }}
needs: [package]
uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v2.1.0
permissions:
# Needed to detect the GitHub Actions environment
actions: read
# Needed to create the provenance via GitHub OIDC
id-token: write
# Needed to upload assets/artifacts
contents: write
with:
# SHA-256 hashes of the Crate package.
base64-subjects: ${{ needs.package.outputs.hash }}
publish_release:
name: make_release_tfhe_csprng/publish-release
needs: [verify-tag, package]
runs-on: ubuntu-latest
permissions:
# Needed for OIDC token exchange on crates.io
id-token: write
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0
persist-credentials: 'false'
token: ${{ secrets.REPO_CHECKOUT_TOKEN }}
- name: Download artifact
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
with:
name: crate-tfhe-csprng
path: target/package
- name: Authenticate on registry
uses: rust-lang/crates-io-auth-action@e919bc7605cde86df457cf5b93c5e103838bd879 # v1.0.1
id: auth
- name: Publish crate.io package
env:
CARGO_REGISTRY_TOKEN: ${{ steps.auth.outputs.token }}
DRY_RUN: ${{ inputs.dry_run && '--dry-run' || '' }}
run: |
# DRY_RUN expansion cannot be double quoted when variable contains empty string otherwise cargo publish
# would fail. This is safe since DRY_RUN is handled in the env section above.
# shellcheck disable=SC2086
cargo publish -p tfhe-csprng ${DRY_RUN}
- name: Generate hash
id: published_hash
run: cd target/package && echo "pub_hash=$(sha256sum ./*.crate | base64 -w0)" >> "${GITHUB_OUTPUT}"
- name: Slack notification (hashes comparison)
if: ${{ needs.package.outputs.hash != steps.published_hash.outputs.pub_hash }}
continue-on-error: true
uses: rtCamp/action-slack-notify@e31e87e03dd19038e411e38ae27cbad084a90661 # v2.3.3
env:
SLACK_COLOR: failure
SLACK_MESSAGE: "SLSA tfhe-csprng - hash comparison failure: (${{ env.ACTION_RUN_URL }})"
- name: Slack Notification
if: ${{ failure() || (cancelled() && github.event_name != 'pull_request') }}
continue-on-error: true
uses: rtCamp/action-slack-notify@e31e87e03dd19038e411e38ae27cbad084a90661 # v2.3.3
env:
SLACK_COLOR: ${{ job.status }}
SLACK_MESSAGE: "tfhe-csprng release finished with status: ${{ job.status }}. (${{ env.ACTION_RUN_URL }})"

View File

@@ -19,99 +19,16 @@ env:
permissions: {}
jobs:
verify-tag:
name: make_release_tfhe_fft/verify-tag
if: startsWith(github.ref, 'refs/tags/')
uses: ./.github/workflows/verify_commit_actor.yml
make-release:
name: make_release_tfhe_fft/make-release
uses: ./.github/workflows/make_release_common.yml
with:
package-name: "tfhe-fft"
dry-run: ${{ inputs.dry_run }}
secrets:
BOT_USERNAME: ${{ secrets.BOT_USERNAME }}
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
REPO_CHECKOUT_TOKEN: ${{ secrets.REPO_CHECKOUT_TOKEN }}
ALLOWED_TEAM: ${{ secrets.RELEASE_TEAM }}
READ_ORG_TOKEN: ${{ secrets.READ_ORG_TOKEN }}
package:
name: make_release_tfhe_fft/package
runs-on: ubuntu-latest
needs: verify-tag
outputs:
hash: ${{ steps.hash.outputs.hash }}
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0
persist-credentials: 'false'
token: ${{ secrets.REPO_CHECKOUT_TOKEN }}
- name: Prepare package
run: |
cargo package -p tfhe-fft
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: crate
path: target/package/*.crate
- name: generate hash
id: hash
run: cd target/package && echo "hash=$(sha256sum ./*.crate | base64 -w0)" >> "${GITHUB_OUTPUT}"
provenance:
name: make_release_tfhe_fft/provenance
if: ${{ !inputs.dry_run }}
needs: [package]
uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v2.1.0
permissions:
# Needed to detect the GitHub Actions environment
actions: read
# Needed to create the provenance via GitHub OIDC
id-token: write
# Needed to upload assets/artifacts
contents: write
with:
# SHA-256 hashes of the Crate package.
base64-subjects: ${{ needs.package.outputs.hash }}
publish_release:
name: make_release_tfhe_fft/publish-release
runs-on: ubuntu-latest
needs: [verify-tag, package] # for comparing hashes
permissions:
# Needed for OIDC token exchange on crates.io
id-token: write
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0
persist-credentials: 'false'
token: ${{ secrets.REPO_CHECKOUT_TOKEN }}
- name: Authenticate on registry
uses: rust-lang/crates-io-auth-action@e919bc7605cde86df457cf5b93c5e103838bd879 # v1.0.1
id: auth
- name: Publish crate.io package
env:
CARGO_REGISTRY_TOKEN: ${{ steps.auth.outputs.token }}
DRY_RUN: ${{ inputs.dry_run && '--dry-run' || '' }}
run: |
# DRY_RUN expansion cannot be double quoted when variable contains empty string otherwise cargo publish
# would fail. This is safe since DRY_RUN is handled in the env section above.
# shellcheck disable=SC2086
cargo publish -p tfhe-fft ${DRY_RUN}
- name: Generate hash
id: published_hash
run: cd target/package && echo "pub_hash=$(sha256sum ./*.crate | base64 -w0)" >> "${GITHUB_OUTPUT}"
- name: Slack notification (hashes comparison)
if: ${{ needs.package.outputs.hash != steps.published_hash.outputs.pub_hash }}
continue-on-error: true
uses: rtCamp/action-slack-notify@e31e87e03dd19038e411e38ae27cbad084a90661 # v2.3.3
env:
SLACK_COLOR: failure
SLACK_MESSAGE: "SLSA tfhe-fft crate - hash comparison failure: (${{ env.ACTION_RUN_URL }})"
- name: Slack Notification
if: ${{ failure() || (cancelled() && github.event_name != 'pull_request') }}
continue-on-error: true
uses: rtCamp/action-slack-notify@e31e87e03dd19038e411e38ae27cbad084a90661 # v2.3.3
env:
SLACK_COLOR: ${{ job.status }}
SLACK_MESSAGE: "tfhe-fft release failed: (${{ env.ACTION_RUN_URL }})"

View File

@@ -19,99 +19,16 @@ env:
permissions: {}
jobs:
verify-tag:
name: make_release_tfhe_ntt/verify-tag
if: startsWith(github.ref, 'refs/tags/')
uses: ./.github/workflows/verify_commit_actor.yml
make-release:
name: make_release_tfhe_ntt/make-release
uses: ./.github/workflows/make_release_common.yml
with:
package-name: "tfhe-ntt"
dry-run: ${{ inputs.dry_run }}
secrets:
BOT_USERNAME: ${{ secrets.BOT_USERNAME }}
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
REPO_CHECKOUT_TOKEN: ${{ secrets.REPO_CHECKOUT_TOKEN }}
ALLOWED_TEAM: ${{ secrets.RELEASE_TEAM }}
READ_ORG_TOKEN: ${{ secrets.READ_ORG_TOKEN }}
package:
name: make_release_tfhe_ntt/package
runs-on: ubuntu-latest
needs: verify-tag
outputs:
hash: ${{ steps.hash.outputs.hash }}
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0
persist-credentials: 'false'
token: ${{ secrets.REPO_CHECKOUT_TOKEN }}
- name: Prepare package
run: |
cargo package -p tfhe-ntt
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: crate
path: target/package/*.crate
- name: generate hash
id: hash
run: cd target/package && echo "hash=$(sha256sum ./*.crate | base64 -w0)" >> "${GITHUB_OUTPUT}"
provenance:
name: make_release_tfhe_ntt/provenance
if: ${{ !inputs.dry_run }}
needs: [package]
uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v2.1.0
permissions:
# Needed to detect the GitHub Actions environment
actions: read
# Needed to create the provenance via GitHub OIDC
id-token: write
# Needed to upload assets/artifacts
contents: write
with:
# SHA-256 hashes of the Crate package.
base64-subjects: ${{ needs.package.outputs.hash }}
publish_release:
name: make_release_tfhe_ntt/publish-release
runs-on: ubuntu-latest
needs: [verify-tag, package] # for comparing hashes
permissions:
# Needed for OIDC token exchange on crates.io
id-token: write
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0
persist-credentials: 'false'
token: ${{ secrets.REPO_CHECKOUT_TOKEN }}
- name: Authenticate on registry
uses: rust-lang/crates-io-auth-action@e919bc7605cde86df457cf5b93c5e103838bd879 # v1.0.1
id: auth
- name: Publish crate.io package
env:
CARGO_REGISTRY_TOKEN: ${{ steps.auth.outputs.token }}
DRY_RUN: ${{ inputs.dry_run && '--dry-run' || '' }}
run: |
# DRY_RUN expansion cannot be double quoted when variable contains empty string otherwise cargo publish
# would fail. This is safe since DRY_RUN is handled in the env section above.
# shellcheck disable=SC2086
cargo publish -p tfhe-ntt ${DRY_RUN}
- name: Generate hash
id: published_hash
run: cd target/package && echo "pub_hash=$(sha256sum ./*.crate | base64 -w0)" >> "${GITHUB_OUTPUT}"
- name: Slack notification (hashes comparison)
if: ${{ needs.package.outputs.hash != steps.published_hash.outputs.pub_hash }}
continue-on-error: true
uses: rtCamp/action-slack-notify@e31e87e03dd19038e411e38ae27cbad084a90661 # v2.3.3
env:
SLACK_COLOR: failure
SLACK_MESSAGE: "SLSA tfhe-ntt crate - hash comparison failure: (${{ env.ACTION_RUN_URL }})"
- name: Slack Notification
if: ${{ failure() || (cancelled() && github.event_name != 'pull_request') }}
continue-on-error: true
uses: rtCamp/action-slack-notify@e31e87e03dd19038e411e38ae27cbad084a90661 # v2.3.3
env:
SLACK_COLOR: ${{ job.status }}
SLACK_MESSAGE: "tfhe-ntt release failed: (${{ env.ACTION_RUN_URL }})"

View File

@@ -2,6 +2,11 @@ name: make_release_tfhe_versionable
on:
workflow_dispatch:
inputs:
dry_run:
description: "Dry-run"
type: boolean
default: true
env:
ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
@@ -13,174 +18,31 @@ env:
permissions: {}
jobs:
verify-tag:
name: make_release_tfhe_versionable/verify-tag
if: startsWith(github.ref, 'refs/tags/')
uses: ./.github/workflows/verify_commit_actor.yml
make-release-derive:
name: make_release_tfhe_versionable/make-release-derive
uses: ./.github/workflows/make_release_common.yml
with:
package-name: "tfhe-versionable-derive"
dry-run: ${{ inputs.dry_run }}
secrets:
BOT_USERNAME: ${{ secrets.BOT_USERNAME }}
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
REPO_CHECKOUT_TOKEN: ${{ secrets.REPO_CHECKOUT_TOKEN }}
ALLOWED_TEAM: ${{ secrets.RELEASE_TEAM }}
READ_ORG_TOKEN: ${{ secrets.READ_ORG_TOKEN }}
package-derive:
name: make_release_tfhe_versionable/package-derive
runs-on: ubuntu-latest
outputs:
hash: ${{ steps.hash.outputs.hash }}
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0
persist-credentials: 'false'
token: ${{ secrets.REPO_CHECKOUT_TOKEN }}
- name: Prepare package
run: |
cargo package -p tfhe-versionable-derive
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: crate-tfhe-versionable-derive
path: target/package/*.crate
- name: generate hash
id: hash
run: cd target/package && echo "hash=$(sha256sum ./*.crate | base64 -w0)" >> "${GITHUB_OUTPUT}"
provenance-derive:
name: make_release_tfhe_versionable/provenance-derive
needs: [package-derive]
uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v2.1.0
permissions:
# Needed to detect the GitHub Actions environment
actions: read
# Needed to create the provenance via GitHub OIDC
id-token: write
# Needed to upload assets/artifacts
contents: write
make-release:
name: make_release_tfhe_versionable/make-release
needs: make-release-derive
uses: ./.github/workflows/make_release_common.yml
with:
# SHA-256 hashes of the Crate package.
base64-subjects: ${{ needs.package-derive.outputs.hash }}
publish_release-derive:
name: make_release_tfhe_versionable/publish_release_derive
needs: [ verify-tag, package-derive ] # for comparing hashes
runs-on: ubuntu-latest
permissions:
# Needed for OIDC token exchange on crates.io
id-token: write
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0
persist-credentials: 'false'
token: ${{ secrets.REPO_CHECKOUT_TOKEN }}
- name: Download artifact
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
with:
name: crate-tfhe-versionable-derive
path: target/package
- name: Authenticate on registry
uses: rust-lang/crates-io-auth-action@e919bc7605cde86df457cf5b93c5e103838bd879 # v1.0.1
id: auth
- name: Publish crate.io package
env:
CARGO_REGISTRY_TOKEN: ${{ steps.auth.outputs.token }}
run: |
cargo publish -p tfhe-versionable-derive
- name: Generate hash
id: published_hash
run: cd target/package && echo "pub_hash=$(sha256sum ./*.crate | base64 -w0)" >> "${GITHUB_OUTPUT}"
- name: Slack notification (hashes comparison)
if: ${{ needs.package-derive.outputs.hash != steps.published_hash.outputs.pub_hash }}
continue-on-error: true
uses: rtCamp/action-slack-notify@e31e87e03dd19038e411e38ae27cbad084a90661 # v2.3.3
env:
SLACK_COLOR: failure
SLACK_MESSAGE: "SLSA tfhe-versionable-derive - hash comparison failure: (${{ env.ACTION_RUN_URL }})"
- name: Slack Notification
if: ${{ failure() || (cancelled() && github.event_name != 'pull_request') }}
continue-on-error: true
uses: rtCamp/action-slack-notify@e31e87e03dd19038e411e38ae27cbad084a90661 # v2.3.3
env:
SLACK_COLOR: ${{ job.status }}
SLACK_MESSAGE: "tfhe-versionable-derive release finished with status: ${{ job.status }}. (${{ env.ACTION_RUN_URL }})"
package:
name: make_release_tfhe_versionable/package
needs: publish_release-derive
runs-on: ubuntu-latest
outputs:
hash: ${{ steps.hash.outputs.hash }}
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8
with:
fetch-depth: 0
persist-credentials: 'false'
token: ${{ secrets.REPO_CHECKOUT_TOKEN }}
- name: Prepare package
run: |
cargo package -p tfhe-versionable
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: crate-tfhe-versionable
path: target/package/*.crate
- name: generate hash
id: hash
run: cd target/package && echo "hash=$(sha256sum ./*.crate | base64 -w0)" >> "${GITHUB_OUTPUT}"
provenance:
name: make_release_tfhe_versionable/provenance
needs: package
uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v2.1.0
permissions:
# Needed to detect the GitHub Actions environment
actions: read
# Needed to create the provenance via GitHub OIDC
id-token: write
# Needed to upload assets/artifacts
contents: write
with:
# SHA-256 hashes of the Crate package.
base64-subjects: ${{ needs.package.outputs.hash }}
publish_release:
name: make_release_tfhe_versionable/publish-release
needs: package # for comparing hashes
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8
with:
fetch-depth: 0
persist-credentials: 'false'
token: ${{ secrets.REPO_CHECKOUT_TOKEN }}
- name: Download artifact
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
with:
name: crate-tfhe-versionable
path: target/package
- name: Authenticate on registry
uses: rust-lang/crates-io-auth-action@e919bc7605cde86df457cf5b93c5e103838bd879 # v1.0.1
id: auth
- name: Publish crate.io package
env:
CARGO_REGISTRY_TOKEN: ${{ steps.auth.outputs.token }}
run: |
cargo publish -p tfhe-versionable
- name: Generate hash
id: published_hash
run: cd target/package && echo "pub_hash=$(sha256sum ./*.crate | base64 -w0)" >> "${GITHUB_OUTPUT}"
- name: Slack notification (hashes comparison)
if: ${{ needs.package.outputs.hash != steps.published_hash.outputs.pub_hash }}
continue-on-error: true
uses: rtCamp/action-slack-notify@e31e87e03dd19038e411e38ae27cbad084a90661 # v2.3.3
env:
SLACK_COLOR: failure
SLACK_MESSAGE: "SLSA tfhe-versionable - hash comparison failure: (${{ env.ACTION_RUN_URL }})"
- name: Slack Notification
if: ${{ failure() || (cancelled() && github.event_name != 'pull_request') }}
continue-on-error: true
uses: rtCamp/action-slack-notify@e31e87e03dd19038e411e38ae27cbad084a90661 # v2.3.3
env:
SLACK_COLOR: ${{ job.status }}
SLACK_MESSAGE: "tfhe-versionable release finished with status: ${{ job.status }}. (${{ env.ACTION_RUN_URL }})"
package-name: "tfhe-versionable"
dry-run: ${{ inputs.dry_run }}
secrets:
BOT_USERNAME: ${{ secrets.BOT_USERNAME }}
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
REPO_CHECKOUT_TOKEN: ${{ secrets.REPO_CHECKOUT_TOKEN }}
ALLOWED_TEAM: ${{ secrets.RELEASE_TEAM }}
READ_ORG_TOKEN: ${{ secrets.READ_ORG_TOKEN }}

View File

@@ -18,99 +18,16 @@ env:
permissions: { }
jobs:
verify-tag:
name: make_release_zk_pok/verify-tag
if: startsWith(github.ref, 'refs/tags/')
uses: ./.github/workflows/verify_commit_actor.yml
make-release:
name: make_release_zk_pok/make-release
uses: ./.github/workflows/make_release_common.yml
with:
package-name: "tfhe-zk-pok"
dry-run: ${{ inputs.dry_run }}
secrets:
BOT_USERNAME: ${{ secrets.BOT_USERNAME }}
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
REPO_CHECKOUT_TOKEN: ${{ secrets.REPO_CHECKOUT_TOKEN }}
ALLOWED_TEAM: ${{ secrets.RELEASE_TEAM }}
READ_ORG_TOKEN: ${{ secrets.READ_ORG_TOKEN }}
package:
name: make_release_zk_pok/package
runs-on: ubuntu-latest
needs: verify-tag
outputs:
hash: ${{ steps.hash.outputs.hash }}
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0
persist-credentials: 'false'
token: ${{ secrets.REPO_CHECKOUT_TOKEN }}
- name: Prepare package
run: |
cargo package -p tfhe-zk-pok
- uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: crate-zk-pok
path: target/package/*.crate
- name: generate hash
id: hash
run: cd target/package && echo "hash=$(sha256sum ./*.crate | base64 -w0)" >> "${GITHUB_OUTPUT}"
provenance:
name: make_release_zk_pok/provenance
if: ${{ !inputs.dry_run }}
needs: [ package ]
uses: slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@v2.1.0
permissions:
# Needed to detect the GitHub Actions environment
actions: read
# Needed to create the provenance via GitHub OIDC
id-token: write
# Needed to upload assets/artifacts
contents: write
with:
# SHA-256 hashes of the Crate package.
base64-subjects: ${{ needs.package.outputs.hash }}
publish_release:
name: make_release_zk_pok/publish-release
needs: [ verify-tag, package ] # for comparing hashes
runs-on: ubuntu-latest
permissions:
# Needed for OIDC token exchange on crates.io
id-token: write
steps:
- name: Checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
fetch-depth: 0
persist-credentials: 'false'
token: ${{ secrets.REPO_CHECKOUT_TOKEN }}
- name: Download artifact
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
with:
name: crate-zk-pok
path: target/package
- name: Authenticate on registry
uses: rust-lang/crates-io-auth-action@e919bc7605cde86df457cf5b93c5e103838bd879 # v1.0.1
id: auth
- name: Publish crate.io package
env:
CARGO_REGISTRY_TOKEN: ${{ steps.auth.outputs.token }}
DRY_RUN: ${{ inputs.dry_run && '--dry-run' || '' }}
run: |
# DRY_RUN expansion cannot be double quoted when variable contains empty string otherwise cargo publish
# would fail. This is safe since DRY_RUN is handled in the env section above.
# shellcheck disable=SC2086
cargo publish -p tfhe-zk-pok ${DRY_RUN}
- name: Verify hash
id: published_hash
run: cd target/package && echo "pub_hash=$(sha256sum ./*.crate | base64 -w0)" >> "${GITHUB_OUTPUT}"
- name: Slack notification (hashes comparison)
if: ${{ needs.package.outputs.hash != steps.published_hash.outputs.pub_hash }}
continue-on-error: true
uses: rtCamp/action-slack-notify@e31e87e03dd19038e411e38ae27cbad084a90661 # v2.3.3
env:
SLACK_COLOR: failure
SLACK_MESSAGE: "SLSA tfhe-zk-pok crate - hash comparison failure: (${{ env.ACTION_RUN_URL }})"
- name: Slack Notification
if: ${{ failure() || (cancelled() && github.event_name != 'pull_request') }}
continue-on-error: true
uses: rtCamp/action-slack-notify@e31e87e03dd19038e411e38ae27cbad084a90661 # v2.3.3
env:
SLACK_COLOR: ${{ job.status }}
SLACK_MESSAGE: "tfhe-zk-pok release failed: (${{ env.ACTION_RUN_URL }})"

View File

@@ -1,5 +1,5 @@
# Verify a commit actor
name: verify_commit_actor
# Verify a triggering actor
name: verify_triggering_actor
on:
workflow_call:
@@ -13,7 +13,7 @@ permissions: {}
jobs:
check-actor:
name: verify_commit_actor/check-actor
name: verify_triggering_actor/check-actor
runs-on: ubuntu-latest
steps:
# Check triggering actor membership

View File

@@ -333,8 +333,8 @@ uint64_t scratch_cuda_propagate_single_carry_kb_64_inplace(
uint32_t small_lwe_dimension, uint32_t ks_level, uint32_t ks_base_log,
uint32_t pbs_level, uint32_t pbs_base_log, uint32_t grouping_factor,
uint32_t num_blocks, uint32_t message_modulus, uint32_t carry_modulus,
PBS_TYPE pbs_type, uint32_t requested_flag, uint32_t uses_carry,
bool allocate_gpu_memory, PBS_MS_REDUCTION_T noise_reduction_type);
PBS_TYPE pbs_type, uint32_t requested_flag, bool allocate_gpu_memory,
PBS_MS_REDUCTION_T noise_reduction_type);
uint64_t scratch_cuda_add_and_propagate_single_carry_kb_64_inplace(
CudaStreamsFFI streams, int8_t **mem_ptr, uint32_t glwe_dimension,
@@ -342,8 +342,8 @@ uint64_t scratch_cuda_add_and_propagate_single_carry_kb_64_inplace(
uint32_t small_lwe_dimension, uint32_t ks_level, uint32_t ks_base_log,
uint32_t pbs_level, uint32_t pbs_base_log, uint32_t grouping_factor,
uint32_t num_blocks, uint32_t message_modulus, uint32_t carry_modulus,
PBS_TYPE pbs_type, uint32_t requested_flag, uint32_t uses_carry,
bool allocate_gpu_memory, PBS_MS_REDUCTION_T noise_reduction_type);
PBS_TYPE pbs_type, uint32_t requested_flag, bool allocate_gpu_memory,
PBS_MS_REDUCTION_T noise_reduction_type);
void cuda_propagate_single_carry_kb_64_inplace(
CudaStreamsFFI streams, CudaRadixCiphertextFFI *lwe_array,

View File

@@ -244,8 +244,6 @@ struct int_radix_params {
uint32_t carry_modulus;
PBS_MS_REDUCTION_T noise_reduction_type;
int_radix_params(){};
int_radix_params(PBS_TYPE pbs_type, uint32_t glwe_dimension,
uint32_t polynomial_size, uint32_t big_lwe_dimension,
uint32_t small_lwe_dimension, uint32_t ks_level,
@@ -262,6 +260,8 @@ struct int_radix_params {
message_modulus(message_modulus), carry_modulus(carry_modulus),
noise_reduction_type(noise_reduction_type){};
int_radix_params() = default;
void print() {
printf("pbs_type: %u, glwe_dimension: %u, "
"polynomial_size: %u, "
@@ -756,18 +756,20 @@ template <typename Torus> struct int_radix_lut {
CudaStreams streams, uint64_t max_num_radix_blocks,
uint64_t &size_tracker, bool allocate_gpu_memory) {
// We need to create the auxiliary array only in GPU 0
lwe_aligned_vec.resize(active_streams.count());
for (uint i = 0; i < active_streams.count(); i++) {
uint64_t size_tracker_on_array_i = 0;
auto inputs_on_gpu = std::max(
THRESHOLD_MULTI_GPU, get_num_inputs_on_gpu(max_num_radix_blocks, i,
active_streams.count()));
Torus *d_array = (Torus *)cuda_malloc_with_size_tracking_async(
inputs_on_gpu * (params.big_lwe_dimension + 1) * sizeof(Torus),
streams.stream(0), streams.gpu_index(0), size_tracker_on_array_i,
allocate_gpu_memory);
lwe_aligned_vec[i] = d_array;
size_tracker += size_tracker_on_array_i;
if (active_streams.count() > 1) {
lwe_aligned_vec.resize(active_streams.count());
for (uint i = 0; i < active_streams.count(); i++) {
uint64_t size_tracker_on_array_i = 0;
auto inputs_on_gpu = std::max(
THRESHOLD_MULTI_GPU, get_num_inputs_on_gpu(max_num_radix_blocks, i,
active_streams.count()));
Torus *d_array = (Torus *)cuda_malloc_with_size_tracking_async(
inputs_on_gpu * (params.big_lwe_dimension + 1) * sizeof(Torus),
streams.stream(0), streams.gpu_index(0), size_tracker_on_array_i,
allocate_gpu_memory);
lwe_aligned_vec[i] = d_array;
size_tracker += size_tracker_on_array_i;
}
}
}
@@ -1632,8 +1634,19 @@ template <typename Torus> struct int_sum_ciphertexts_vec_memory {
luts_message_carry = new int_radix_lut<Torus>(
streams, params, 2, pbs_count, true, size_tracker);
allocated_luts_message_carry = true;
uint64_t message_modulus_bits =
(uint64_t)std::log2(params.message_modulus);
uint64_t carry_modulus_bits = (uint64_t)std::log2(params.carry_modulus);
uint64_t total_bits_per_block =
message_modulus_bits + carry_modulus_bits;
uint64_t denominator =
(uint64_t)std::ceil((pow(2, total_bits_per_block) - 1) /
(pow(2, message_modulus_bits) - 1));
uint64_t upper_bound_num_blocks =
max_total_blocks_in_vec * 2 / denominator;
luts_message_carry->allocate_lwe_vector_for_non_trivial_indexes(
streams, this->max_total_blocks_in_vec, size_tracker, true);
streams, upper_bound_num_blocks, size_tracker, true);
}
}
if (allocated_luts_message_carry) {
@@ -1731,9 +1744,17 @@ template <typename Torus> struct int_sum_ciphertexts_vec_memory {
this->current_blocks = current_blocks;
this->small_lwe_vector = small_lwe_vector;
this->luts_message_carry = reused_lut;
uint64_t message_modulus_bits = (uint64_t)std::log2(params.message_modulus);
uint64_t carry_modulus_bits = (uint64_t)std::log2(params.carry_modulus);
uint64_t total_bits_per_block = message_modulus_bits + carry_modulus_bits;
uint64_t denominator =
(uint64_t)std::ceil((pow(2, total_bits_per_block) - 1) /
(pow(2, message_modulus_bits) - 1));
uint64_t upper_bound_num_blocks = max_total_blocks_in_vec * 2 / denominator;
this->luts_message_carry->allocate_lwe_vector_for_non_trivial_indexes(
streams, this->max_total_blocks_in_vec, size_tracker,
allocate_gpu_memory);
streams, upper_bound_num_blocks, size_tracker, allocate_gpu_memory);
setup_index_buffers(streams, size_tracker);
}
@@ -2404,8 +2425,7 @@ template <typename Torus> struct int_sc_prop_memory {
int_sc_prop_memory(CudaStreams streams, int_radix_params params,
uint32_t num_radix_blocks, uint32_t requested_flag_in,
uint32_t uses_carry, bool allocate_gpu_memory,
uint64_t &size_tracker) {
bool allocate_gpu_memory, uint64_t &size_tracker) {
gpu_memory_allocated = allocate_gpu_memory;
this->params = params;
auto glwe_dimension = params.glwe_dimension;
@@ -3127,11 +3147,10 @@ template <typename Torus> struct int_mul_memory {
streams, params, num_radix_blocks, 2 * num_radix_blocks,
vector_result_sb, small_lwe_vector, luts_array, true,
allocate_gpu_memory, size_tracker);
uint32_t uses_carry = 0;
uint32_t requested_flag = outputFlag::FLAG_NONE;
sc_prop_mem = new int_sc_prop_memory<Torus>(
streams, params, num_radix_blocks, requested_flag, uses_carry,
allocate_gpu_memory, size_tracker);
streams, params, num_radix_blocks, requested_flag, allocate_gpu_memory,
size_tracker);
}
void release(CudaStreams streams) {
@@ -3731,36 +3750,13 @@ template <typename Torus> struct int_comparison_eq_buffer {
gpu_memory_allocated = allocate_gpu_memory;
this->params = params;
this->op = op;
Torus total_modulus = params.message_modulus * params.carry_modulus;
are_all_block_true_buffer = new int_are_all_block_true_buffer<Torus>(
streams, op, params, num_radix_blocks, allocate_gpu_memory,
size_tracker);
// Operator LUT
auto operator_f = [op](Torus lhs, Torus rhs) -> Torus {
if (op == COMPARISON_TYPE::EQ) {
// EQ
return (lhs == rhs);
} else {
// NE
return (lhs != rhs);
}
};
operator_lut =
new int_radix_lut<Torus>(streams, params, 1, num_radix_blocks,
allocate_gpu_memory, size_tracker);
generate_device_accumulator_bivariate<Torus>(
streams.stream(0), streams.gpu_index(0), operator_lut->get_lut(0, 0),
operator_lut->get_degree(0), operator_lut->get_max_degree(0),
params.glwe_dimension, params.polynomial_size, params.message_modulus,
params.carry_modulus, operator_f, gpu_memory_allocated);
auto active_streams = streams.active_gpu_subset(num_radix_blocks);
operator_lut->broadcast_lut(active_streams);
// f(x) -> x != 0
Torus total_modulus = params.message_modulus * params.carry_modulus;
auto is_non_zero_lut_f = [total_modulus](Torus x) -> Torus {
return (x % total_modulus) != 0;
};
@@ -3775,38 +3771,74 @@ template <typename Torus> struct int_comparison_eq_buffer {
params.glwe_dimension, params.polynomial_size, params.message_modulus,
params.carry_modulus, is_non_zero_lut_f, gpu_memory_allocated);
auto active_streams = streams.active_gpu_subset(num_radix_blocks);
is_non_zero_lut->broadcast_lut(active_streams);
// Scalar may have up to num_radix_blocks blocks
scalar_comparison_luts = new int_radix_lut<Torus>(
streams, params, total_modulus, num_radix_blocks, allocate_gpu_memory,
size_tracker);
for (int i = 0; i < total_modulus; i++) {
auto lut_f = [i, operator_f](Torus x) -> Torus {
return operator_f(i, x);
if (op == COMPARISON_TYPE::EQ || op == COMPARISON_TYPE::NE) {
// Operator LUT
auto operator_f = [op](Torus lhs, Torus rhs) -> Torus {
if (op == COMPARISON_TYPE::EQ) {
return (lhs == rhs);
} else if (op == COMPARISON_TYPE::NE) {
return (lhs != rhs);
} else {
PANIC("Cuda error (eq/ne): invalid comparison type")
}
};
// Scalar may have up to num_radix_blocks blocks
scalar_comparison_luts = new int_radix_lut<Torus>(
streams, params, total_modulus, num_radix_blocks, allocate_gpu_memory,
size_tracker);
generate_device_accumulator<Torus>(
streams.stream(0), streams.gpu_index(0),
scalar_comparison_luts->get_lut(0, i),
scalar_comparison_luts->get_degree(i),
scalar_comparison_luts->get_max_degree(i), params.glwe_dimension,
params.polynomial_size, params.message_modulus, params.carry_modulus,
lut_f, gpu_memory_allocated);
for (int i = 0; i < total_modulus; i++) {
auto lut_f = [i, operator_f](Torus x) -> Torus {
return operator_f(i, x);
};
generate_device_accumulator<Torus>(
streams.stream(0), streams.gpu_index(0),
scalar_comparison_luts->get_lut(0, i),
scalar_comparison_luts->get_degree(i),
scalar_comparison_luts->get_max_degree(i), params.glwe_dimension,
params.polynomial_size, params.message_modulus,
params.carry_modulus, lut_f, gpu_memory_allocated);
}
scalar_comparison_luts->broadcast_lut(active_streams);
operator_lut =
new int_radix_lut<Torus>(streams, params, 1, num_radix_blocks,
allocate_gpu_memory, size_tracker);
generate_device_accumulator_bivariate<Torus>(
streams.stream(0), streams.gpu_index(0), operator_lut->get_lut(0, 0),
operator_lut->get_degree(0), operator_lut->get_max_degree(0),
params.glwe_dimension, params.polynomial_size, params.message_modulus,
params.carry_modulus, operator_f, gpu_memory_allocated);
operator_lut->broadcast_lut(active_streams);
} else {
scalar_comparison_luts = nullptr;
operator_lut = nullptr;
}
scalar_comparison_luts->broadcast_lut(active_streams);
}
void release(CudaStreams streams) {
operator_lut->release(streams);
delete operator_lut;
if (op == COMPARISON_TYPE::EQ || COMPARISON_TYPE::NE) {
PANIC_IF_FALSE(operator_lut != nullptr,
"Cuda error: no operator lut was created");
operator_lut->release(streams);
delete operator_lut;
operator_lut = nullptr;
PANIC_IF_FALSE(scalar_comparison_luts != nullptr,
"Cuda error: no scalar comparison luts were created");
scalar_comparison_luts->release(streams);
delete scalar_comparison_luts;
scalar_comparison_luts = nullptr;
}
is_non_zero_lut->release(streams);
delete is_non_zero_lut;
scalar_comparison_luts->release(streams);
delete scalar_comparison_luts;
is_non_zero_lut = nullptr;
are_all_block_true_buffer->release(streams);
delete are_all_block_true_buffer;
are_all_block_true_buffer = nullptr;
}
};
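After this restructuring, `operator_lut` and `scalar_comparison_luts` exist only for EQ/NE comparisons, and `release` both checks the operation type and nulls the pointers after deletion. The same allocate-conditionally / release-exactly-once invariant, sketched in Rust with a stand-in `Lut` type (illustrative only, not the CUDA backend's API):

```rust
// Stand-in for the device LUT object; only the ownership pattern matters here.
struct Lut;
impl Lut {
    fn release(&mut self) {}
}

#[derive(PartialEq)]
enum ComparisonType {
    Eq,
    Ne,
    Gt,
}

struct EqBuffer {
    op: ComparisonType,
    operator_lut: Option<Lut>,
    scalar_comparison_luts: Option<Lut>,
}

impl EqBuffer {
    fn new(op: ComparisonType) -> Self {
        // LUTs are only created for EQ/NE, mirroring the constructor above.
        let needs_luts = op == ComparisonType::Eq || op == ComparisonType::Ne;
        Self {
            operator_lut: if needs_luts { Some(Lut) } else { None },
            scalar_comparison_luts: if needs_luts { Some(Lut) } else { None },
            op,
        }
    }

    fn release(&mut self) {
        if self.op == ComparisonType::Eq || self.op == ComparisonType::Ne {
            // `take()` empties the slot; `expect` plays the role of PANIC_IF_FALSE.
            self.operator_lut.take().expect("no operator lut was created").release();
            self.scalar_comparison_luts
                .take()
                .expect("no scalar comparison luts were created")
                .release();
        }
    }
}

fn main() {
    let mut eq = EqBuffer::new(ComparisonType::Eq);
    eq.release(); // frees both LUTs exactly once
    let mut gt = EqBuffer::new(ComparisonType::Gt);
    gt.release(); // no-op: nothing was allocated for GT
}
```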
@@ -3926,8 +3958,7 @@ template <typename Torus> struct int_comparison_diff_buffer {
case LE:
return (x == IS_INFERIOR) || (x == IS_EQUAL);
default:
// We don't need a default case but we need to return something
return 42;
PANIC("Cuda error (comparisons): unknown comparison type")
}
};
@@ -4922,11 +4953,10 @@ template <typename Torus> struct int_scalar_mul_buffer {
streams, params, num_radix_blocks, num_ciphertext_bits, true,
allocate_gpu_memory, last_step_mem);
}
uint32_t uses_carry = 0;
uint32_t requested_flag = outputFlag::FLAG_NONE;
sc_prop_mem = new int_sc_prop_memory<Torus>(
streams, params, num_radix_blocks, requested_flag, uses_carry,
allocate_gpu_memory, last_step_mem);
streams, params, num_radix_blocks, requested_flag, allocate_gpu_memory,
last_step_mem);
if (anticipated_buffer_drop) {
size_tracker += std::max(anticipated_drop_mem, last_step_mem);
} else {
@@ -4982,10 +5012,9 @@ template <typename Torus> struct int_abs_buffer {
streams, SHIFT_OR_ROTATE_TYPE::RIGHT_SHIFT, params, num_radix_blocks,
allocate_gpu_memory, size_tracker);
uint32_t requested_flag = outputFlag::FLAG_NONE;
uint32_t uses_carry = 0;
scp_mem = new int_sc_prop_memory<Torus>(streams, params, num_radix_blocks,
requested_flag, uses_carry,
allocate_gpu_memory, size_tracker);
requested_flag, allocate_gpu_memory,
size_tracker);
bitxor_mem = new int_bitop_buffer<Torus>(streams, BITOP_TYPE::BITXOR,
params, num_radix_blocks,
allocate_gpu_memory, size_tracker);
@@ -5061,13 +5090,12 @@ template <typename Torus> struct int_div_rem_memory {
abs_mem_2 = new int_abs_buffer<Torus>(streams, params, num_blocks,
allocate_gpu_memory, size_tracker);
uint32_t requested_flag = outputFlag::FLAG_NONE;
uint32_t uses_carry = 0;
scp_mem_1 = new int_sc_prop_memory<Torus>(
streams, params, num_blocks, requested_flag, uses_carry,
allocate_gpu_memory, size_tracker);
streams, params, num_blocks, requested_flag, allocate_gpu_memory,
size_tracker);
scp_mem_2 = new int_sc_prop_memory<Torus>(
streams, params, num_blocks, requested_flag, uses_carry,
allocate_gpu_memory, size_tracker);
streams, params, num_blocks, requested_flag, allocate_gpu_memory,
size_tracker);
std::function<uint64_t(uint64_t)> quotient_predicate_lut_f =
[](uint64_t x) -> uint64_t { return x == 1; };
@@ -5251,7 +5279,7 @@ template <typename Torus> struct int_sub_and_propagate {
this->allocate_gpu_memory = allocate_gpu_memory;
this->sc_prop_mem = new int_sc_prop_memory<Torus>(
streams, params, num_radix_blocks, requested_flag_in, (uint32_t)0,
streams, params, num_radix_blocks, requested_flag_in,
allocate_gpu_memory, size_tracker);
this->neg_rhs_array = new CudaRadixCiphertextFFI;
@@ -5391,8 +5419,8 @@ template <typename Torus> struct int_unsigned_scalar_div_mem {
streams, params, num_radix_blocks, scalar_divisor_ffi->active_bits,
allocate_gpu_memory, size_tracker);
scp_mem = new int_sc_prop_memory<Torus>(
streams, params, num_radix_blocks, FLAG_NONE, (uint32_t)0,
allocate_gpu_memory, size_tracker);
streams, params, num_radix_blocks, FLAG_NONE, allocate_gpu_memory,
size_tracker);
sub_and_propagate_mem = new int_sub_and_propagate<Torus>(
streams, params, num_radix_blocks, FLAG_NONE, allocate_gpu_memory,
size_tracker);
@@ -5545,8 +5573,8 @@ template <typename Torus> struct int_signed_scalar_div_mem {
streams, RIGHT_SHIFT, params, num_radix_blocks,
allocate_gpu_memory, size_tracker);
scp_mem = new int_sc_prop_memory<Torus>(
streams, params, num_radix_blocks, FLAG_NONE, (uint32_t)0,
allocate_gpu_memory, size_tracker);
streams, params, num_radix_blocks, FLAG_NONE, allocate_gpu_memory,
size_tracker);
} else {
@@ -5567,7 +5595,7 @@ template <typename Torus> struct int_signed_scalar_div_mem {
if (scalar_divisor_ffi->is_chosen_multiplier_geq_two_pow_numerator) {
scp_mem = new int_sc_prop_memory<Torus>(
streams, params, num_radix_blocks, FLAG_NONE, (uint32_t)0,
streams, params, num_radix_blocks, FLAG_NONE,
allocate_gpu_memory, size_tracker);
}
}
@@ -5711,8 +5739,8 @@ template <typename Torus> struct int_signed_scalar_div_rem_buffer {
allocate_gpu_memory, size_tracker);
this->scp_mem = new int_sc_prop_memory<Torus>(
streams, params, num_radix_blocks, FLAG_NONE, (uint32_t)0,
allocate_gpu_memory, size_tracker);
streams, params, num_radix_blocks, FLAG_NONE, allocate_gpu_memory,
size_tracker);
bool is_divisor_one = scalar_divisor_ffi->is_abs_divisor_one &&
!scalar_divisor_ffi->is_divisor_negative;
@@ -5906,9 +5934,9 @@ template <typename Torus> struct int_count_of_consecutive_bits_buffer {
streams, params, counter_num_blocks, num_radix_blocks, true,
allocate_gpu_memory, size_tracker);
this->propagate_mem =
new int_sc_prop_memory<Torus>(streams, params, counter_num_blocks, 0, 0,
allocate_gpu_memory, size_tracker);
this->propagate_mem = new int_sc_prop_memory<Torus>(
streams, params, counter_num_blocks, FLAG_NONE, allocate_gpu_memory,
size_tracker);
}
void release(CudaStreams streams) {

View File

@@ -430,7 +430,6 @@ __host__ void tree_sign_reduction(
"than the number of blocks to operate on")
auto params = tree_buffer->params;
auto big_lwe_dimension = params.big_lwe_dimension;
auto glwe_dimension = params.glwe_dimension;
auto polynomial_size = params.polynomial_size;
auto message_modulus = params.message_modulus;

View File

@@ -51,8 +51,8 @@ uint64_t scratch_cuda_propagate_single_carry_kb_64_inplace(
uint32_t small_lwe_dimension, uint32_t ks_level, uint32_t ks_base_log,
uint32_t pbs_level, uint32_t pbs_base_log, uint32_t grouping_factor,
uint32_t num_blocks, uint32_t message_modulus, uint32_t carry_modulus,
PBS_TYPE pbs_type, uint32_t requested_flag, uint32_t uses_carry,
bool allocate_gpu_memory, PBS_MS_REDUCTION_T noise_reduction_type) {
PBS_TYPE pbs_type, uint32_t requested_flag, bool allocate_gpu_memory,
PBS_MS_REDUCTION_T noise_reduction_type) {
int_radix_params params(pbs_type, glwe_dimension, polynomial_size,
big_lwe_dimension, small_lwe_dimension, ks_level,
ks_base_log, pbs_level, pbs_base_log, grouping_factor,
@@ -60,7 +60,7 @@ uint64_t scratch_cuda_propagate_single_carry_kb_64_inplace(
return scratch_cuda_propagate_single_carry_kb_inplace<uint64_t>(
CudaStreams(streams), (int_sc_prop_memory<uint64_t> **)mem_ptr,
num_blocks, params, requested_flag, uses_carry, allocate_gpu_memory);
num_blocks, params, requested_flag, allocate_gpu_memory);
}
uint64_t scratch_cuda_add_and_propagate_single_carry_kb_64_inplace(
@@ -69,8 +69,8 @@ uint64_t scratch_cuda_add_and_propagate_single_carry_kb_64_inplace(
uint32_t small_lwe_dimension, uint32_t ks_level, uint32_t ks_base_log,
uint32_t pbs_level, uint32_t pbs_base_log, uint32_t grouping_factor,
uint32_t num_blocks, uint32_t message_modulus, uint32_t carry_modulus,
PBS_TYPE pbs_type, uint32_t requested_flag, uint32_t uses_carry,
bool allocate_gpu_memory, PBS_MS_REDUCTION_T noise_reduction_type) {
PBS_TYPE pbs_type, uint32_t requested_flag, bool allocate_gpu_memory,
PBS_MS_REDUCTION_T noise_reduction_type) {
int_radix_params params(pbs_type, glwe_dimension, polynomial_size,
big_lwe_dimension, small_lwe_dimension, ks_level,
ks_base_log, pbs_level, pbs_base_log, grouping_factor,
@@ -78,7 +78,7 @@ uint64_t scratch_cuda_add_and_propagate_single_carry_kb_64_inplace(
return scratch_cuda_propagate_single_carry_kb_inplace<uint64_t>(
CudaStreams(streams), (int_sc_prop_memory<uint64_t> **)mem_ptr,
num_blocks, params, requested_flag, uses_carry, allocate_gpu_memory);
num_blocks, params, requested_flag, allocate_gpu_memory);
}
uint64_t scratch_cuda_integer_overflowing_sub_kb_64_inplace(

View File

@@ -242,8 +242,8 @@ __host__ void host_radix_cumulative_sum_in_groups(cudaStream_t stream,
auto lwe_size = dest->lwe_dimension + 1;
cuda_set_device(gpu_index);
// Each CUDA block is responsible for a single group
int num_blocks = (num_radix_blocks + group_size - 1) / group_size,
num_threads = 512;
int num_blocks = CEIL_DIV(num_radix_blocks, group_size);
int num_threads = 512;
device_radix_cumulative_sum_in_groups<Torus>
<<<num_blocks, num_threads, 0, stream>>>(
(Torus *)dest->ptr, (Torus *)src->ptr, num_radix_blocks, lwe_size,
@@ -1566,9 +1566,6 @@ void host_full_propagate_inplace(
void *const *bsks, uint32_t num_blocks) {
auto params = mem_ptr->lut->params;
int big_lwe_size = (params.glwe_dimension * params.polynomial_size + 1);
int small_lwe_size = (params.small_lwe_dimension + 1);
// In the case of extracting a single LWE these parameters are dummy
uint32_t num_many_lut = 1;
uint32_t lut_stride = 0;
@@ -1969,12 +1966,12 @@ template <typename Torus>
uint64_t scratch_cuda_propagate_single_carry_kb_inplace(
CudaStreams streams, int_sc_prop_memory<Torus> **mem_ptr,
uint32_t num_radix_blocks, int_radix_params params, uint32_t requested_flag,
uint32_t uses_carry, bool allocate_gpu_memory) {
bool allocate_gpu_memory) {
PUSH_RANGE("scratch add & propagate sc")
uint64_t size_tracker = 0;
*mem_ptr = new int_sc_prop_memory<Torus>(streams, params, num_radix_blocks,
requested_flag, uses_carry,
allocate_gpu_memory, size_tracker);
requested_flag, allocate_gpu_memory,
size_tracker);
POP_RANGE()
return size_tracker;
}
@@ -2116,9 +2113,6 @@ void host_add_and_propagate_single_carry(
auto num_radix_blocks = lhs_array->num_radix_blocks;
auto params = mem->params;
auto glwe_dimension = params.glwe_dimension;
auto polynomial_size = params.polynomial_size;
uint32_t big_lwe_size = glwe_dimension * polynomial_size + 1;
auto lut_stride = mem->lut_stride;
auto num_many_lut = mem->num_many_lut;
CudaRadixCiphertextFFI output_flag;
@@ -2390,7 +2384,6 @@ __host__ void integer_radix_apply_noise_squashing_kb(
PUSH_RANGE("apply noise squashing")
auto params = lut->params;
auto pbs_type = params.pbs_type;
auto big_lwe_dimension = params.big_lwe_dimension;
auto small_lwe_dimension = params.small_lwe_dimension;
auto ks_level = params.ks_level;

View File

@@ -808,7 +808,6 @@ unsafe extern "C" {
carry_modulus: u32,
pbs_type: PBS_TYPE,
requested_flag: u32,
uses_carry: u32,
allocate_gpu_memory: bool,
noise_reduction_type: PBS_MS_REDUCTION_T,
) -> u64;
@@ -831,7 +830,6 @@ unsafe extern "C" {
carry_modulus: u32,
pbs_type: PBS_TYPE,
requested_flag: u32,
uses_carry: u32,
allocate_gpu_memory: bool,
noise_reduction_type: PBS_MS_REDUCTION_T,
) -> u64;

View File

@@ -421,23 +421,32 @@ pub fn throughput_num_threads(num_block: usize, op_pbs_count: u64) -> u64 {
let block_multiplicator = (ref_block_count as f64 / num_block as f64).ceil().min(1.0);
// Some operations with a high serial workload (e.g. division) would yield an
// operation-loading value so low that the resulting number of elements wouldn't be meaningful.
let minimum_loading = if num_block < 64 { 0.2 } else { 0.01 };
let minimum_loading = if num_block < 64 { 1.0 } else { 0.015 };
#[cfg(feature = "gpu")]
{
let num_sms_per_gpu = get_number_of_sms();
let total_num_sm = num_sms_per_gpu * get_number_of_gpus();
let total_blocks_per_sm = 4u32; // Assume each SM can handle 4 blocks concurrently
let total_num_sm = total_blocks_per_sm * total_num_sm;
let total_blocks_per_sm = 4u64; // Assume each SM can handle 4 blocks concurrently
let min_num_waves = 4u64; // Enforce at least 4 waves on the GPU
let elements_per_wave = total_num_sm as u64 / (num_block as u64);
let block_factor = ((2.0f64 * num_block as f64) / 4.0f64).ceil() as u64;
let elements_per_wave = total_blocks_per_sm * total_num_sm as u64 / block_factor;
// We need to enable the new load for PBS benches and for sizes larger than 16 blocks
// in demanding operations; for the rest of the operations we maintain a minimum of
// 200 elements.
let min_elements = if op_pbs_count == 1
|| (op_pbs_count > (num_block * num_block) as u64 && num_block >= 16)
{
elements_per_wave * min_num_waves
} else {
200u64
};
let operation_loading = ((total_num_sm as u64 / op_pbs_count) as f64).max(minimum_loading);
let elements = (total_num_sm as f64 * block_multiplicator * operation_loading) as u64;
elements.min(elements_per_wave * min_num_waves) // This threshold is useful for operations
// with both a small number of
// blocks and a low PBS count.
elements.min(min_elements) // This threshold is useful for operations
// with both a small number of
// blocks and a low PBS count.
}
#[cfg(feature = "hpu")]
{

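The wave-based sizing above derives the batch size from occupancy: each operation is assumed to occupy ceil(2 * num_block / 4) CUDA blocks, each SM to run 4 blocks concurrently, and at least 4 full waves are enforced for PBS-bound benches. A self-contained Rust sketch of the same arithmetic, with the hardware numbers invented for illustration (the real code queries the driver via `get_number_of_sms`/`get_number_of_gpus`):

fn min_elements_for_gpu(num_block: u64, op_pbs_count: u64) -> u64 {
    let num_sms_per_gpu = 108u64; // hypothetical A100-class part
    let total_num_sm = num_sms_per_gpu; // single GPU assumed
    let total_blocks_per_sm = 4u64; // assume each SM can handle 4 blocks concurrently
    let min_num_waves = 4u64; // enforce at least 4 waves on the GPU
    let block_factor = (2 * num_block).div_ceil(4);
    let elements_per_wave = total_blocks_per_sm * total_num_sm / block_factor;
    if op_pbs_count == 1 || (op_pbs_count > num_block * num_block && num_block >= 16) {
        elements_per_wave * min_num_waves
    } else {
        200 // legacy floor for cheap operations
    }
}

fn main() {
    assert_eq!(min_elements_for_gpu(32, 1), 108); // bare PBS bench: 4 waves of 27
    assert_eq!(min_elements_for_gpu(4, 8), 200); // cheap op keeps the floor
}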
View File

@@ -2323,7 +2323,6 @@ pub(crate) unsafe fn propagate_single_carry_assign_async<T: UnsignedInteger, B:
carry_modulus.0 as u32,
pbs_type as u32,
requested_flag as u32,
uses_carry,
true,
noise_reduction_type as u32,
);
@@ -2360,7 +2359,6 @@ pub(crate) fn get_propagate_single_carry_assign_async_size_on_gpu(
pbs_type: PBSType,
grouping_factor: LweBskGroupingFactor,
requested_flag: OutputFlag,
uses_carry: u32,
ms_noise_reduction_configuration: Option<&CudaModulusSwitchNoiseReductionConfiguration>,
) -> u64 {
let noise_reduction_type = resolve_noise_reduction_type(ms_noise_reduction_configuration);
@@ -2385,7 +2383,6 @@ pub(crate) fn get_propagate_single_carry_assign_async_size_on_gpu(
carry_modulus.0 as u32,
pbs_type as u32,
requested_flag as u32,
uses_carry,
false,
noise_reduction_type as u32,
)
@@ -2412,7 +2409,6 @@ pub(crate) fn get_add_and_propagate_single_carry_assign_async_size_on_gpu(
pbs_type: PBSType,
grouping_factor: LweBskGroupingFactor,
requested_flag: OutputFlag,
uses_carry: u32,
ms_noise_reduction_configuration: Option<&CudaModulusSwitchNoiseReductionConfiguration>,
) -> u64 {
let noise_reduction_type = resolve_noise_reduction_type(ms_noise_reduction_configuration);
@@ -2437,7 +2433,6 @@ pub(crate) fn get_add_and_propagate_single_carry_assign_async_size_on_gpu(
carry_modulus.0 as u32,
pbs_type as u32,
requested_flag as u32,
uses_carry,
false,
noise_reduction_type as u32,
)
@@ -2759,7 +2754,6 @@ pub(crate) unsafe fn add_and_propagate_single_carry_assign_async<T: UnsignedInte
carry_modulus.0 as u32,
pbs_type as u32,
requested_flag as u32,
uses_carry,
true,
noise_reduction_type as u32,
);

View File

@@ -214,7 +214,6 @@ impl CudaServerKey {
PBSType::Classical,
LweBskGroupingFactor(0),
OutputFlag::None,
0u32,
d_bsk.ms_noise_reduction_configuration.as_ref(),
)
}
@@ -234,7 +233,6 @@ impl CudaServerKey {
PBSType::MultiBit,
d_multibit_bsk.grouping_factor,
OutputFlag::None,
0u32,
None,
)
}

View File

@@ -1,6 +1,6 @@
use crate::core_crypto::gpu::lwe_ciphertext_list::CudaLweCiphertextList;
use crate::core_crypto::gpu::CudaStreams;
use crate::core_crypto::prelude::{CiphertextModulus, LweBskGroupingFactor, LweCiphertextCount};
use crate::core_crypto::prelude::{LweBskGroupingFactor, LweCiphertextCount};
use crate::integer::gpu::ciphertext::boolean_value::CudaBooleanBlock;
use crate::integer::gpu::ciphertext::info::CudaRadixCiphertextInfo;
use crate::integer::gpu::ciphertext::{CudaIntegerRadixCiphertext, CudaRadixCiphertext};
@@ -38,7 +38,7 @@ impl CudaServerKey {
let block = CudaLweCiphertextList::new(
ct_left.as_ref().d_blocks.lwe_dimension(),
LweCiphertextCount(1),
CiphertextModulus::new_native(),
self.ciphertext_modulus,
streams,
);
let mut block_info = ct_left.as_ref().info.blocks[0];
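The temporary block was previously built with `CiphertextModulus::new_native()`; the fix threads the server key's actual modulus through instead, so non-native moduli are no longer silently replaced (the same change recurs in a later file of this diff). A simplified Rust sketch of the before/after behaviour, using stand-in types rather than the tfhe-rs API:

#[derive(Clone, Copy, Debug, PartialEq)]
enum CiphertextModulus {
    Native,
    Custom(u64),
}

struct ServerKey {
    ciphertext_modulus: CiphertextModulus,
}

fn modulus_for_new_block(key: &ServerKey) -> CiphertextModulus {
    // Before the fix this returned CiphertextModulus::Native unconditionally.
    key.ciphertext_modulus
}

fn main() {
    let key = ServerKey { ciphertext_modulus: CiphertextModulus::Custom(1u64 << 62) };
    assert_eq!(modulus_for_new_block(&key), CiphertextModulus::Custom(1u64 << 62));
}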

View File

@@ -261,7 +261,6 @@ impl CudaServerKey {
PBSType::Classical,
LweBskGroupingFactor(0),
OutputFlag::None,
0u32,
d_bsk.ms_noise_reduction_configuration.as_ref(),
)
}
@@ -281,7 +280,6 @@ impl CudaServerKey {
PBSType::MultiBit,
d_multibit_bsk.grouping_factor,
OutputFlag::None,
0u32,
None,
)
}

View File

@@ -1,7 +1,7 @@
use crate::core_crypto::gpu::lwe_ciphertext_list::CudaLweCiphertextList;
use crate::core_crypto::gpu::vec::CudaVec;
use crate::core_crypto::gpu::CudaStreams;
use crate::core_crypto::prelude::{CiphertextModulus, LweBskGroupingFactor, LweCiphertextCount};
use crate::core_crypto::prelude::{LweBskGroupingFactor, LweCiphertextCount};
use crate::integer::block_decomposition::{BlockDecomposer, DecomposableInto};
use crate::integer::gpu::ciphertext::boolean_value::CudaBooleanBlock;
use crate::integer::gpu::ciphertext::info::CudaRadixCiphertextInfo;
@@ -160,7 +160,7 @@ impl CudaServerKey {
let block = CudaLweCiphertextList::new(
ct.as_ref().d_blocks.lwe_dimension(),
LweCiphertextCount(1),
CiphertextModulus::new_native(),
self.ciphertext_modulus,
streams,
);
let mut block_info = ct.as_ref().info.blocks[0];

View File

@@ -8,7 +8,7 @@ use syn::{
use crate::{
add_lifetime_param, add_trait_where_clause, add_where_lifetime_bound_to_generics,
extend_where_clause, filter_unsized_bounds, parse_const_str, DESERIALIZE_TRAIT_NAME,
LIFETIME_NAME, SERIALIZE_TRAIT_NAME,
FROM_TRAIT_NAME, LIFETIME_NAME, RESULT_TYPE_NAME, SERIALIZE_TRAIT_NAME, TRY_FROM_TRAIT_NAME,
};
/// Generates an impl block for the From trait. This will be:
@@ -28,9 +28,11 @@ pub(crate) fn generate_from_trait_impl(
from_variable_name: &str,
) -> syn::Result<ItemImpl> {
let from_variable = Ident::new(from_variable_name, Span::call_site());
let from_trait: Path = parse_const_str(FROM_TRAIT_NAME);
Ok(parse_quote! {
#[automatically_derived]
impl #impl_generics From<#src> for #dest #where_clause {
impl #impl_generics #from_trait<#src> for #dest #where_clause {
fn from(#from_variable: #src) -> Self {
#constructor
}
@@ -57,11 +59,14 @@ pub(crate) fn generate_try_from_trait_impl(
from_variable_name: &str,
) -> syn::Result<ItemImpl> {
let from_variable = Ident::new(from_variable_name, Span::call_site());
let result_type: Path = parse_const_str(RESULT_TYPE_NAME);
let try_from_trait: Path = parse_const_str(TRY_FROM_TRAIT_NAME);
Ok(parse_quote! {
#[automatically_derived]
impl #impl_generics TryFrom<#src> for #dest #where_clause {
impl #impl_generics #try_from_trait<#src> for #dest #where_clause {
type Error = #error;
fn try_from(#from_variable: #src) -> Result<Self, Self::Error> {
fn try_from(#from_variable: #src) -> #result_type<Self, Self::Error> {
#constructor
}
}

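The motivation for spelling out `::core::convert::From`, `::core::convert::TryFrom`, and `::core::result::Result` in the generated impls is hygiene against user-defined names: macro expansion happens in the caller's namespace, where a local item can shadow the prelude. A standalone repro of the failure mode the full paths avoid, independent of the derive macro itself:

#[allow(dead_code)]
struct Result; // shadows core::result::Result in this module

struct Wrapper(u32);

// Written with a bare `Result<Self, ...>` return type, this impl would now
// refer to the unit struct above and fail to compile; the fully qualified
// paths are immune to the shadowing.
impl ::core::convert::TryFrom<u64> for Wrapper {
    type Error = ::core::num::TryFromIntError;
    fn try_from(v: u64) -> ::core::result::Result<Self, Self::Error> {
        u32::try_from(v).map(Wrapper)
    }
}

fn main() {
    assert!(Wrapper::try_from(5u64).is_ok());
    assert!(Wrapper::try_from(u64::MAX).is_err());
}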
View File

@@ -46,6 +46,7 @@ pub(crate) const UNVERSIONIZE_ERROR_NAME: &str = crate_full_path!("UnversionizeE
pub(crate) const SERIALIZE_TRAIT_NAME: &str = "::serde::Serialize";
pub(crate) const DESERIALIZE_TRAIT_NAME: &str = "::serde::Deserialize";
pub(crate) const DESERIALIZE_OWNED_TRAIT_NAME: &str = "::serde::de::DeserializeOwned";
pub(crate) const TRY_FROM_TRAIT_NAME: &str = "::core::convert::TryFrom";
pub(crate) const FROM_TRAIT_NAME: &str = "::core::convert::From";
pub(crate) const TRY_INTO_TRAIT_NAME: &str = "::core::convert::TryInto";
pub(crate) const INTO_TRAIT_NAME: &str = "::core::convert::Into";
@@ -53,6 +54,8 @@ pub(crate) const ERROR_TRAIT_NAME: &str = "::core::error::Error";
pub(crate) const SYNC_TRAIT_NAME: &str = "::core::marker::Sync";
pub(crate) const SEND_TRAIT_NAME: &str = "::core::marker::Send";
pub(crate) const DEFAULT_TRAIT_NAME: &str = "::core::default::Default";
pub(crate) const RESULT_TYPE_NAME: &str = "::core::result::Result";
pub(crate) const VEC_TYPE_NAME: &str = "::std::vec::Vec";
pub(crate) const STATIC_LIFETIME_NAME: &str = "'static";
use associated::AssociatingTrait;
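`parse_const_str` is presumably a thin helper that parses these constants into `syn::Path` values for splicing with `quote!`; a hedged sketch of that round trip (requires the `syn` and `quote` crates):

use quote::quote;
use syn::Path;

fn main() {
    // Assumed to mirror what parse_const_str does: the constants are valid
    // by construction, so panicking on failure is acceptable.
    let vec_type: Path =
        syn::parse_str("::std::vec::Vec").expect("path constant should parse");
    // Splicing the parsed path keeps the generated code immune to a local
    // `Vec` shadow in the expanding crate.
    let tokens = quote! { type VersionedVec = #vec_type<u8>; };
    println!("{tokens}"); // type VersionedVec = :: std :: vec :: Vec < u8 > ;
}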
@@ -240,6 +243,9 @@ pub fn derive_versionize(input: TokenStream) -> TokenStream {
let unversionize_body = implementor.unversionize_method_body(&unversionize_arg_name);
let unversionize_error: Path = parse_const_str(UNVERSIONIZE_ERROR_NAME);
let result_type: Path = parse_const_str(RESULT_TYPE_NAME);
let vec_type: Path = parse_const_str(VEC_TYPE_NAME);
quote! {
#version_trait_impl
@@ -269,7 +275,7 @@ pub fn derive_versionize(input: TokenStream) -> TokenStream {
impl #trait_impl_generics #unversionize_trait for #input_ident #ty_generics
#unversionize_trait_where_clause
{
fn unversionize(#unversionize_arg_name: Self::VersionedOwned) -> Result<Self, #unversionize_error> {
fn unversionize(#unversionize_arg_name: Self::VersionedOwned) -> #result_type<Self, #unversionize_error> {
#unversionize_body
}
}
@@ -278,7 +284,7 @@ pub fn derive_versionize(input: TokenStream) -> TokenStream {
impl #trait_impl_generics #versionize_slice_trait for #input_ident #ty_generics
#versionize_trait_where_clause
{
type VersionedSlice<#lifetime> = Vec<<Self as #versionize_trait>::Versioned<#lifetime>> #versioned_type_where_clause;
type VersionedSlice<#lifetime> = #vec_type<<Self as #versionize_trait>::Versioned<#lifetime>> #versioned_type_where_clause;
fn versionize_slice(slice: &[Self]) -> Self::VersionedSlice<'_> {
slice.iter().map(|val| #versionize_trait::versionize(val)).collect()
@@ -290,9 +296,9 @@ pub fn derive_versionize(input: TokenStream) -> TokenStream {
#versionize_owned_trait_where_clause
{
type VersionedVec = Vec<<Self as #versionize_owned_trait>::VersionedOwned> #versioned_owned_type_where_clause;
type VersionedVec = #vec_type<<Self as #versionize_owned_trait>::VersionedOwned> #versioned_owned_type_where_clause;
fn versionize_vec(vec: Vec<Self>) -> Self::VersionedVec {
fn versionize_vec(vec: #vec_type<Self>) -> Self::VersionedVec {
vec.into_iter().map(|val| #versionize_owned_trait::versionize_owned(val)).collect()
}
}
@@ -301,7 +307,7 @@ pub fn derive_versionize(input: TokenStream) -> TokenStream {
impl #trait_impl_generics #unversionize_vec_trait for #input_ident #ty_generics
#unversionize_trait_where_clause
{
fn unversionize_vec(versioned: Self::VersionedVec) -> Result<Vec<Self>, #unversionize_error> {
fn unversionize_vec(versioned: Self::VersionedVec) -> #result_type<#vec_type<Self>, #unversionize_error> {
versioned
.into_iter()
.map(|versioned| <Self as #unversionize_trait>::unversionize(versioned))
@@ -346,6 +352,8 @@ pub fn derive_not_versioned(input: TokenStream) -> TokenStream {
let unversionize_error: Path = parse_const_str(UNVERSIONIZE_ERROR_NAME);
let lifetime = Lifetime::new(LIFETIME_NAME, Span::call_site());
let result_type: Path = parse_const_str(RESULT_TYPE_NAME);
quote! {
#[automatically_derived]
impl #impl_generics #versionize_trait for #input_ident #ty_generics #versionize_where_clause {
@@ -367,7 +375,7 @@ pub fn derive_not_versioned(input: TokenStream) -> TokenStream {
#[automatically_derived]
impl #impl_generics #unversionize_trait for #input_ident #ty_generics #versionize_owned_where_clause {
fn unversionize(versioned: Self::VersionedOwned) -> Result<Self, #unversionize_error> {
fn unversionize(versioned: Self::VersionedOwned) -> #result_type<Self, #unversionize_error> {
Ok(versioned)
}
}