chore(zk): add benches to tfhe-zk-pok

Nicolas Sarlin
2024-11-08 16:41:28 +01:00
committed by Nicolas Sarlin
parent e59a680407
commit a45b7b3974
9 changed files with 853 additions and 4 deletions

.github/workflows/benchmark_tfhe_zk_pok.yml

@@ -0,0 +1,173 @@
# Run benchmarks of the tfhe-zk-pok crate on an instance and return parsed results to Slab CI bot.
name: tfhe-zk-pok benchmarks
on:
workflow_dispatch:
push:
branches:
- main
schedule:
# Weekly benchmarks will be triggered each Saturday at 3 a.m.
- cron: '0 3 * * 6'
env:
CARGO_TERM_COLOR: always
RESULTS_FILENAME: parsed_benchmark_results_${{ github.sha }}.json
PARSE_INTEGER_BENCH_CSV_FILE: tfhe_rs_integer_benches_${{ github.sha }}.csv
ACTION_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}
RUST_BACKTRACE: "full"
RUST_MIN_STACK: "8388608"
SLACK_CHANNEL: ${{ secrets.SLACK_CHANNEL }}
SLACK_ICON: https://pbs.twimg.com/profile_images/1274014582265298945/OjBKP9kn_400x400.png
SLACK_USERNAME: ${{ secrets.BOT_USERNAME }}
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
jobs:
should-run:
runs-on: ubuntu-latest
if: github.event_name == 'workflow_dispatch' ||
((github.event_name == 'push' || github.event_name == 'schedule') && github.repository == 'zama-ai/tfhe-rs')
outputs:
zk_pok_changed: ${{ steps.changed-files.outputs.zk_pok_any_changed }}
steps:
- name: Checkout tfhe-rs
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 0
- name: Check for file changes
id: changed-files
uses: tj-actions/changed-files@c3a1bb2c992d77180ae65be6ae6c166cf40f857c
with:
since_last_remote_commit: true
files_yaml: |
zk_pok:
- tfhe-zk-pok/**
- .github/workflows/benchmark_tfhe_zk_pok.yml
setup-instance:
name: Setup instance (tfhe-zk-pok-benchmarks)
runs-on: ubuntu-latest
needs: should-run
if: github.event_name == 'workflow_dispatch' ||
(github.event_name == 'schedule' && github.repository == 'zama-ai/tfhe-rs') ||
(github.event_name == 'push' &&
github.repository == 'zama-ai/tfhe-rs' &&
needs.should-run.outputs.zk_pok_changed == 'true')
outputs:
runner-name: ${{ steps.start-instance.outputs.label }}
steps:
- name: Start instance
id: start-instance
uses: zama-ai/slab-github-runner@801df0b8db5ea2b06128b7476c652f5ed5f193a8
with:
mode: start
github-token: ${{ secrets.SLAB_ACTION_TOKEN }}
slab-url: ${{ secrets.SLAB_BASE_URL }}
job-secret: ${{ secrets.JOB_SECRET }}
backend: aws
profile: bench
tfhe-zk-pok-benchmarks:
name: Execute tfhe-zk-pok benchmarks
if: needs.setup-instance.result != 'skipped'
needs: setup-instance
concurrency:
group: ${{ github.workflow }}_${{github.event_name}}_${{ github.ref }}${{ github.ref == 'refs/heads/main' && github.sha || '' }}
cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
runs-on: ${{ needs.setup-instance.outputs.runner-name }}
steps:
- name: Checkout tfhe-rs repo with tags
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 0
token: ${{ secrets.FHE_ACTIONS_TOKEN }}
- name: Get benchmark details
run: |
{
echo "BENCH_DATE=$(date --iso-8601=seconds)";
echo "COMMIT_DATE=$(git --no-pager show -s --format=%cd --date=iso8601-strict ${{ github.sha }})";
echo "COMMIT_HASH=$(git describe --tags --dirty)";
} >> "${GITHUB_ENV}"
- name: Install rust
uses: dtolnay/rust-toolchain@7b1c307e0dcbda6122208f10795a713336a9b35a
with:
toolchain: nightly
- name: Checkout Slab repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
repository: zama-ai/slab
path: slab
token: ${{ secrets.FHE_ACTIONS_TOKEN }}
- name: Run benchmarks
run: |
make bench_tfhe_zk_pok
- name: Parse results
run: |
python3 ./ci/benchmark_parser.py target/criterion ${{ env.RESULTS_FILENAME }} \
--database tfhe_rs \
--crate tfhe-zk-pok \
--hardware "hpc7a.96xlarge" \
--backend cpu \
--project-version "${{ env.COMMIT_HASH }}" \
--branch ${{ github.ref_name }} \
--commit-date "${{ env.COMMIT_DATE }}" \
--bench-date "${{ env.BENCH_DATE }}" \
--walk-subdirs \
--name-suffix avx512 \
--throughput
- name: Upload parsed results artifact
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882
with:
name: ${{ github.sha }}_tfhe_zk_pok
path: ${{ env.RESULTS_FILENAME }}
- name: Checkout Slab repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
repository: zama-ai/slab
path: slab
token: ${{ secrets.FHE_ACTIONS_TOKEN }}
- name: Send data to Slab
shell: bash
run: |
python3 slab/scripts/data_sender.py ${{ env.RESULTS_FILENAME }} "${{ secrets.JOB_SECRET }}" \
--slab-url "${{ secrets.SLAB_URL }}"
- name: Slack Notification
if: ${{ failure() }}
continue-on-error: true
uses: rtCamp/action-slack-notify@c33737706dea87cd7784c687dadc9adf1be59990
env:
SLACK_COLOR: ${{ job.status }}
SLACK_MESSAGE: "tfhe-zk-pok benchmarks finished with status: ${{ job.status }}. (${{ env.ACTION_RUN_URL }})"
teardown-instance:
name: Teardown instance (tfhe-zk-pok-benchmarks)
if: ${{ always() && needs.setup-instance.result != 'skipped' }}
needs: [ setup-instance, tfhe-zk-pok-benchmarks ]
runs-on: ubuntu-latest
steps:
- name: Stop instance
id: stop-instance
uses: zama-ai/slab-github-runner@801df0b8db5ea2b06128b7476c652f5ed5f193a8
with:
mode: stop
github-token: ${{ secrets.SLAB_ACTION_TOKEN }}
slab-url: ${{ secrets.SLAB_BASE_URL }}
job-secret: ${{ secrets.JOB_SECRET }}
label: ${{ needs.setup-instance.outputs.runner-name }}
- name: Slack Notification
if: ${{ failure() }}
continue-on-error: true
uses: rtCamp/action-slack-notify@c33737706dea87cd7784c687dadc9adf1be59990
env:
SLACK_COLOR: ${{ job.status }}
SLACK_MESSAGE: "Instance teardown (tfhe-zk-pok-benchmarks) finished with status: ${{ job.status }}. (${{ env.ACTION_RUN_URL }})"

.gitignore

@@ -13,6 +13,7 @@ target/
# Some of our bench outputs
/tfhe/benchmarks_parameters
/tfhe-zk-pok/benchmarks_parameters
**/*.csv
# dieharder run log

Makefile

@@ -1191,6 +1191,11 @@ bench_hlapi_erc20_gpu: install_rs_check_toolchain
--bench hlapi-erc20 \
--features=$(TARGET_ARCH_FEATURE),integer,gpu,internal-keycache,pbs-stats,nightly-avx512 -p $(TFHE_SPEC) --
.PHONY: bench_tfhe_zk_pok # Run benchmarks for the tfhe_zk_pok crate
bench_tfhe_zk_pok: install_rs_check_toolchain
RUSTFLAGS="$(RUSTFLAGS)" \
cargo $(CARGO_RS_CHECK_TOOLCHAIN) bench -p tfhe-zk-pok --
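Criterion writes its raw measurements under target/criterion, which is the directory the workflow's "Parse results" step above hands to ci/benchmark_parser.py.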
#
# Utility tools
#

ci/benchmark_parser.py

@@ -94,10 +94,17 @@ parser.add_argument(
default="cpu",
help="Backend on which benchmarks have run",
)
parser.add_argument(
"--crate",
dest="crate",
default="tfhe",
help="Crate for which benchmarks have run",
)
def recursive_parse(
directory,
crate,
walk_subdirs=False,
name_suffix="",
compute_throughput=False,
@@ -108,6 +115,7 @@ def recursive_parse(
.json extension at the top-level of this directory.
:param directory: path to directory that contains raw results as :class:`pathlib.Path`
:param crate: the name of the crate that has been benched
:param walk_subdirs: traverse results subdirectories if parameters change for a benchmark case.
:param name_suffix: a :class:`str` suffix to apply to each test name found
:param compute_throughput: compute number of operations per second and operations per
@@ -143,7 +151,7 @@ def recursive_parse(
continue
try:
- params, display_name, operator = get_parameters(test_name)
+ params, display_name, operator = get_parameters(test_name, crate)
except Exception as err:
parsing_failures.append((full_name, f"failed to get parameters: {err}"))
continue
@@ -276,7 +284,7 @@ def _parse_key_results(result_file, bench_type):
reader = csv.reader(csv_file)
for test_name, value in reader:
try:
- params, display_name, operator = get_parameters(test_name)
+ params, display_name, operator = get_parameters(test_name, crate)
except Exception as err:
parsing_failures.append((test_name, f"failed to get parameters: {err}"))
continue
@@ -318,15 +326,16 @@ def parse_key_gen_time(result_file):
return _parse_key_results(result_file, "latency")
- def get_parameters(bench_id):
+ def get_parameters(bench_id, directory):
"""
Get benchmarks parameters recorded for a given benchmark case.
:param bench_id: function name used for the benchmark case
:param directory: directory where the parameters are stored
:return: :class:`tuple` as ``(benchmark parameters, display name, operator type)``
"""
- params_dir = pathlib.Path("tfhe", "benchmarks_parameters", bench_id)
+ params_dir = pathlib.Path(directory, "benchmarks_parameters", bench_id)
params = _parse_file_to_json(params_dir, "parameters.json")
display_name = params.pop("display_name")
@@ -459,6 +468,7 @@ if __name__ == "__main__":
results, failures = recursive_parse(
raw_results,
args.crate,
args.walk_subdirs,
args.name_suffix,
args.throughput,
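Net effect: when the workflow above invokes the parser with --crate tfhe-zk-pok, get_parameters resolves tfhe-zk-pok/benchmarks_parameters/<bench_id>/parameters.json instead of the previously hard-coded tfhe/ prefix, i.e. the location the new bench utilities write to (see write_to_json below) and the path added to .gitignore above.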

tfhe-zk-pok/Cargo.toml

@@ -28,3 +28,12 @@ tfhe-versionable = { version = "0.3.2", path = "../utils/tfhe-versionable" }
serde_json = "~1.0"
itertools = "0.11.0"
bincode = "1.3.3"
criterion = "0.5.1"
[[bench]]
name = "pke_v1"
harness = false
[[bench]]
name = "pke_v2"
harness = false
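Each [[bench]] entry sets harness = false so that Criterion's criterion_main! entry point, rather than the default libtest harness, drives the run; an individual benchmark can be executed with, for example, cargo bench -p tfhe-zk-pok --bench pke_v1.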

tfhe-zk-pok/benches/pke_v1.rs

@@ -0,0 +1,90 @@
use criterion::{criterion_group, criterion_main, Criterion};
use tfhe_zk_pok::proofs::pke::{prove, verify};
use tfhe_zk_pok::proofs::ComputeLoad;
use utils::{write_to_json, PKEV1_TEST_PARAMS, PKEV2_TEST_PARAMS};
#[path = "./utils.rs"]
mod utils;
use crate::utils::init_params_v1;
fn bench_pke_v1_prove(c: &mut Criterion) {
let bench_shortname = "pke_zk_proof_v1";
let bench_name = format!("tfhe_zk_pok::{bench_shortname}");
let mut bench_group = c.benchmark_group(&bench_name);
bench_group
.sample_size(15)
.measurement_time(std::time::Duration::from_secs(60));
let rng = &mut rand::thread_rng();
for (params, param_name) in [
(PKEV1_TEST_PARAMS, "PKEV1_TEST_PARAMS"),
(PKEV2_TEST_PARAMS, "PKEV2_TEST_PARAMS"),
] {
let (public_param, public_commit, private_commit, metadata) = init_params_v1(params);
let effective_t = params.t >> 1;
let bits = (params.k as u32) * effective_t.ilog2();
for load in [ComputeLoad::Proof, ComputeLoad::Verify] {
let bench_id = format!("{bench_name}::{param_name}_{bits}_bits_packed_{load}");
bench_group.bench_function(&bench_id, |b| {
b.iter(|| {
prove(
(&public_param, &public_commit),
&private_commit,
&metadata,
load,
rng,
)
})
});
write_to_json(&bench_id, params, param_name, bench_shortname);
}
}
}
fn bench_pke_v1_verify(c: &mut Criterion) {
let bench_shortname = "pke_zk_verify_v1";
let bench_name = format!("tfhe_zk_pok::{bench_shortname}");
let mut bench_group = c.benchmark_group(&bench_name);
bench_group
.sample_size(15)
.measurement_time(std::time::Duration::from_secs(60));
let rng = &mut rand::thread_rng();
for (params, param_name) in [
(PKEV1_TEST_PARAMS, "PKEV1_TEST_PARAMS"),
(PKEV2_TEST_PARAMS, "PKEV2_TEST_PARAMS"),
] {
let (public_param, public_commit, private_commit, metadata) = init_params_v1(params);
let effective_t = params.t >> 1;
let bits = (params.k as u32) * effective_t.ilog2();
for load in [ComputeLoad::Proof, ComputeLoad::Verify] {
let bench_id = format!("{bench_name}::{param_name}_{bits}_bits_packed_{load}");
let proof = prove(
(&public_param, &public_commit),
&private_commit,
&metadata,
load,
rng,
);
bench_group.bench_function(&bench_id, |b| {
b.iter(|| {
verify(&proof, (&public_param, &public_commit), &metadata).unwrap();
})
});
write_to_json(&bench_id, params, param_name, bench_shortname);
}
}
}
criterion_group!(benches_pke_v1, bench_pke_v1_verify, bench_pke_v1_prove);
criterion_main!(benches_pke_v1);

tfhe-zk-pok/benches/pke_v2.rs

@@ -0,0 +1,92 @@
use criterion::{criterion_group, criterion_main, Criterion};
use tfhe_zk_pok::proofs::pke_v2::{prove, verify};
use tfhe_zk_pok::proofs::ComputeLoad;
use utils::{init_params_v2, write_to_json, PKEV1_TEST_PARAMS, PKEV2_TEST_PARAMS};
#[path = "./utils.rs"]
mod utils;
fn bench_pke_v2_prove(c: &mut Criterion) {
let bench_shortname = "pke_zk_proof_v2";
let bench_name = format!("tfhe_zk_pok::{bench_shortname}");
let mut bench_group = c.benchmark_group(&bench_name);
bench_group
.sample_size(15)
.measurement_time(std::time::Duration::from_secs(60));
let rng = &mut rand::thread_rng();
for (params, param_name) in [
(PKEV1_TEST_PARAMS, "PKEV1_TEST_PARAMS"),
(PKEV2_TEST_PARAMS, "PKEV2_TEST_PARAMS"),
] {
let (public_param, public_commit, private_commit, metadata) = init_params_v2(params);
let effective_t = params.t >> 1;
let bits = (params.k as u32) * effective_t.ilog2();
for load in [ComputeLoad::Proof, ComputeLoad::Verify] {
let zk_load = match load {
ComputeLoad::Proof => "compute_load_proof",
ComputeLoad::Verify => "compute_load_verify",
};
let bench_id = format!("{bench_name}::{param_name}_{bits}_bits_packed_{zk_load}");
bench_group.bench_function(&bench_id, |b| {
b.iter(|| {
prove(
(&public_param, &public_commit),
&private_commit,
&metadata,
load,
rng,
)
})
});
write_to_json(&bench_id, params, param_name, bench_shortname);
}
}
}
fn bench_pke_v2_verify(c: &mut Criterion) {
let bench_shortname = "pke_zk_verify_v2";
let bench_name = format!("tfhe_zk_pok::{bench_shortname}");
let mut bench_group = c.benchmark_group(&bench_name);
bench_group
.sample_size(15)
.measurement_time(std::time::Duration::from_secs(60));
let rng = &mut rand::thread_rng();
for (params, param_name) in [
(PKEV1_TEST_PARAMS, "PKEV1_TEST_PARAMS"),
(PKEV2_TEST_PARAMS, "PKEV2_TEST_PARAMS"),
] {
let (public_param, public_commit, private_commit, metadata) = init_params_v2(params);
let effective_t = params.t >> 1;
let bits = (params.k as u32) * effective_t.ilog2();
for load in [ComputeLoad::Proof, ComputeLoad::Verify] {
let bench_id = format!("{bench_name}::{param_name}_{bits}_bits_packed_{load}");
let proof = prove(
(&public_param, &public_commit),
&private_commit,
&metadata,
load,
rng,
);
bench_group.bench_function(&bench_id, |b| {
b.iter(|| {
verify(&proof, (&public_param, &public_commit), &metadata).unwrap();
})
});
write_to_json(&bench_id, params, param_name, bench_shortname);
}
}
}
criterion_group!(benches_pke_v2, bench_pke_v2_verify, bench_pke_v2_prove);
criterion_main!(benches_pke_v2);

tfhe-zk-pok/benches/utils.rs

@@ -0,0 +1,459 @@
#![allow(non_snake_case)]
use std::fs;
use std::path::PathBuf;
use rand::rngs::StdRng;
use rand::{Rng, SeedableRng};
use serde::Serialize;
use tfhe_zk_pok::proofs::pke::{commit, crs_gen, PrivateCommit, PublicCommit, PublicParams};
use tfhe_zk_pok::proofs::pke_v2::{
commit as commitv2, crs_gen as crs_genv2, PrivateCommit as PrivateCommitv2,
PublicCommit as PublicCommitv2, PublicParams as PublicParamsv2,
};
// One of our use cases uses 320 bits of additional metadata
pub const METADATA_LEN: usize = (320 / u8::BITS) as usize;
pub fn polymul_rev(a: &[i64], b: &[i64]) -> Vec<i64> {
assert_eq!(a.len(), b.len());
let d = a.len();
let mut c = vec![0i64; d];
for i in 0..d {
for j in 0..d {
if i + j < d {
c[i + j] = c[i + j].wrapping_add(a[i].wrapping_mul(b[d - j - 1]));
} else {
c[i + j - d] = c[i + j - d].wrapping_sub(a[i].wrapping_mul(b[d - j - 1]));
}
}
}
c
}
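A tiny worked case makes the convention explicit: polymul_rev multiplies a by the coefficient-reversed b in the negacyclic ring Z[X]/(X^d + 1). The following is a hypothetical sanity-check snippet (not part of the commit; it assumes polymul_rev above is in scope):

#[test]
fn polymul_rev_small_case() {
    // d = 2: rev(b) = 4 + 3X, and (1 + 2X)(4 + 3X) = 4 + 11X + 6X^2.
    // Reducing modulo X^2 + 1 (X^2 = -1) gives (4 - 6) + 11X = -2 + 11X.
    let a = [1i64, 2];
    let b = [3i64, 4];
    assert_eq!(polymul_rev(&a, &b), vec![-2, 11]);
}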
#[derive(Clone, Copy, Default, Serialize)]
pub struct CryptoParametersRecord {
pub lwe_dimension: usize,
#[serde(serialize_with = "CryptoParametersRecord::serialize_distribution")]
pub lwe_noise_distribution: u64,
pub message_modulus: u64,
pub carry_modulus: u64,
pub ciphertext_modulus: u64,
}
impl CryptoParametersRecord {
pub fn noise_distribution_as_string(bound: u64) -> String {
format!("TUniform({})", bound.ilog2())
}
pub fn serialize_distribution<S>(
noise_distribution: &u64,
serializer: S,
) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(&Self::noise_distribution_as_string(*noise_distribution))
}
}
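For the parameter sets defined further down, this serializes the noise bound B = 131072 (2^17) as "TUniform(17)" and B = 4398046511104 (2^42) as "TUniform(42)".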
#[derive(Serialize)]
enum PolynomialMultiplication {
Fft,
// Ntt,
}
#[derive(Serialize)]
enum IntegerRepresentation {
Radix,
// Crt,
// Hybrid,
}
#[derive(Serialize)]
enum ExecutionType {
Sequential,
Parallel,
}
#[derive(Serialize)]
enum KeySetType {
Single,
// Multi,
}
#[derive(Serialize)]
enum OperandType {
CipherText,
PlainText,
}
#[derive(Clone, Serialize)]
pub enum OperatorType {
Atomic,
// AtomicPattern,
}
#[derive(Serialize)]
struct BenchmarkParametersRecord {
display_name: String,
crypto_parameters_alias: String,
crypto_parameters: CryptoParametersRecord,
message_modulus: Option<usize>,
carry_modulus: Option<usize>,
ciphertext_modulus: usize,
bit_size: u32,
polynomial_multiplication: PolynomialMultiplication,
precision: u32,
error_probability: f64,
integer_representation: IntegerRepresentation,
decomposition_basis: Vec<u32>,
pbs_algorithm: Option<String>,
execution_type: ExecutionType,
key_set_type: KeySetType,
operand_type: OperandType,
operator_type: OperatorType,
}
/// Writes benchmarks parameters to disk in JSON format.
pub fn write_to_json<T: Into<CryptoParametersRecord>>(
bench_id: &str,
params: T,
params_alias: impl Into<String>,
display_name: impl Into<String>,
) {
let params = params.into();
let execution_type = match bench_id.contains("parallelized") {
true => ExecutionType::Parallel,
false => ExecutionType::Sequential,
};
let operand_type = match bench_id.contains("scalar") {
true => OperandType::PlainText,
false => OperandType::CipherText,
};
let record = BenchmarkParametersRecord {
display_name: display_name.into(),
crypto_parameters_alias: params_alias.into(),
crypto_parameters: params,
message_modulus: Some(params.message_modulus as usize),
carry_modulus: Some(params.carry_modulus as usize),
ciphertext_modulus: 64,
bit_size: params.message_modulus as u32,
polynomial_multiplication: PolynomialMultiplication::Fft,
precision: (params.message_modulus as u32).ilog2(),
error_probability: 2f64.powf(-41.0),
integer_representation: IntegerRepresentation::Radix,
decomposition_basis: Vec::new(),
pbs_algorithm: None, // To be added in future version
execution_type,
key_set_type: KeySetType::Single,
operand_type,
operator_type: OperatorType::Atomic,
};
let mut params_directory = ["benchmarks_parameters", bench_id]
.iter()
.collect::<PathBuf>();
fs::create_dir_all(&params_directory).unwrap();
params_directory.push("parameters.json");
fs::write(params_directory, serde_json::to_string(&record).unwrap()).unwrap();
}
impl From<PkeTestParameters> for CryptoParametersRecord {
fn from(value: PkeTestParameters) -> Self {
let effective = value.t / 2; // Remove padding bit
let (message_modulus, carry_modulus) = match effective.ilog2() {
2 => (2, 2),
4 => (4, 4),
6 => (8, 8),
8 => (16, 16),
_ => panic!("Unsupported parameters for tfhe-zk-pok bench"),
};
Self {
lwe_dimension: value.d,
lwe_noise_distribution: value.B,
message_modulus,
carry_modulus,
ciphertext_modulus: value.q,
}
}
}
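Both test parameter sets below use t = 32 with one padding bit, so effective = 16 and effective.ilog2() = 4, which selects message_modulus = carry_modulus = 4, consistent with the "2b msg, 2b carry, 1b padding" comments on the constants.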
/// parameters needed for a PKE zk proof test
#[derive(Copy, Clone)]
pub struct PkeTestParameters {
pub d: usize,
pub k: usize,
pub B: u64,
pub q: u64,
pub t: u64,
pub msbs_zero_padding_bit_count: u64,
}
/// An encrypted PKE ciphertext
pub struct PkeTestCiphertext {
pub c1: Vec<i64>,
pub c2: Vec<i64>,
}
/// A randomly generated testcase of pke encryption
pub struct PkeTestcase {
pub a: Vec<i64>,
pub e1: Vec<i64>,
pub e2: Vec<i64>,
pub r: Vec<i64>,
pub m: Vec<i64>,
pub b: Vec<i64>,
pub metadata: [u8; METADATA_LEN],
s: Vec<i64>,
}
impl PkeTestcase {
pub fn gen(rng: &mut StdRng, params: PkeTestParameters) -> Self {
let PkeTestParameters {
d,
k,
B,
q: _q,
t,
msbs_zero_padding_bit_count,
} = params;
let effective_cleartext_t = t >> msbs_zero_padding_bit_count;
let a = (0..d).map(|_| rng.gen::<i64>()).collect::<Vec<_>>();
let s = (0..d)
.map(|_| (rng.gen::<u64>() % 2) as i64)
.collect::<Vec<_>>();
let e = (0..d)
.map(|_| (rng.gen::<u64>() % (2 * B)) as i64 - B as i64)
.collect::<Vec<_>>();
let e1 = (0..d)
.map(|_| (rng.gen::<u64>() % (2 * B)) as i64 - B as i64)
.collect::<Vec<_>>();
let e2 = (0..k)
.map(|_| (rng.gen::<u64>() % (2 * B)) as i64 - B as i64)
.collect::<Vec<_>>();
let r = (0..d)
.map(|_| (rng.gen::<u64>() % 2) as i64)
.collect::<Vec<_>>();
let m = (0..k)
.map(|_| (rng.gen::<u64>() % effective_cleartext_t) as i64)
.collect::<Vec<_>>();
let b = polymul_rev(&a, &s)
.into_iter()
.zip(e.iter())
.map(|(x, e)| x.wrapping_add(*e))
.collect::<Vec<_>>();
let mut metadata = [0u8; METADATA_LEN];
metadata.fill_with(|| rng.gen::<u8>());
Self {
a,
e1,
e2,
r,
m,
b,
metadata,
s,
}
}
/// Encrypt using compact pke
pub fn encrypt(&self, params: PkeTestParameters) -> PkeTestCiphertext {
let PkeTestParameters {
d,
k,
B: _B,
q,
t,
msbs_zero_padding_bit_count: _msbs_zero_padding_bit_count,
} = params;
let delta = {
let q = if q == 0 { 1i128 << 64 } else { q as i128 };
// delta takes the encoding with the padding bit
(q / t as i128) as u64
};
let c1 = polymul_rev(&self.a, &self.r)
.into_iter()
.zip(self.e1.iter())
.map(|(x, e1)| x.wrapping_add(*e1))
.collect::<Vec<_>>();
let mut c2 = vec![0i64; k];
for (i, c2) in c2.iter_mut().enumerate() {
let mut dot = 0i64;
for j in 0..d {
let b = if i + j < d {
self.b[d - j - i - 1]
} else {
self.b[2 * d - j - i - 1].wrapping_neg()
};
dot = dot.wrapping_add(self.r[d - j - 1].wrapping_mul(b));
}
*c2 = dot
.wrapping_add(self.e2[i])
.wrapping_add((delta * self.m[i] as u64) as i64);
}
// Check decryption
let mut m_roundtrip = vec![0i64; k];
for i in 0..k {
let mut dot = 0i128;
for j in 0..d {
let c = if i + j < d {
c1[d - j - i - 1]
} else {
c1[2 * d - j - i - 1].wrapping_neg()
};
dot += self.s[d - j - 1] as i128 * c as i128;
}
let q = if q == 0 { 1i128 << 64 } else { q as i128 };
let val = ((c2[i] as i128).wrapping_sub(dot)) * t as i128;
let div = val.div_euclid(q);
let rem = val.rem_euclid(q);
let result = div as i64 + (rem > (q / 2)) as i64;
let result = result.rem_euclid(params.t as i64);
m_roundtrip[i] = result;
}
assert_eq!(self.m, m_roundtrip);
PkeTestCiphertext { c1, c2 }
}
}
/// Compact key params used with pkev2
pub const PKEV2_TEST_PARAMS: PkeTestParameters = PkeTestParameters {
d: 2048,
k: 1024,
B: 131072, // 2**17
q: 0,
t: 32, // 2b msg, 2b carry, 1b padding
msbs_zero_padding_bit_count: 1,
};
/// Compact key params used with pkev1
pub const PKEV1_TEST_PARAMS: PkeTestParameters = PkeTestParameters {
d: 1024,
k: 1024,
B: 4398046511104, // 2**42
q: 0,
t: 32, // 2b msg, 2b carry, 1b padding
msbs_zero_padding_bit_count: 1,
};
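With t = 32 and one padding bit, each of the k = 1024 coefficients carries ilog2(16) = 4 cleartext bits, so both parameter sets produce the 4096_bits_packed label used in the bench ids above.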
type Curve = tfhe_zk_pok::curve_api::Bls12_446;
#[allow(unused)]
pub fn init_params_v1(
test_params: PkeTestParameters,
) -> (
PublicParams<Curve>,
PublicCommit<Curve>,
PrivateCommit<Curve>,
[u8; METADATA_LEN],
) {
let PkeTestParameters {
d,
k,
B,
q,
t,
msbs_zero_padding_bit_count,
} = test_params;
let rng = &mut StdRng::seed_from_u64(0);
let testcase = PkeTestcase::gen(rng, test_params);
let ct = testcase.encrypt(test_params);
let public_param = crs_gen::<Curve>(d, k, B, q, t, msbs_zero_padding_bit_count, rng);
let (public_commit, private_commit) = commit(
testcase.a.clone(),
testcase.b.clone(),
ct.c1.clone(),
ct.c2.clone(),
testcase.r.clone(),
testcase.e1.clone(),
testcase.m.clone(),
testcase.e2.clone(),
&public_param,
rng,
);
(
public_param,
public_commit,
private_commit,
testcase.metadata,
)
}
#[allow(unused)]
pub fn init_params_v2(
test_params: PkeTestParameters,
) -> (
PublicParamsv2<Curve>,
PublicCommitv2<Curve>,
PrivateCommitv2<Curve>,
[u8; METADATA_LEN],
) {
let PkeTestParameters {
d,
k,
B,
q,
t,
msbs_zero_padding_bit_count,
} = test_params;
let rng = &mut StdRng::seed_from_u64(0);
let testcase = PkeTestcase::gen(rng, test_params);
let ct = testcase.encrypt(test_params);
let public_param = crs_genv2::<Curve>(d, k, B, q, t, msbs_zero_padding_bit_count, rng);
let (public_commit, private_commit) = commitv2(
testcase.a.clone(),
testcase.b.clone(),
ct.c1.clone(),
ct.c2.clone(),
testcase.r.clone(),
testcase.e1.clone(),
testcase.m.clone(),
testcase.e2.clone(),
&public_param,
rng,
);
(
public_param,
public_commit,
private_commit,
testcase.metadata,
)
}

tfhe-zk-pok/src/proofs/mod.rs

@@ -8,6 +8,7 @@ use crate::serialization::{
use core::ops::{Index, IndexMut};
use rand::{Rng, RngCore};
use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
use std::fmt::Display;
use tfhe_versionable::Versionize;
#[derive(Clone, Copy, Debug, serde::Serialize, serde::Deserialize, Versionize)]
@@ -22,6 +23,15 @@ pub enum ComputeLoad {
Verify,
}
impl Display for ComputeLoad {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
ComputeLoad::Proof => write!(f, "compute_load_proof"),
ComputeLoad::Verify => write!(f, "compute_load_verify"),
}
}
}
impl<T: ?Sized> OneBased<T> {
pub fn new(inner: T) -> Self
where
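For reference, a minimal standalone sketch (hypothetical, not part of this diff) of the strings the new Display impl produces and how the benches' bench_id format strings pick them up:

use tfhe_zk_pok::proofs::ComputeLoad;

fn main() {
    // Display (and therefore ToString) yields the snake_case names used in the bench ids.
    assert_eq!(ComputeLoad::Proof.to_string(), "compute_load_proof");
    assert_eq!(ComputeLoad::Verify.to_string(), "compute_load_verify");

    // Mirrors the format! call in benches/pke_v1.rs for PKEV1_TEST_PARAMS (4096 bits).
    let bench_id = format!(
        "tfhe_zk_pok::pke_zk_proof_v1::PKEV1_TEST_PARAMS_4096_bits_packed_{}",
        ComputeLoad::Proof
    );
    assert!(bench_id.ends_with("compute_load_proof"));
}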