chore(backward): backward compat data targeted generation

This commit is contained in:
Nicolas Sarlin
2025-10-14 17:04:59 +02:00
committed by Nicolas Sarlin
parent 23d7e0d844
commit 2cdc804670
51 changed files with 4228 additions and 2877 deletions

View File

@@ -24,6 +24,8 @@ BENCH_PARAMS_SET?=default
BENCH_CUSTOM_COMMAND:=
NODE_VERSION=22.6
BACKWARD_COMPAT_DATA_DIR=utils/tfhe-backward-compat-data
BACKWARD_COMPAT_DATA_GEN_VERSION:=$(TFHE_VERSION)
CURRENT_TFHE_VERSION:=$(shell grep '^version[[:space:]]*=' tfhe/Cargo.toml | cut -d '=' -f 2 | xargs)
WASM_PACK_VERSION="0.13.1"
WASM_BINDGEN_VERSION:=$(shell cargo tree --target wasm32-unknown-unknown -e all --prefix none | grep "wasm-bindgen v" | head -n 1 | cut -d 'v' -f2)
WEB_RUNNER_DIR=web-test-runner
@@ -507,7 +509,7 @@ clippy_backward_compat_data: install_rs_check_toolchain # the toolchain is selec
@# Some old crates are x86 specific, only run in that case
@if uname -a | grep -q x86; then \
RUSTFLAGS="$(RUSTFLAGS)" cargo "$(CARGO_RS_CHECK_TOOLCHAIN)" -Z unstable-options \
-C $(BACKWARD_COMPAT_DATA_DIR) clippy --all-targets \
-C $(BACKWARD_COMPAT_DATA_DIR) clippy --all --all-targets \
-- --no-deps -D warnings; \
else \
echo "Cannot run clippy for backward compat crate on non x86 platform for now."; \
@@ -1146,8 +1148,16 @@ test_tfhe_lints: install_cargo_dylint
rustup toolchain install && \
cargo test
# The backward compat data repo holds historical binary data but also rust code to generate and load them.
# Here we use the "patch" functionality of Cargo to make sure the repo used for the data is the same as the one used for the code.
# The backward compat data folder holds historical binary data but also rust code to generate and load them.
.PHONY: gen_backward_compat_data # Re-generate backward compatibility data
gen_backward_compat_data: install_rs_check_toolchain # the toolchain is selected with toolchain.toml
$(BACKWARD_COMPAT_DATA_DIR)/gen_data.sh $(BACKWARD_COMPAT_DATA_GEN_VERSION)
# Instantiate a new backward compat data crate for the current TFHE-rs version, if it does not already exist
.PHONY: new_backward_compat_crate
new_backward_compat_crate: install_rs_check_toolchain # the toolchain is selected with toolchain.toml
cd $(BACKWARD_COMPAT_DATA_DIR) && cargo run -p add_new_version -- --tfhe-version $(CURRENT_TFHE_VERSION)
.PHONY: test_backward_compatibility_ci
test_backward_compatibility_ci: install_rs_build_toolchain
TFHE_BACKWARD_COMPAT_DATA_DIR="../$(BACKWARD_COMPAT_DATA_DIR)" RUSTFLAGS="$(RUSTFLAGS)" cargo $(CARGO_RS_BUILD_TOOLCHAIN) test --profile $(CARGO_PROFILE) \

View File

@@ -7,9 +7,7 @@ publish = false
[dev-dependencies]
tfhe = { path = "../tfhe" }
tfhe-versionable = { path = "../utils/tfhe-versionable" }
tfhe-backward-compat-data = { path = "../utils/tfhe-backward-compat-data", default-features = false, features = [
"load",
] }
tfhe-backward-compat-data = { path = "../utils/tfhe-backward-compat-data" }
rand = { workspace = true }
cargo_toml = "0.22"

View File

@@ -13,13 +13,13 @@ use std::str::FromStr;
use cargo_toml::Manifest;
use tfhe_backward_compat_data::load::{load_tests_metadata, DataFormat, TestFailure, TestResult};
use tfhe_backward_compat_data::{data_dir, dir_for_version, TestType, Testcase};
use tfhe_backward_compat_data::{dir_for_version, TestType, Testcase};
use tfhe_versionable::Unversionize;
fn test_data_dir() -> PathBuf {
// Try to load the test data from the user provided environment variable or default to a
// hardcoded path
let root_dir = if let Ok(dir_str) = env::var("TFHE_BACKWARD_COMPAT_DATA_DIR") {
let mut root_dir = if let Ok(dir_str) = env::var("TFHE_BACKWARD_COMPAT_DATA_DIR") {
PathBuf::from_str(&dir_str).unwrap()
} else {
PathBuf::from_str(env!("CARGO_MANIFEST_DIR"))
@@ -33,7 +33,8 @@ fn test_data_dir() -> PathBuf {
panic!("Wrong backward compat data folder: {}", root_dir.display())
}
data_dir(root_dir)
root_dir.push("data");
root_dir
}
fn tfhe_manifest_dir() -> PathBuf {

View File

@@ -1,106 +1,27 @@
[workspace]
resolver = "2"
members = ["crates/*"]
default-members = ["crates/generate"]
[workspace.package]
version = "0.9.0"
license = "BSD-3-Clause-Clear"
[workspace.dependencies]
clap = { version = "4.5", features = ["derive"] }
semver = { version = "1.0" }
[package]
name = "tfhe-backward-compat-data"
version = "0.8.0"
license = "BSD-3-Clause-Clear"
edition = "2021"
edition = "2024"
license.workspace = true
version.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
# This is a list of tfhe-rs versions we will generate data for. This list will grow over time.
# They are only activated when generating data, with the binary target and the "generate" feature.
tfhe_0_8 = { version = "0.8", features = [
"boolean",
"integer",
"shortint",
"x86_64-unix",
"zk-pok",
"experimental-force_fft_algo_dif4",
], package = "tfhe", optional = true }
tfhe_0_10 = { version = "0.10", features = [
"boolean",
"integer",
"shortint",
"x86_64-unix",
"zk-pok",
"experimental-force_fft_algo_dif4",
], package = "tfhe", optional = true }
tfhe_0_11 = { version = "0.11.2", features = [
"boolean",
"integer",
"shortint",
"zk-pok",
"experimental-force_fft_algo_dif4",
], package = "tfhe", optional = true }
tfhe_1_0 = { version = "=1.0.0", features = [
"boolean",
"integer",
"shortint",
"zk-pok",
"experimental-force_fft_algo_dif4",
], package = "tfhe", optional = true }
# From here on we need to use git tag dependencies because versions are semver compatibles
tfhe_1_1 = { git = "https://github.com/zama-ai/tfhe-rs.git", features = [
"boolean",
"integer",
"shortint",
"zk-pok",
"experimental-force_fft_algo_dif4",
], package = "tfhe", tag = "tfhe-rs-1.1.0", optional = true }
tfhe_1_3 = { git = "https://github.com/zama-ai/tfhe-rs.git", features = [
"boolean",
"integer",
"shortint",
"zk-pok",
"experimental-force_fft_algo_dif4",
], package = "tfhe", tag = "tfhe-rs-1.3.0", optional = true }
tfhe_1_4 = { git = "https://github.com/zama-ai/tfhe-rs.git", features = [
"boolean",
"integer",
"shortint",
"zk-pok",
"experimental-force_fft_algo_dif4",
], package = "tfhe", tag = "tfhe-rs-1.4.1", optional = true }
# TFHE-rs 0.8 and 0.10 use the same version of versionable
tfhe-versionable = { version = "0.3.2", optional = true, package = "tfhe-versionable" }
tfhe_0_11-versionable = { version = "0.4.0", optional = true, package = "tfhe-versionable" }
tfhe_1_0-versionable = { version = "0.5.0", optional = true, package = "tfhe-versionable" }
tfhe_1_1-versionable = { git = "https://github.com/zama-ai/tfhe-rs.git", tag = "tfhe-rs-1.1.0", optional = true, package = "tfhe-versionable" }
tfhe_1_3-versionable = { git = "https://github.com/zama-ai/tfhe-rs.git", tag = "tfhe-rs-1.3.0", optional = true, package = "tfhe-versionable" }
tfhe_1_4-versionable = { git = "https://github.com/zama-ai/tfhe-rs.git", tag = "tfhe-rs-1.4.1", optional = true, package = "tfhe-versionable" }
# other deps
serde = { version = "1.0", features = ["derive"] }
strum = { version = "0.26", features = ["derive"] }
semver = { version = "1.0", optional = true }
ron = { version = "0.8", features = ["integer128"] }
ciborium = "0.2"
bincode = "1.3"
[[bin]]
name = "tfhe-backward-compat-data"
required-features = ["generate"]
[features]
default = ["generate"]
generate = [
"dep:tfhe_0_8",
"dep:tfhe_0_10",
"dep:tfhe_0_11",
"dep:tfhe_1_0",
"dep:tfhe_1_1",
"dep:tfhe_1_3",
"dep:tfhe_1_4",
"dep:tfhe-versionable",
"dep:tfhe_0_11-versionable",
"dep:tfhe_1_0-versionable",
"dep:tfhe_1_1-versionable",
"dep:tfhe_1_3-versionable",
"dep:tfhe_1_4-versionable",
]
load = ["dep:semver"]
strum = { version = "0.26", features = ["derive"] }
semver.workspace = true

View File

@@ -22,12 +22,27 @@ This test will load the data stored in this folder, try to convert them to the l
## Data generation
First you need to make sure that you have pulled the LFS data (see above).
To re-generate the data, run the binary target for this project: `cargo run --release`. The prng is seeded with a fixed seed, so the data should be identical.
To re-generate all the data, run the following command:
```
make gen_backward_compat_data
```
You can generate the data only for a specific TFHE-rs version using an environment variable:
```
TFHE_VERSION=1.4 make gen_backward_compat_data
```
## Adding a test for an existing type
To add a new test for a type that is already tested, you need to create a const global variable with the metadata for that test. The type of metadata depends on the type being tested (for example, the metadata for a test of the `ClientKey` from the `high_level_api` is `HlClientKey`). Then go to the `data_vvv.rs` file (where "vvv" is the TFHE-rs version of the tested data) and update the `gen_xxx_data` method (where "xxx" is the API layer of your test (hl, shortint, integer,...)). In this method, create the object you want to test and serialize it using the `store_versioned_test` macro. Add the metadata of your test to the vector returned by this method.
To add a new test for a type that is already tested, you need to update the `crates/generate_vvv/src/lib.rs` file (where "vvv" is the TFHE-rs version of the tested data). The data must be generated for the earliest version where they are available; for example, the `generate_1_4` crate should define the data generation for all the types introduced with the 1.4 release.
The test will be automatically selected when you run TFHE-rs `make test_backward_compatibility`.
See [here](#adding-a-new-tfhe-rs-version) if the corresponding crate does not exist yet.
First, create a const global variable with the metadata for that test. The type of metadata depends on the type being tested (for example, the metadata for a test of the `ClientKey` from the `high_level_api` is `HlClientKey`). Then update the `gen_xxx_data` method (where "xxx" is the API layer of your test: hl, shortint, integer, ...). In this method, create the object you want to test and serialize it using the `store_versioned_test` function.
If the test requires auxiliary data that is not itself tested (for example a `ClientKey` used to check that values are correct), store it using the `store_versioned_auxiliary` function. This can be skipped if the auxiliary data is already stored by another test.
Finally, add the metadata of your test to the vector returned by this method.
The new test will be automatically selected when you run `make test_backward_compatibility` in TFHE-rs.
### Example
```rust
@@ -40,7 +55,7 @@ const HL_CT1_TEST: HlCiphertextTest = HlCiphertextTest {
clear_value: 0,
};
impl TfhersVersion for V0_6 {
impl TfhersVersion for V0_8 {
// ...
// Impl of trait
// ...
@@ -51,12 +66,15 @@ impl TfhersVersion for V0_6 {
// ...
// 2. Create the type
let ct1 = fheint8::encrypt(HL_CT1_TEST.clear_value, &hl_client_key);
let ct1 = FheInt8::encrypt(HL_CT1_TEST.clear_value, &hl_client_key);
// 3. Store it
store_versioned_test!(&ct1, &dir, &HL_CT1_TEST.test_filename);
store_versioned_test(&ct1, &dir, &HL_CT1_TEST.test_filename);
// 4. Return the metadata
// 4. Store the client key that will be needed when running the test
store_versioned_auxiliary(&hl_client_key, &dir, &HL_CT1_TEST.key_filename);
// 5. Return the metadata
vec![
TestMetadata::HlCiphertext(HL_CT1_TEST),
// ...
@@ -73,7 +91,7 @@ impl TfhersVersion for V0_6 {
### In this folder
To add a test for a type that has not yet been tested, you should create a new type that implements the `TestType` trait. The type should also store the metadata needed for the test, and be serializable. By convention, its name should start with the API layer being tested. The metadata can be anything that can be used to check that the correct value is retrieved after deserialization. However, it should not use a TFHE-rs internal type.
Once the type is created, it should be added to the `TestMetadata` enum. You can then add a new testcase using the procedure in the previous paragraph.
Once the type is created, it should be added to the `TestMetadata` enum. You can then add a new testcase using the procedure in the previous section.
#### Example
```rust
@@ -114,7 +132,7 @@ pub enum TestMetadata {
```
### In TFHE-rs
In TFHE-rs, you should update the test driver (in `tests/backward_compatibility/`) to handle your new test type. To do this, create a function that loads and unversionizes the message, and then checks its value against the metadata provided:
In TFHE-rs, you should update the test driver (in `tests/backward_compatibility/`) to handle your new test type. To do this, create a function that loads and unversionizes the message, and then checks its value against the provided metadata:
#### Example
```rust
@@ -188,15 +206,19 @@ impl TestedModule for Hl {
}
```
## Adding a new tfhe-rs release
To add data for a new released version of tfhe-rs, you should first add a dependency to that version in the `Cargo.toml` of this project. This dependency should only be enabled with the `generate` feature to avoid conflicts during testing.
## Adding a new tfhe-rs version
The backward compat data generation uses a different crate for each TFHE-rs version, to avoid pulling multiple TFHE-rs dependencies into a single crate.
You should then implement the `TfhersVersion` trait for this version. You can use the code in `data_0_6.rs` as an example.
There is a make target to instantiate the crate for the upcoming TFHE-rs version. First, you need to make sure that the version in `tfhe/Cargo.toml` has already been set to the next release (it should not be an already released version).
## Using the test data
The data is stored using git-lfs, but they are not pulled by default. You need to pull them by running:
Then, simply run the following command:
```
make pull_backward_compat_data
make new_backward_compat_crate
```
This will create a new folder in `utils/tfhe-backward-compat-data/crates/` with a templated crate for the new version.
To be able to parse the metadata and check if the loaded data is valid, you should add this crate as a dependency with the `load` feature enabled.
To complete it, simply replace the `// <TODO>` comments with your data generation code:
1. In `src/utils.rs`, copy the `ConvertParams` impl blocks from the previous version. Adapt them to the parameter types of the current version.
2. Update `src/lib.rs`. In this file, complete the `seed_prng` method, which should seed the shortint and boolean prng for this version. Then, add your data generation using the procedure defined [above](#adding-a-test-for-an-existing-type).
Once this version has been properly released, update the `Cargo.toml` to use the git tag of the release instead of the relative path for the `tfhe` crate dependency.
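For illustration, this swap mirrors the commented-out lines already present in the generated crate's `Cargo.toml` template; the version number below is only a placeholder for whichever release was just tagged:
```
# Before the release (template default): path dependencies on the workspace sources
# tfhe = { path = "../../../../tfhe", features = ["boolean", "integer", "shortint", "zk-pok", "experimental-force_fft_algo_dif4"] }
# tfhe-versionable = { path = "../../../tfhe-versionable" }

# After the release: pinned git tag dependencies
tfhe = { git = "https://github.com/zama-ai/tfhe-rs.git", tag = "tfhe-rs-1.5.0", features = [
    "boolean",
    "integer",
    "shortint",
    "zk-pok",
    "experimental-force_fft_algo_dif4",
] }
tfhe-versionable = { git = "https://github.com/zama-ai/tfhe-rs.git", tag = "tfhe-rs-1.5.0" }
```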

View File

@@ -0,0 +1,12 @@
[package]
name = "add_new_version"
edition = "2024"
version.workspace = true
license.workspace = true
[dependencies]
clap.workspace = true
minijinja = { version = "1.0", features = ["loader"] }
serde = { version = "1.0", features = ["derive"] }
cargo_toml = "0.22"
semver.workspace = true

View File

@@ -0,0 +1,217 @@
//! Instantiate a new data generation crate for a version of TFHE-rs.
use cargo_toml::Manifest;
use clap::Parser;
use minijinja::Environment;
use semver::Version;
use serde::Serialize;
use std::error::Error;
use std::path::{Path, PathBuf};
use std::{env, fs};
/// Dir where the templates are found, relative to this crate's Cargo.toml
const RELATIVE_TEMPLATE_PATH: &str = "template";
/// Dir where the generated crates are stored, relative to this crate's Cargo.toml
const RELATIVE_CRATES_PATH: &str = "..";
/// Variables that should be replaced in the templates
#[derive(Serialize)]
struct TemplateVars {
/// Short version: "1.4"
tfhe_version_short: String,
/// Complete version: "1.4.0"
tfhe_version_exact: String,
/// Short version with an underscore: "1_4"
tfhe_version_underscored: String,
}
#[derive(Debug, Clone)]
struct CrateVersion(Version);
impl CrateVersion {
fn exact(&self) -> String {
format!("{}.{}.{}", self.0.major, self.0.minor, self.0.patch)
}
fn short(&self) -> String {
format!("{}.{}", self.0.major, self.0.minor)
}
fn underscored(&self) -> String {
format!("{}_{}", self.0.major, self.0.minor)
}
}
#[derive(Parser, Debug)]
struct Args {
#[arg(long, value_parser = parse_version, help = "TFHE-rs version to instantiate, as <major.minor.patch> (e.g., '1.4.0')")]
tfhe_version: CrateVersion,
}
fn parse_version(input: &str) -> Result<CrateVersion, String> {
Version::parse(input)
.map_err(|e| e.to_string())
.map(CrateVersion)
}
/// Recursively processes a directory, rendering templates and copying files.
fn process_dir(
env: &Environment,
vars: &TemplateVars,
template_dir: &Path,
src_dir: &Path,
crate_dir: &Path,
) -> Result<(), Box<dyn Error>> {
fs::create_dir_all(crate_dir)?;
for entry in fs::read_dir(src_dir)? {
let entry = entry?;
let src_path = entry.path();
let dest_path = crate_dir.join(entry.file_name());
if src_path.is_dir() {
process_dir(env, vars, template_dir, &src_path, &dest_path)?;
} else if src_path.is_file() {
// Template files should be processed
if src_path.extension().is_some_and(|s| s == "j2") {
let mut dest_path_rendered = dest_path.clone();
// Remove the extra ".j2" extension
dest_path_rendered.set_extension("");
println!(
"Rendering {} -> {}",
src_path.display(),
dest_path_rendered.display()
);
let template_name = src_path
.strip_prefix(template_dir)?
.to_str()
.ok_or("Invalid template path")?;
let template = env.get_template(template_name)?;
let rendered_content = template.render(vars)?;
fs::write(&dest_path_rendered, rendered_content)?;
} else {
// Regular files are simply copied
println!("Copying {} -> {}", src_path.display(), dest_path.display());
fs::copy(&src_path, &dest_path)?;
}
}
}
Ok(())
}
/// Gets the list of previously implemented versions by parsing the crates directory
fn get_existing_versions(crates_dir: &Path) -> Result<Vec<CrateVersion>, Box<dyn Error>> {
fs::read_dir(crates_dir)?
.filter_map(|entry| entry.ok())
.filter_map(|entry| {
entry
.file_name()
.to_string_lossy()
.strip_prefix("generate_")
.map(|s| s.replace("_", "."))
})
.flat_map(|version_str| {
parse_version(&version_str)
.map(|version| check_existing_version(crates_dir, version.clone()).map(|_| version))
})
.collect()
}
/// Returns an error if the previous version is still using a path dependency in its Cargo.toml
fn check_existing_version(crates_dir: &Path, version: CrateVersion) -> Result<(), Box<dyn Error>> {
let crate_name = format!("generate_{}", version.underscored());
let previous_version_crate_dir = crates_dir.join(&crate_name);
let manifest = Manifest::from_path(previous_version_crate_dir.join("Cargo.toml"))?;
if manifest.dependencies["tfhe"]
.detail()
.ok_or(format!("Missing TFHE dependency in {crate_name}"))?
.path
.is_some()
{
return Err(format!(
"{crate_name} is still using a path dependency for TFHE, please fix it and re-run this \
command.\n\n"
)
.into());
}
if manifest.dependencies["tfhe-versionable"]
.detail()
.ok_or(format!(
"Missing tfhe-versionable dependency in {crate_name}"
))?
.path
.is_some()
{
return Err(format!(
"{crate_name} is still using a path dependency for versionable, please fix it and \
re-run this command.\n\n"
)
.into());
}
Ok(())
}
fn main() -> Result<(), Box<dyn Error>> {
let args = Args::parse();
let tfhe_version_exact = args.tfhe_version.exact();
let tfhe_version_underscored = args.tfhe_version.underscored();
let crate_name = format!("generate_{}", tfhe_version_underscored);
let base_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
let all_versions_dir = base_dir.join(RELATIVE_CRATES_PATH);
let version_crate_dir = all_versions_dir.join(&crate_name);
if version_crate_dir.exists() {
return Err(format!(
"Output directory '{}' already exists.",
version_crate_dir.display()
)
.into());
}
for version in get_existing_versions(&all_versions_dir)? {
check_existing_version(&all_versions_dir, version)?
}
println!(
"Instantiating data generation crate '{}' for TFHE-rs version {}\n",
crate_name, tfhe_version_exact
);
let vars = TemplateVars {
tfhe_version_exact,
tfhe_version_short: args.tfhe_version.short(),
tfhe_version_underscored,
};
let template_dir = base_dir.join(RELATIVE_TEMPLATE_PATH);
let mut env = Environment::new();
env.set_keep_trailing_newline(true);
env.set_loader(minijinja::path_loader(&template_dir));
process_dir(
&env,
&vars,
&template_dir,
&template_dir,
&version_crate_dir,
)?;
println!(
"\nSuccessfully instantiated crate in '{}'\n
Now you can edit the `// <TODO>` comments to add the code to generate your data",
version_crate_dir.display()
);
Ok(())
}
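As a usage sketch, the `new_backward_compat_crate` Makefile target runs an equivalent of the following command (the version value here is only a placeholder; the target passes the current version read from `tfhe/Cargo.toml`):
```
cd utils/tfhe-backward-compat-data && cargo run -p add_new_version -- --tfhe-version 1.5.0
```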

View File

@@ -0,0 +1,30 @@
[package]
name = "generate_{{ tfhe_version_underscored }}"
edition = "2024"
license.workspace = true
version.workspace = true
[dependencies]
clap.workspace = true
# TFHE-rs
tfhe = { features = [
"boolean",
"integer",
"shortint",
"zk-pok",
"experimental-force_fft_algo_dif4",
], path = "../../../../tfhe" }
tfhe-versionable = { path = "../../../tfhe-versionable" }
# Uncomment this and remove the lines above once the current tfhe-rs version has been released
# tfhe = { git = "https://github.com/zama-ai/tfhe-rs.git", tag = "tfhe-rs-{{ tfhe_version_exact }}", features = [
# "boolean",
# "integer",
# "shortint",
# "zk-pok",
# "experimental-force_fft_algo_dif4",
# ] }
# tfhe-versionable = { git = "https://github.com/zama-ai/tfhe-rs.git", tag = "tfhe-rs-{{ tfhe_version_exact }}" }
tfhe-backward-compat-data = { path = "../.." }

View File

@@ -0,0 +1,102 @@
mod utils;
use utils::*;
use std::fs::create_dir_all;
use std::path::Path;
use tfhe_backward_compat_data::generate::*;
use tfhe_backward_compat_data::*;
// <TODO> To complete this file, you can use examples in the data generation crates for
// <TODO> other versions.
// <TODO> Here you can add constants that define the metadata for your data tests.
// <TODO> The metadata should use one of the types inside the `TestMetadata` enum in
// <TODO> `tfhe-backward-compat-data/src/lib.rs`.
// <TODO> Feel free to add a new variant if none of the existing ones cover your needs.
// <TODO>
// <TODO> Example:
// <TODO> const SHORTINT_CLIENT_KEY_FILENAME: &str = "client_key";
// <TODO>
// <TODO> const SHORTINT_CLIENTKEY_TEST: ShortintClientKeyTest = ShortintClientKeyTest {
// <TODO> test_filename: Cow::Borrowed(SHORTINT_CLIENT_KEY_FILENAME),
// <TODO> parameters: VALID_TEST_PARAMS,
// <TODO> };
// <TODO> const SHORTINT_CT_TEST: ShortintCiphertextTest = ShortintCiphertextTest {
// <TODO> test_filename: Cow::Borrowed("ct"),
// <TODO> key_filename: Cow::Borrowed(SHORTINT_CLIENT_KEY_FILENAME),
// <TODO> clear_value: 0,
// <TODO> };
pub struct V{{ tfhe_version_underscored }};
impl TfhersVersion for V{{ tfhe_version_underscored }} {
const VERSION_NUMBER: &'static str = "{{ tfhe_version_short }}";
fn seed_prng(seed: u128) {
// <TODO> Include here the code required to seed the prng for this version of TFHE-rs.
// <TODO> This might require seeding the shortint and boolean engines.
// <TODO>
// <TODO> Example:
// <TODO> let mut seeder = DeterministicSeeder::<DefaultRandomGenerator>::new(Seed(seed));
// <TODO> let shortint_engine = ShortintEngine::new_from_seeder(&mut seeder);
// <TODO> ShortintEngine::with_thread_local_mut(|local_engine| {
// <TODO> let _ = std::mem::replace(local_engine, shortint_engine);
// <TODO> });
// <TODO>
// <TODO> let boolean_engine = BooleanEngine::new_from_seeder(&mut seeder);
// <TODO> BooleanEngine::replace_thread_local(boolean_engine);
todo!()
}
// <TODO> You now need to generate the data for the shortint and hl layers. This means:
// <TODO> - Create the TFHE-rs objects you want to test from the metadata defined above
// <TODO> - Store them using `store_versioned_test`
// <TODO> - If the test needs some auxiliary data (such as a client key), store it with
// <TODO> `store_versioned_auxiliary`
// <TODO> - Return all the metadata in a Vec
// <TODO>
// <TODO> Example:
// <TODO> fn gen_shortint_data<P: AsRef<Path>>(base_data_dir: P) -> Vec<TestMetadata> {
// <TODO> let dir = Self::data_dir(base_data_dir).join(SHORTINT_MODULE_NAME);
// <TODO> create_dir_all(&dir).unwrap();
// <TODO>
// <TODO> // generate a client key
// <TODO> let shortint_client_key =
// <TODO> shortint::ClientKey::new(SHORTINT_CLIENTKEY_TEST.parameters.convert());
// <TODO>
// <TODO> store_versioned_test(
// <TODO> &shortint_client_key,
// <TODO> &dir,
// <TODO> &SHORTINT_CLIENTKEY_TEST.test_filename,
// <TODO> );
// <TODO>
// <TODO> // generate a ciphertext
// <TODO> let ct = shortint_client_key.encrypt(SHORTINT_CT_TEST.clear_value);
// <TODO>
// <TODO> // Serialize it
// <TODO> store_versioned_test(&ct, &dir, &SHORTINT_CT_TEST.test_filename);
// <TODO>
// <TODO> vec![
// <TODO> TestMetadata::ShortintClientKey(SHORTINT_CLIENTKEY_TEST),
// <TODO> TestMetadata::ShortintCiphertext(SHORTINT_CT_TEST),
// <TODO> ]
// <TODO> }
fn gen_shortint_data<P: AsRef<Path>>(base_data_dir: P) -> Vec<TestMetadata> {
// <TODO> Remove this if you do not generate shortint data for this version
let dir = Self::data_dir(base_data_dir).join(SHORTINT_MODULE_NAME);
create_dir_all(&dir).unwrap();
// <TODO> Add shortint data here
Vec::new()
}
fn gen_hl_data<P: AsRef<Path>>(base_data_dir: P) -> Vec<TestMetadata> {
// <TODO> Remove this if you do not generate hl data for this version
let dir = Self::data_dir(base_data_dir).join(HL_MODULE_NAME);
create_dir_all(&dir).unwrap();
// <TODO> Add hl data here
Vec::new()
}
}

View File

@@ -0,0 +1,36 @@
use std::fs::remove_dir_all;
use std::path::PathBuf;
use clap::Parser;
use generate_{{ tfhe_version_underscored }}::V{{ tfhe_version_underscored }};
use tfhe_backward_compat_data::dir_for_version;
use tfhe_backward_compat_data::generate::{
display_metadata, gen_all_data, update_metadata_for_version,
};
#[derive(Parser, Debug)]
struct Args {
/// The path where the backward data should be stored
#[arg(long)]
data_path: PathBuf,
/// Output metadata to stdout instead of writing them to the ron file
#[arg(long, action)]
stdout: bool,
}
fn main() {
let args = Args::parse();
let version_dir = dir_for_version(&args.data_path, "{{ tfhe_version_short }}");
// Ignore if directory does not exist
let _ = remove_dir_all(&version_dir);
let data = gen_all_data::<V{{ tfhe_version_underscored }}>(&args.data_path);
if args.stdout {
display_metadata(&data);
} else {
update_metadata_for_version(data, args.data_path);
}
}

View File

@@ -0,0 +1,35 @@
use std::path::Path;
use tfhe_versionable::Versionize;
use tfhe_backward_compat_data::generate::*;
use tfhe_backward_compat_data::*;
pub(crate) fn store_versioned_test<Data: Versionize + 'static, P: AsRef<Path>>(
msg: &Data,
dir: P,
test_filename: &str,
) {
generic_store_versioned_test(Versionize::versionize, msg, dir, test_filename)
}
#[allow(dead_code)]
pub(crate) fn store_versioned_auxiliary<Data: Versionize + 'static, P: AsRef<Path>>(
msg: &Data,
dir: P,
test_filename: &str,
) {
generic_store_versioned_auxiliary(Versionize::versionize, msg, dir, test_filename)
}
/// This trait allows converting the version-independent parameter types defined in
/// `tfhe-backward-compat-data` into the equivalent TFHE-rs parameters for this version.
///
/// This is similar to `Into` but allows circumventing the orphan rule.
pub(crate) trait ConvertParams<TfheRsParams> {
fn convert(self) -> TfheRsParams;
}
// <TODO> Add here the impls of ConvertParams for the TestXXXParameterSet types that you need.
// <TODO> You can start by simply copying the implementations of this trait from the crate for
// <TODO> the previous version, and then fix any parameter types that have been updated.
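// <TODO> Example (a sketch copied from the generate_0_10 crate's utils.rs in this commit; it
// <TODO> also needs the matching `tfhe` parameter imports such as DynamicDistribution,
// <TODO> StandardDev, TUniform and UnsignedInteger):
// <TODO>
// <TODO> impl<Scalar> ConvertParams<DynamicDistribution<Scalar>> for TestDistribution
// <TODO> where
// <TODO>     Scalar: UnsignedInteger,
// <TODO> {
// <TODO>     fn convert(self) -> DynamicDistribution<Scalar> {
// <TODO>         match self {
// <TODO>             TestDistribution::Gaussian { stddev } => {
// <TODO>                 DynamicDistribution::new_gaussian_from_std_dev(StandardDev(stddev))
// <TODO>             }
// <TODO>             TestDistribution::TUniform { bound_log2 } => {
// <TODO>                 DynamicDistribution::TUniform(TUniform::new(bound_log2))
// <TODO>             }
// <TODO>         }
// <TODO>     }
// <TODO> }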

View File

@@ -0,0 +1,11 @@
[package]
name = "generate"
edition = "2024"
license.workspace = true
version.workspace = true
[dependencies]
clap.workspace = true
tfhe-backward-compat-data = { path = "../.." }

View File

@@ -0,0 +1,75 @@
//! Generates data for all supported TFHE-rs versions by calling the corresponding "generate_VERS" utility.
//! Collects the metadata into ron files.
use clap::Parser;
use std::fs;
use std::path::PathBuf;
use std::process::{Command, Stdio};
use tfhe_backward_compat_data::generate::{load_metadata_from_str, store_metadata};
/// Dir where the per-version data generation crates are found, relative to this crate's Cargo.toml
const RELATIVE_CRATES_PATH: &str = "..";
#[derive(Parser, Debug)]
struct Args {
#[arg(long)]
data_path: PathBuf,
}
fn main() {
let args = Args::parse();
let base_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
let crates_dir = base_dir.join(RELATIVE_CRATES_PATH);
// Parse the list of versions that we can generate data for
let all_versions = fs::read_dir(crates_dir)
.unwrap()
.filter_map(|entry| entry.ok())
.filter_map(|entry| {
let name = entry.file_name();
if name.to_str()?.starts_with("generate_") {
Some(entry.path())
} else {
None
}
});
// Run the data generation tool for all versions
let mut handles = vec![];
for dir in all_versions {
let data_path = args.data_path.clone();
let name = dir.file_name().unwrap().display();
println!("Generating data from {}", dir.display());
let handle = Command::new("cargo")
.arg("run")
.arg("--quiet")
.arg("--release")
.arg("--")
.arg("--data-path")
.arg(data_path)
.arg("--stdout")
.current_dir(&dir)
.stdout(Stdio::piped())
.spawn()
.unwrap_or_else(|_| panic!("{} failed to execute", name));
handles.push(handle);
}
// Collect the metadata
let mut testcases = vec![];
for handle in handles {
match handle.wait_with_output() {
Ok(ron_output) => testcases.extend(load_metadata_from_str(
str::from_utf8(&ron_output.stdout).unwrap(),
)),
Err(e) => {
eprintln!("Failed to generate data: {}", e);
}
}
}
store_metadata(testcases, args.data_path);
}

View File

@@ -0,0 +1,21 @@
[package]
name = "generate_0_10"
edition = "2024"
version = "0.9.0"
license = "BSD-3-Clause-Clear"
[dependencies]
clap = { version = "4.5", features = ["derive"] }
# TFHE-rs
tfhe = { git = "https://github.com/zama-ai/tfhe-rs.git", tag = "tfhe-rs-0.10.0", features = [
"boolean",
"integer",
"shortint",
"x86_64-unix",
"zk-pok",
"experimental-force_fft_algo_dif4",
] }
tfhe-versionable = { git = "https://github.com/zama-ai/tfhe-rs.git", tag = "tfhe-rs-0.10.0" }
tfhe-backward-compat-data = { path = "../.." }

View File

@@ -0,0 +1,83 @@
mod utils;
use tfhe::boolean::engine::BooleanEngine;
use tfhe::core_crypto::commons::generators::DeterministicSeeder;
use tfhe::core_crypto::prelude::ActivatedRandomGenerator;
use tfhe::shortint::engine::ShortintEngine;
use tfhe::{CompressedServerKey, Seed};
use utils::*;
use std::borrow::Cow;
use std::fs::create_dir_all;
use std::path::Path;
use tfhe_backward_compat_data::generate::*;
use tfhe_backward_compat_data::*;
const HL_CLIENTKEY_TEST: HlClientKeyTest = HlClientKeyTest {
test_filename: Cow::Borrowed("client_key"),
parameters: VALID_TEST_PARAMS_TUNIFORM,
};
const HL_COMPRESSED_SERVERKEY_TEST: HlServerKeyTest = HlServerKeyTest {
test_filename: Cow::Borrowed("compressed_server_key"),
client_key_filename: Cow::Borrowed("client_key.cbor"),
rerand_cpk_filename: None,
compressed: true,
};
const HL_SERVERKEY_WITH_COMPRESSION_TEST: HlServerKeyTest = HlServerKeyTest {
test_filename: Cow::Borrowed("server_key_with_compression"),
client_key_filename: Cow::Borrowed("client_key.cbor"),
rerand_cpk_filename: None,
compressed: false,
};
pub struct V0_10;
impl TfhersVersion for V0_10 {
const VERSION_NUMBER: &'static str = "0.10";
fn seed_prng(seed: u128) {
let mut seeder = DeterministicSeeder::<ActivatedRandomGenerator>::new(Seed(seed));
let shortint_engine = ShortintEngine::new_from_seeder(&mut seeder);
ShortintEngine::with_thread_local_mut(|local_engine| {
let _ = std::mem::replace(local_engine, shortint_engine);
});
let boolean_engine = BooleanEngine::new_from_seeder(&mut seeder);
BooleanEngine::replace_thread_local(boolean_engine);
}
fn gen_shortint_data<P: AsRef<Path>>(_base_data_dir: P) -> Vec<crate::TestMetadata> {
Vec::new()
}
fn gen_hl_data<P: AsRef<Path>>(base_data_dir: P) -> Vec<crate::TestMetadata> {
let dir = Self::data_dir(base_data_dir).join(HL_MODULE_NAME);
create_dir_all(&dir).unwrap();
let config =
tfhe::ConfigBuilder::with_custom_parameters(HL_CLIENTKEY_TEST.parameters.convert())
.enable_compression(VALID_TEST_PARAMS_TUNIFORM_COMPRESSION.convert())
.build();
let (hl_client_key, hl_server_key) = tfhe::generate_keys(config);
let compressed_server_key = CompressedServerKey::new(&hl_client_key);
store_versioned_test(&hl_client_key, &dir, &HL_CLIENTKEY_TEST.test_filename);
store_versioned_test(
&compressed_server_key,
&dir,
&HL_COMPRESSED_SERVERKEY_TEST.test_filename,
);
store_versioned_test(
&hl_server_key,
&dir,
&HL_SERVERKEY_WITH_COMPRESSION_TEST.test_filename,
);
vec![
TestMetadata::HlClientKey(HL_CLIENTKEY_TEST),
TestMetadata::HlServerKey(HL_COMPRESSED_SERVERKEY_TEST),
TestMetadata::HlServerKey(HL_SERVERKEY_WITH_COMPRESSION_TEST),
]
}
}

View File

@@ -0,0 +1,36 @@
use std::fs::remove_dir_all;
use std::path::PathBuf;
use clap::Parser;
use generate_0_10::V0_10;
use tfhe_backward_compat_data::dir_for_version;
use tfhe_backward_compat_data::generate::{
display_metadata, gen_all_data, update_metadata_for_version,
};
#[derive(Parser, Debug)]
struct Args {
/// The path where the backward data should be stored
#[arg(long)]
data_path: PathBuf,
/// Output metadata to stdout instead of writing them to the ron file
#[arg(long, action)]
stdout: bool,
}
fn main() {
let args = Args::parse();
let version_dir = dir_for_version(&args.data_path, "0.10");
// Ignore if directory does not exist
let _ = remove_dir_all(&version_dir);
let data = gen_all_data::<V0_10>(&args.data_path);
if args.stdout {
display_metadata(&data);
} else {
update_metadata_for_version(data, args.data_path);
}
}

View File

@@ -0,0 +1,196 @@
use std::path::Path;
use tfhe::core_crypto::prelude::{
CiphertextModulusLog, LweCiphertextCount, TUniform, UnsignedInteger,
};
use tfhe::shortint::parameters::{
CarryModulus, CiphertextModulus, ClassicPBSParameters, CompressionParameters,
DecompositionBaseLog, DecompositionLevelCount, DynamicDistribution, EncryptionKeyChoice,
GlweDimension, LweBskGroupingFactor, LweDimension, MaxNoiseLevel, MessageModulus,
PolynomialSize, StandardDev,
};
use tfhe::shortint::{MultiBitPBSParameters, PBSParameters};
use tfhe_versionable::Versionize;
use tfhe_backward_compat_data::generate::*;
use tfhe_backward_compat_data::*;
pub(crate) fn store_versioned_test<Data: Versionize + 'static, P: AsRef<Path>>(
msg: &Data,
dir: P,
test_filename: &str,
) {
generic_store_versioned_test(Versionize::versionize, msg, dir, test_filename)
}
#[allow(dead_code)]
pub(crate) fn store_versioned_auxiliary<Data: Versionize + 'static, P: AsRef<Path>>(
msg: &Data,
dir: P,
test_filename: &str,
) {
generic_store_versioned_auxiliary(Versionize::versionize, msg, dir, test_filename)
}
/// This trait allows converting the version-independent parameter types defined in
/// `tfhe-backward-compat-data` into the equivalent TFHE-rs parameters for this version.
///
/// This is similar to `Into` but allows circumventing the orphan rule.
pub(crate) trait ConvertParams<TfheRsParams> {
fn convert(self) -> TfheRsParams;
}
impl<Scalar> ConvertParams<DynamicDistribution<Scalar>> for TestDistribution
where
Scalar: UnsignedInteger,
{
fn convert(self) -> DynamicDistribution<Scalar> {
match self {
TestDistribution::Gaussian { stddev } => {
DynamicDistribution::new_gaussian_from_std_dev(StandardDev(stddev))
}
TestDistribution::TUniform { bound_log2 } => {
DynamicDistribution::TUniform(TUniform::new(bound_log2))
}
}
}
}
impl ConvertParams<ClassicPBSParameters> for TestClassicParameterSet {
fn convert(self) -> ClassicPBSParameters {
let TestClassicParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
encryption_key_choice,
modulus_switch_noise_reduction_params: _,
} = self;
ClassicPBSParameters {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.convert(),
glwe_noise_distribution: glwe_noise_distribution.convert(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus),
carry_modulus: CarryModulus(carry_modulus),
max_noise_level: MaxNoiseLevel::new(max_noise_level),
log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
}
}
}
impl ConvertParams<MultiBitPBSParameters> for TestMultiBitParameterSet {
fn convert(self) -> MultiBitPBSParameters {
let TestMultiBitParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
encryption_key_choice,
grouping_factor,
} = self;
MultiBitPBSParameters {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.convert(),
glwe_noise_distribution: glwe_noise_distribution.convert(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus),
carry_modulus: CarryModulus(carry_modulus),
max_noise_level: MaxNoiseLevel::new(max_noise_level),
log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
grouping_factor: LweBskGroupingFactor(grouping_factor),
deterministic_execution: false,
}
}
}
impl ConvertParams<PBSParameters> for TestParameterSet {
fn convert(self) -> PBSParameters {
match self {
TestParameterSet::TestClassicParameterSet(test_classic_parameter_set) => {
PBSParameters::PBS(test_classic_parameter_set.convert())
}
TestParameterSet::TestMultiBitParameterSet(test_parameter_set_multi_bit) => {
PBSParameters::MultiBitPBS(test_parameter_set_multi_bit.convert())
}
TestParameterSet::TestKS32ParameterSet(_) => {
panic!("unsupported ks32 parameters for version")
}
}
}
}
impl ConvertParams<CompressionParameters> for TestCompressionParameterSet {
fn convert(self) -> CompressionParameters {
let TestCompressionParameterSet {
br_level,
br_base_log,
packing_ks_level,
packing_ks_base_log,
packing_ks_polynomial_size,
packing_ks_glwe_dimension,
lwe_per_glwe,
storage_log_modulus,
packing_ks_key_noise_distribution,
} = self;
CompressionParameters {
br_level: DecompositionLevelCount(br_level),
br_base_log: DecompositionBaseLog(br_base_log),
packing_ks_level: DecompositionLevelCount(packing_ks_level),
packing_ks_base_log: DecompositionBaseLog(packing_ks_base_log),
packing_ks_polynomial_size: PolynomialSize(packing_ks_polynomial_size),
packing_ks_glwe_dimension: GlweDimension(packing_ks_glwe_dimension),
lwe_per_glwe: LweCiphertextCount(lwe_per_glwe),
storage_log_modulus: CiphertextModulusLog(storage_log_modulus),
packing_ks_key_noise_distribution: packing_ks_key_noise_distribution.convert(),
}
}
}

View File

@@ -0,0 +1,20 @@
[package]
name = "generate_0_11"
edition = "2024"
version = "0.9.0"
license = "BSD-3-Clause-Clear"
[dependencies]
clap = { version = "4.5", features = ["derive"] }
# TFHE-rs
tfhe = { git = "https://github.com/zama-ai/tfhe-rs.git", tag = "tfhe-rs-0.11.0", features = [
"boolean",
"integer",
"shortint",
"zk-pok",
"experimental-force_fft_algo_dif4",
] }
tfhe-versionable = { git = "https://github.com/zama-ai/tfhe-rs.git", tag = "tfhe-rs-0.11.0" }
tfhe-backward-compat-data = { path = "../.." }

View File

@@ -0,0 +1,157 @@
mod utils;
use tfhe::boolean::engine::BooleanEngine;
use tfhe::core_crypto::commons::generators::DeterministicSeeder;
use tfhe::core_crypto::commons::math::random::RandomGenerator;
use tfhe::core_crypto::prelude::{DefaultRandomGenerator, LweCiphertextCount, TUniform};
use tfhe::shortint::engine::ShortintEngine;
use tfhe::shortint::parameters::{CiphertextModulus, LweDimension};
use tfhe::zk::{CompactPkeCrs, ZkComputeLoad, ZkMSBZeroPaddingBitCount};
use tfhe::{
ClientKey, CompactPublicKey, ProvenCompactCiphertextList, Seed, ServerKey, set_server_key,
};
use utils::*;
use std::borrow::Cow;
use std::fs::create_dir_all;
use std::path::Path;
use tfhe_backward_compat_data::generate::*;
use tfhe_backward_compat_data::*;
const HL_CLIENTKEY_TEST: HlClientKeyTest = HlClientKeyTest {
test_filename: Cow::Borrowed("client_key"),
parameters: VALID_TEST_PARAMS_TUNIFORM,
};
// The CRS is structurally equivalent to the public params type so we reuse the test
const ZK_PKE_CRS_TEST: ZkPkePublicParamsTest = ZkPkePublicParamsTest {
test_filename: Cow::Borrowed("zk_pke_crs"),
lwe_dimension: VALID_TEST_PARAMS_TUNIFORM.polynomial_size()
* VALID_TEST_PARAMS_TUNIFORM.glwe_dimension(), // Lwe dimension of the "big" key is glwe dimension * polynomial size
max_num_cleartext: 16,
noise_bound: match VALID_TEST_PARAMS_TUNIFORM.lwe_noise_distribution() {
TestDistribution::Gaussian { .. } => unreachable!(),
TestDistribution::TUniform { bound_log2 } => bound_log2 as usize,
},
ciphertext_modulus: VALID_TEST_PARAMS_TUNIFORM.ciphertext_modulus(),
plaintext_modulus: VALID_TEST_PARAMS_TUNIFORM.message_modulus()
* VALID_TEST_PARAMS_TUNIFORM.carry_modulus()
* 2, // *2 for padding bit
padding_bit_count: 1,
};
const HL_PROVEN_COMPACTLIST_TEST_ZKV2: HlHeterogeneousCiphertextListTest =
HlHeterogeneousCiphertextListTest {
test_filename: Cow::Borrowed("hl_proven_heterogeneous_list_zkv2"),
key_filename: HL_CLIENTKEY_TEST.test_filename,
clear_values: Cow::Borrowed(&[17u8 as u64, -12i8 as u64, false as u64, true as u64]),
data_kinds: Cow::Borrowed(&[
DataKind::Unsigned,
DataKind::Signed,
DataKind::Bool,
DataKind::Bool,
]),
compressed: false,
proof_info: Some(PkeZkProofAuxiliaryInfo {
public_key_filename: Cow::Borrowed("public_key"),
params_filename: Cow::Borrowed("zk_pke_crs"),
metadata: Cow::Borrowed("2vdrawkcab"),
}),
};
pub struct V0_11;
impl TfhersVersion for V0_11 {
const VERSION_NUMBER: &'static str = "0.11";
fn seed_prng(seed: u128) {
let mut seeder = DeterministicSeeder::<DefaultRandomGenerator>::new(Seed(seed));
let shortint_engine = ShortintEngine::new_from_seeder(&mut seeder);
ShortintEngine::with_thread_local_mut(|local_engine| {
let _ = std::mem::replace(local_engine, shortint_engine);
});
let boolean_engine = BooleanEngine::new_from_seeder(&mut seeder);
BooleanEngine::replace_thread_local(boolean_engine);
}
fn gen_shortint_data<P: AsRef<Path>>(_base_data_dir: P) -> Vec<TestMetadata> {
Vec::new()
}
fn gen_hl_data<P: AsRef<Path>>(base_data_dir: P) -> Vec<TestMetadata> {
let dir = Self::data_dir(base_data_dir).join(HL_MODULE_NAME);
create_dir_all(&dir).unwrap();
let mut zk_rng: RandomGenerator<DefaultRandomGenerator> =
RandomGenerator::new(Seed(PRNG_SEED));
// Generate a compact public key needed to create a compact list
let config =
tfhe::ConfigBuilder::with_custom_parameters(VALID_TEST_PARAMS_TUNIFORM.convert())
.build();
let hl_client_key = ClientKey::generate(config);
let hl_server_key = ServerKey::new(&hl_client_key);
set_server_key(hl_server_key.clone());
let compact_pub_key = CompactPublicKey::new(&hl_client_key);
let crs = CompactPkeCrs::new(
LweDimension(ZK_PKE_CRS_TEST.lwe_dimension),
LweCiphertextCount(ZK_PKE_CRS_TEST.max_num_cleartext),
TUniform::<u64>::new(ZK_PKE_CRS_TEST.noise_bound as u32),
CiphertextModulus::new(ZK_PKE_CRS_TEST.ciphertext_modulus),
ZK_PKE_CRS_TEST.plaintext_modulus as u64,
ZkMSBZeroPaddingBitCount(ZK_PKE_CRS_TEST.padding_bit_count as u64),
&mut zk_rng,
)
.unwrap();
store_versioned_test(&crs, &dir, &ZK_PKE_CRS_TEST.test_filename);
// Store the associated client key to be able to decrypt the ciphertexts in the list
store_versioned_auxiliary(
&hl_client_key,
&dir,
&HL_PROVEN_COMPACTLIST_TEST_ZKV2.key_filename,
);
store_versioned_auxiliary(
&compact_pub_key,
&dir,
&HL_PROVEN_COMPACTLIST_TEST_ZKV2
.proof_info
.unwrap()
.public_key_filename,
);
let mut proven_builder = ProvenCompactCiphertextList::builder(&compact_pub_key);
proven_builder
.push(HL_PROVEN_COMPACTLIST_TEST_ZKV2.clear_values[0] as u8)
.push(HL_PROVEN_COMPACTLIST_TEST_ZKV2.clear_values[1] as i8)
.push(HL_PROVEN_COMPACTLIST_TEST_ZKV2.clear_values[2] != 0)
.push(HL_PROVEN_COMPACTLIST_TEST_ZKV2.clear_values[3] != 0);
let proven_list_packed = proven_builder
.build_with_proof_packed(
&crs,
HL_PROVEN_COMPACTLIST_TEST_ZKV2
.proof_info
.unwrap()
.metadata
.as_bytes(),
ZkComputeLoad::Proof,
)
.unwrap();
store_versioned_test(
&proven_list_packed,
&dir,
&HL_PROVEN_COMPACTLIST_TEST_ZKV2.test_filename,
);
vec![
TestMetadata::ZkPkePublicParams(ZK_PKE_CRS_TEST),
TestMetadata::HlHeterogeneousCiphertextList(HL_PROVEN_COMPACTLIST_TEST_ZKV2),
]
}
}

View File

@@ -0,0 +1,36 @@
use std::fs::remove_dir_all;
use std::path::PathBuf;
use clap::Parser;
use generate_0_11::V0_11;
use tfhe_backward_compat_data::dir_for_version;
use tfhe_backward_compat_data::generate::{
display_metadata, gen_all_data, update_metadata_for_version,
};
#[derive(Parser, Debug)]
struct Args {
/// The path where the backward data should be stored
#[arg(long)]
data_path: PathBuf,
/// Output metadata to stdout instead of writing them to the ron file
#[arg(long, action)]
stdout: bool,
}
fn main() {
let args = Args::parse();
let version_dir = dir_for_version(&args.data_path, "0.11");
// Ignore if directory does not exist
let _ = remove_dir_all(&version_dir);
let data = gen_all_data::<V0_11>(&args.data_path);
if args.stdout {
display_metadata(&data);
} else {
update_metadata_for_version(data, args.data_path);
}
}

View File

@@ -0,0 +1,196 @@
use std::path::Path;
use tfhe::core_crypto::prelude::{
CiphertextModulusLog, LweCiphertextCount, TUniform, UnsignedInteger,
};
use tfhe::shortint::PBSParameters;
use tfhe::shortint::parameters::{
CarryModulus, CiphertextModulus, ClassicPBSParameters, CompressionParameters,
DecompositionBaseLog, DecompositionLevelCount, DynamicDistribution, EncryptionKeyChoice,
GlweDimension, LweBskGroupingFactor, LweDimension, MaxNoiseLevel, MessageModulus,
MultiBitPBSParameters, PolynomialSize, StandardDev,
};
use tfhe_versionable::Versionize;
use tfhe_backward_compat_data::generate::*;
use tfhe_backward_compat_data::*;
pub(crate) fn store_versioned_test<Data: Versionize + 'static, P: AsRef<Path>>(
msg: &Data,
dir: P,
test_filename: &str,
) {
generic_store_versioned_test(Versionize::versionize, msg, dir, test_filename)
}
#[allow(dead_code)]
pub(crate) fn store_versioned_auxiliary<Data: Versionize + 'static, P: AsRef<Path>>(
msg: &Data,
dir: P,
test_filename: &str,
) {
generic_store_versioned_auxiliary(Versionize::versionize, msg, dir, test_filename)
}
/// This trait allows converting the version-independent parameter types defined in
/// `tfhe-backward-compat-data` into the equivalent TFHE-rs parameters for this version.
///
/// This is similar to `Into` but allows circumventing the orphan rule.
pub(crate) trait ConvertParams<TfheRsParams> {
fn convert(self) -> TfheRsParams;
}
impl<Scalar> ConvertParams<DynamicDistribution<Scalar>> for TestDistribution
where
Scalar: UnsignedInteger,
{
fn convert(self) -> DynamicDistribution<Scalar> {
match self {
TestDistribution::Gaussian { stddev } => {
DynamicDistribution::new_gaussian_from_std_dev(StandardDev(stddev))
}
TestDistribution::TUniform { bound_log2 } => {
DynamicDistribution::TUniform(TUniform::new(bound_log2))
}
}
}
}
impl ConvertParams<ClassicPBSParameters> for TestClassicParameterSet {
fn convert(self) -> ClassicPBSParameters {
let TestClassicParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
encryption_key_choice,
modulus_switch_noise_reduction_params: _,
} = self;
ClassicPBSParameters {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.convert(),
glwe_noise_distribution: glwe_noise_distribution.convert(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
max_noise_level: MaxNoiseLevel::new(max_noise_level as u64),
log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
}
}
}
impl ConvertParams<MultiBitPBSParameters> for TestMultiBitParameterSet {
fn convert(self) -> MultiBitPBSParameters {
let TestMultiBitParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
encryption_key_choice,
grouping_factor,
} = self;
MultiBitPBSParameters {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.convert(),
glwe_noise_distribution: glwe_noise_distribution.convert(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
max_noise_level: MaxNoiseLevel::new(max_noise_level as u64),
log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
grouping_factor: LweBskGroupingFactor(grouping_factor),
deterministic_execution: false,
}
}
}
impl ConvertParams<PBSParameters> for TestParameterSet {
fn convert(self) -> PBSParameters {
match self {
TestParameterSet::TestClassicParameterSet(test_classic_parameter_set) => {
PBSParameters::PBS(test_classic_parameter_set.convert())
}
TestParameterSet::TestMultiBitParameterSet(test_parameter_set_multi_bit) => {
PBSParameters::MultiBitPBS(test_parameter_set_multi_bit.convert())
}
TestParameterSet::TestKS32ParameterSet(_) => {
panic!("unsupported ks32 parameters for version")
}
}
}
}
impl ConvertParams<CompressionParameters> for TestCompressionParameterSet {
fn convert(self) -> CompressionParameters {
let TestCompressionParameterSet {
br_level,
br_base_log,
packing_ks_level,
packing_ks_base_log,
packing_ks_polynomial_size,
packing_ks_glwe_dimension,
lwe_per_glwe,
storage_log_modulus,
packing_ks_key_noise_distribution,
} = self;
CompressionParameters {
br_level: DecompositionLevelCount(br_level),
br_base_log: DecompositionBaseLog(br_base_log),
packing_ks_level: DecompositionLevelCount(packing_ks_level),
packing_ks_base_log: DecompositionBaseLog(packing_ks_base_log),
packing_ks_polynomial_size: PolynomialSize(packing_ks_polynomial_size),
packing_ks_glwe_dimension: GlweDimension(packing_ks_glwe_dimension),
lwe_per_glwe: LweCiphertextCount(lwe_per_glwe),
storage_log_modulus: CiphertextModulusLog(storage_log_modulus),
packing_ks_key_noise_distribution: packing_ks_key_noise_distribution.convert(),
}
}
}

View File

@@ -0,0 +1,21 @@
[package]
name = "generate_0_8"
edition = "2024"
version = "0.9.0"
license = "BSD-3-Clause-Clear"
[dependencies]
clap = { version = "4.5", features = ["derive"] }
# TFHE-rs
tfhe = { git = "https://github.com/zama-ai/tfhe-rs.git", tag = "tfhe-rs-0.8.3", features = [
"boolean",
"integer",
"shortint",
"x86_64-unix",
"zk-pok",
"experimental-force_fft_algo_dif4",
] }
tfhe-versionable = { git = "https://github.com/zama-ai/tfhe-rs.git", tag = "tfhe-rs-0.8.3" }
tfhe-backward-compat-data = { path = "../.." }

View File

@@ -1,190 +1,27 @@
mod utils;
use utils::*;
use std::borrow::Cow;
use std::fs::create_dir_all;
use std::path::Path;
use tfhe_0_8::boolean::engine::BooleanEngine;
use tfhe_0_8::core_crypto::commons::generators::DeterministicSeeder;
use tfhe_0_8::core_crypto::commons::math::random::RandomGenerator;
use tfhe_0_8::core_crypto::prelude::{
ActivatedRandomGenerator, CiphertextModulusLog, LweCiphertextCount, TUniform,
};
use tfhe_0_8::integer::parameters::{
DecompositionBaseLog, DecompositionLevelCount, DynamicDistribution, GlweDimension,
LweDimension, PolynomialSize, StandardDev,
};
use tfhe_0_8::prelude::*;
use tfhe_0_8::shortint::engine::ShortintEngine;
use tfhe_0_8::shortint::parameters::list_compression::CompressionParameters;
use tfhe_0_8::shortint::parameters::LweBskGroupingFactor;
use tfhe_0_8::shortint::{
CarryModulus, CiphertextModulus, ClassicPBSParameters, EncryptionKeyChoice, MaxNoiseLevel,
MessageModulus, MultiBitPBSParameters, PBSParameters,
};
use tfhe_0_8::zk::{CompactPkeCrs, ZkComputeLoad, ZkMSBZeroPaddingBitCount};
use tfhe_0_8::{
generate_keys, set_server_key, shortint, ClientKey, CompactCiphertextList, CompactPublicKey,
CompressedCiphertextListBuilder, CompressedCompactPublicKey, CompressedFheBool,
CompressedFheInt8, CompressedFheUint8, CompressedPublicKey, ConfigBuilder, FheBool, FheInt8,
FheUint8, ProvenCompactCiphertextList, PublicKey, Seed, ServerKey,
use tfhe::boolean::engine::BooleanEngine;
use tfhe::core_crypto::commons::generators::DeterministicSeeder;
use tfhe::core_crypto::commons::math::random::RandomGenerator;
use tfhe::core_crypto::prelude::{ActivatedRandomGenerator, LweDimension, TUniform};
use tfhe::prelude::*;
use tfhe::shortint::CiphertextModulus;
use tfhe::shortint::engine::ShortintEngine;
use tfhe::zk::{CompactPkeCrs, ZkComputeLoad, ZkMSBZeroPaddingBitCount};
use tfhe::{
ClientKey, CompactCiphertextList, CompactPublicKey, CompressedCiphertextListBuilder,
CompressedCompactPublicKey, CompressedFheBool, CompressedFheInt8, CompressedFheUint8,
CompressedPublicKey, ConfigBuilder, FheBool, FheInt8, FheUint8, ProvenCompactCiphertextList,
PublicKey, Seed, ServerKey, generate_keys, set_server_key, shortint,
};
use crate::generate::{
store_versioned_auxiliary_tfhe_0_8, store_versioned_test_tfhe_0_8, TfhersVersion,
INSECURE_SMALL_PK_TEST_PARAMS, PRNG_SEED, VALID_TEST_PARAMS, VALID_TEST_PARAMS_TUNIFORM,
VALID_TEST_PARAMS_TUNIFORM_COMPRESSION,
};
use crate::{
DataKind, HlBoolCiphertextTest, HlCiphertextTest, HlClientKeyTest,
HlHeterogeneousCiphertextListTest, HlPublicKeyTest, HlSignedCiphertextTest,
PkeZkProofAuxiliaryInfo, ShortintCiphertextTest, ShortintClientKeyTest,
TestClassicParameterSet, TestCompressionParameterSet, TestDistribution, TestMetadata,
TestMultiBitParameterSet, TestParameterSet, ZkPkePublicParamsTest, HL_MODULE_NAME,
SHORTINT_MODULE_NAME,
};
macro_rules! store_versioned_test {
($msg:expr, $dir:expr, $test_filename:expr $(,)? ) => {
store_versioned_test_tfhe_0_8($msg, $dir, $test_filename)
};
}
macro_rules! store_versioned_auxiliary {
($msg:expr, $dir:expr, $test_filename:expr $(,)? ) => {
store_versioned_auxiliary_tfhe_0_8($msg, $dir, $test_filename)
};
}
impl From<TestDistribution> for DynamicDistribution<u64> {
fn from(value: TestDistribution) -> Self {
match value {
TestDistribution::Gaussian { stddev } => {
DynamicDistribution::new_gaussian_from_std_dev(StandardDev(stddev))
}
TestDistribution::TUniform { bound_log2 } => {
DynamicDistribution::TUniform(TUniform::new(bound_log2))
}
}
}
}
impl From<TestClassicParameterSet> for ClassicPBSParameters {
fn from(value: TestClassicParameterSet) -> Self {
ClassicPBSParameters {
lwe_dimension: LweDimension(value.lwe_dimension),
glwe_dimension: GlweDimension(value.glwe_dimension),
polynomial_size: PolynomialSize(value.polynomial_size),
lwe_noise_distribution: value.lwe_noise_distribution.into(),
glwe_noise_distribution: value.glwe_noise_distribution.into(),
pbs_base_log: DecompositionBaseLog(value.pbs_base_log),
pbs_level: DecompositionLevelCount(value.pbs_level),
ks_base_log: DecompositionBaseLog(value.ks_base_log),
ks_level: DecompositionLevelCount(value.ks_level),
message_modulus: MessageModulus(value.message_modulus),
carry_modulus: CarryModulus(value.carry_modulus),
max_noise_level: MaxNoiseLevel::new(value.max_noise_level),
log2_p_fail: value.log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(value.ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*value.encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
}
}
}
impl From<TestMultiBitParameterSet> for MultiBitPBSParameters {
fn from(value: TestMultiBitParameterSet) -> Self {
let TestMultiBitParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
encryption_key_choice,
grouping_factor,
} = value;
MultiBitPBSParameters {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.into(),
glwe_noise_distribution: glwe_noise_distribution.into(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus),
carry_modulus: CarryModulus(carry_modulus),
max_noise_level: MaxNoiseLevel::new(max_noise_level),
log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
grouping_factor: LweBskGroupingFactor(grouping_factor),
deterministic_execution: false,
}
}
}
impl From<TestParameterSet> for PBSParameters {
fn from(value: TestParameterSet) -> Self {
match value {
TestParameterSet::TestClassicParameterSet(test_classic_parameter_set) => {
PBSParameters::PBS(test_classic_parameter_set.into())
}
TestParameterSet::TestMultiBitParameterSet(test_parameter_set_multi_bit) => {
PBSParameters::MultiBitPBS(test_parameter_set_multi_bit.into())
}
TestParameterSet::TestKS32ParameterSet(_) => {
panic!("unsupported ks32 parameters for this version")
}
}
}
}
impl From<TestCompressionParameterSet> for CompressionParameters {
fn from(value: TestCompressionParameterSet) -> Self {
let TestCompressionParameterSet {
br_level,
br_base_log,
packing_ks_level,
packing_ks_base_log,
packing_ks_polynomial_size,
packing_ks_glwe_dimension,
lwe_per_glwe,
storage_log_modulus,
packing_ks_key_noise_distribution,
} = value;
Self {
br_level: DecompositionLevelCount(br_level),
br_base_log: DecompositionBaseLog(br_base_log),
packing_ks_level: DecompositionLevelCount(packing_ks_level),
packing_ks_base_log: DecompositionBaseLog(packing_ks_base_log),
packing_ks_polynomial_size: PolynomialSize(packing_ks_polynomial_size),
packing_ks_glwe_dimension: GlweDimension(packing_ks_glwe_dimension),
lwe_per_glwe: LweCiphertextCount(lwe_per_glwe),
storage_log_modulus: CiphertextModulusLog(storage_log_modulus),
packing_ks_key_noise_distribution: packing_ks_key_noise_distribution.into(),
}
}
}
use tfhe_backward_compat_data::generate::*;
use tfhe_backward_compat_data::*;
// Shortint test constants
const SHORTINT_CLIENT_KEY_FILENAME: &str = "client_key";
@@ -427,14 +264,15 @@ impl TfhersVersion for V0_8 {
BooleanEngine::replace_thread_local(boolean_engine);
}
fn gen_shortint_data() -> Vec<TestMetadata> {
let dir = Self::data_dir().join(SHORTINT_MODULE_NAME);
fn gen_shortint_data<P: AsRef<Path>>(base_data_dir: P) -> Vec<TestMetadata> {
let dir = Self::data_dir(base_data_dir).join(SHORTINT_MODULE_NAME);
create_dir_all(&dir).unwrap();
// generate a client key
let shortint_client_key = shortint::ClientKey::new(SHORTINT_CLIENTKEY_TEST.parameters);
let shortint_client_key =
shortint::ClientKey::new(SHORTINT_CLIENTKEY_TEST.parameters.convert());
store_versioned_test!(
store_versioned_test(
&shortint_client_key,
&dir,
&SHORTINT_CLIENTKEY_TEST.test_filename,
@@ -445,8 +283,8 @@ impl TfhersVersion for V0_8 {
let ct2 = shortint_client_key.encrypt(SHORTINT_CT2_TEST.clear_value);
// Serialize them
store_versioned_test!(&ct1, &dir, &SHORTINT_CT1_TEST.test_filename);
store_versioned_test!(&ct2, &dir, &SHORTINT_CT2_TEST.test_filename);
store_versioned_test(&ct1, &dir, &SHORTINT_CT1_TEST.test_filename);
store_versioned_test(&ct2, &dir, &SHORTINT_CT2_TEST.test_filename);
vec![
TestMetadata::ShortintClientKey(SHORTINT_CLIENTKEY_TEST),
@@ -455,8 +293,8 @@ impl TfhersVersion for V0_8 {
]
}
fn gen_hl_data() -> Vec<TestMetadata> {
let dir = Self::data_dir().join(HL_MODULE_NAME);
fn gen_hl_data<P: AsRef<Path>>(base_data_dir: P) -> Vec<TestMetadata> {
let dir = Self::data_dir(base_data_dir).join(HL_MODULE_NAME);
create_dir_all(&dir).unwrap();
let mut all_tests = vec![];
@@ -464,40 +302,42 @@ impl TfhersVersion for V0_8 {
{
// generate keys
let config =
ConfigBuilder::with_custom_parameters(HL_CLIENTKEY_TEST.parameters).build();
ConfigBuilder::with_custom_parameters(HL_CLIENTKEY_TEST.parameters.convert())
.build();
let (hl_client_key, hl_server_key) = generate_keys(config);
// Here we use specific parameters to generate a smaller public key.
// WARNING: these parameters are completely insecure
let params_pk = INSECURE_SMALL_PK_TEST_PARAMS;
let client_key_for_pk =
ClientKey::generate(ConfigBuilder::with_custom_parameters(params_pk).build());
let client_key_for_pk = ClientKey::generate(
ConfigBuilder::with_custom_parameters(params_pk.convert()).build(),
);
let pub_key = PublicKey::new(&client_key_for_pk);
let compressed_pub_key = CompressedPublicKey::new(&hl_client_key);
let compact_pub_key = CompactPublicKey::new(&hl_client_key);
let compressed_compact_pub_key = CompressedCompactPublicKey::new(&hl_client_key);
store_versioned_test!(&hl_client_key, &dir, &HL_CLIENTKEY_TEST.test_filename);
store_versioned_test(&hl_client_key, &dir, &HL_CLIENTKEY_TEST.test_filename);
store_versioned_test!(&pub_key, &dir, &HL_LEGACY_PUBKEY_TEST.test_filename);
store_versioned_auxiliary!(
store_versioned_test(&pub_key, &dir, &HL_LEGACY_PUBKEY_TEST.test_filename);
store_versioned_auxiliary(
&client_key_for_pk,
&dir,
&HL_LEGACY_PUBKEY_TEST.client_key_filename,
);
store_versioned_test!(
store_versioned_test(
&compressed_pub_key,
&dir,
&HL_COMPRESSED_LEGACY_PUBKEY_TEST.test_filename,
);
store_versioned_test!(
store_versioned_test(
&compact_pub_key,
&dir,
&HL_COMPACT_PUBKEY_TEST.test_filename,
);
store_versioned_test!(
store_versioned_test(
&compressed_compact_pub_key,
&dir,
&HL_COMPRESSED_COMPACT_PUBKEY_TEST.test_filename,
@@ -547,40 +387,40 @@ impl TfhersVersion for V0_8 {
);
// Serialize them
store_versioned_test!(&ct1, &dir, &HL_CT1_TEST.test_filename);
store_versioned_test!(&ct2, &dir, &HL_CT2_TEST.test_filename);
store_versioned_test!(
store_versioned_test(&ct1, &dir, &HL_CT1_TEST.test_filename);
store_versioned_test(&ct2, &dir, &HL_CT2_TEST.test_filename);
store_versioned_test(
&compressed_ct1,
&dir,
&HL_COMPRESSED_SEEDED_CT_TEST.test_filename,
);
store_versioned_test!(
store_versioned_test(
&compressed_ct2,
&dir,
&HL_COMPRESSED_CT_MODSWITCHED_TEST.test_filename,
);
store_versioned_test!(&ct1_signed, &dir, &HL_SIGNED_CT1_TEST.test_filename);
store_versioned_test!(&ct2_signed, &dir, &HL_SIGNED_CT2_TEST.test_filename);
store_versioned_test!(
store_versioned_test(&ct1_signed, &dir, &HL_SIGNED_CT1_TEST.test_filename);
store_versioned_test(&ct2_signed, &dir, &HL_SIGNED_CT2_TEST.test_filename);
store_versioned_test(
&compressed_ct1_signed,
&dir,
&HL_SIGNED_COMPRESSED_SEEDED_CT_TEST.test_filename,
);
store_versioned_test!(
store_versioned_test(
&compressed_ct2_signed,
&dir,
&HL_SIGNED_COMPRESSED_CT_MODSWITCHED_TEST.test_filename,
);
store_versioned_test!(&bool1, &dir, &HL_BOOL1_TEST.test_filename);
store_versioned_test!(&bool2, &dir, &HL_BOOL2_TEST.test_filename);
store_versioned_test!(
store_versioned_test(&bool1, &dir, &HL_BOOL1_TEST.test_filename);
store_versioned_test(&bool2, &dir, &HL_BOOL2_TEST.test_filename);
store_versioned_test(
&compressed_bool1,
&dir,
&HL_COMPRESSED_BOOL_SEEDED_TEST.test_filename,
);
store_versioned_test!(
store_versioned_test(
&compressed_bool2,
&dir,
&HL_COMPRESSED_BOOL_MODSWITCHED_TEST.test_filename,
@@ -611,8 +451,8 @@ impl TfhersVersion for V0_8 {
{
// Generate a compact public key needed to create a compact list
let config =
tfhe_0_8::ConfigBuilder::with_custom_parameters(VALID_TEST_PARAMS_TUNIFORM)
.enable_compression(VALID_TEST_PARAMS_TUNIFORM_COMPRESSION.into())
tfhe::ConfigBuilder::with_custom_parameters(VALID_TEST_PARAMS_TUNIFORM.convert())
.enable_compression(VALID_TEST_PARAMS_TUNIFORM_COMPRESSION.convert())
.build();
let hl_client_key = ClientKey::generate(config);
let hl_server_key = ServerKey::new(&hl_client_key);
@@ -633,19 +473,19 @@ impl TfhersVersion for V0_8 {
.unwrap();
// Store the associated client key to be able to decrypt the ciphertexts in the list
store_versioned_auxiliary!(
store_versioned_auxiliary(
&hl_client_key,
&dir,
&HL_PROVEN_COMPACTLIST_TEST.key_filename
&HL_PROVEN_COMPACTLIST_TEST.key_filename,
);
store_versioned_auxiliary!(
store_versioned_auxiliary(
&compact_pub_key,
&dir,
&HL_PROVEN_COMPACTLIST_TEST
.proof_info
.unwrap()
.public_key_filename
.public_key_filename,
);
let mut proven_builder = ProvenCompactCiphertextList::builder(&compact_pub_key);
@@ -667,13 +507,13 @@ impl TfhersVersion for V0_8 {
)
.unwrap();
store_versioned_test!(
store_versioned_test(
crs.public_params(),
&dir,
&ZK_PKE_PUBLIC_PARAMS_TEST.test_filename,
);
store_versioned_test!(
store_versioned_test(
&proven_list_packed,
&dir,
&HL_PROVEN_COMPACTLIST_TEST.test_filename,
@@ -710,19 +550,19 @@ impl TfhersVersion for V0_8 {
));
let compressed_list = compressed_builder.build().unwrap();
store_versioned_test!(
store_versioned_test(
&compact_list_packed,
&dir,
&HL_PACKED_COMPACTLIST_TEST.test_filename,
);
store_versioned_test!(&compact_list, &dir, &HL_COMPACTLIST_TEST.test_filename);
store_versioned_test!(
store_versioned_test(&compact_list, &dir, &HL_COMPACTLIST_TEST.test_filename);
store_versioned_test(
&compressed_list,
&dir,
&HL_COMPRESSED_LIST_TEST.test_filename,
);
store_versioned_test!(
store_versioned_test(
&hl_client_key,
&dir,
&HL_CLIENTKEY_WITH_COMPRESSION_TEST.test_filename,

View File

@@ -0,0 +1,36 @@
use std::fs::remove_dir_all;
use std::path::PathBuf;
use clap::Parser;
use generate_0_8::V0_8;
use tfhe_backward_compat_data::dir_for_version;
use tfhe_backward_compat_data::generate::{
display_metadata, gen_all_data, update_metadata_for_version,
};
#[derive(Parser, Debug)]
struct Args {
/// The path where the backward data should be stored
#[arg(long)]
data_path: PathBuf,
/// Output metadata to stdout instead of writing them to the ron file
#[arg(long, action)]
stdout: bool,
}
fn main() {
let args = Args::parse();
let version_dir = dir_for_version(&args.data_path, "0.8");
    // Ignore the error if the directory does not exist
let _ = remove_dir_all(&version_dir);
let data = gen_all_data::<V0_8>(&args.data_path);
if args.stdout {
display_metadata(&data);
} else {
update_metadata_for_version(data, args.data_path);
}
}
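// Illustrative invocation of this generator binary (a sketch; the exact package
// name and data path are assumptions, not taken from this change -- only the
// `--data-path` and `--stdout` flags come from the `Args` definition above):
//
//   cargo run --release -p generate_0_8 -- --data-path <backward-compat-data-dir>
//   cargo run --release -p generate_0_8 -- --data-path <backward-compat-data-dir> --stdout
//
// The first form wipes and regenerates the `0.8` data directory and updates the
// ron metadata file; the second prints the metadata to stdout instead of writing it.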

View File

@@ -0,0 +1,200 @@
use std::path::Path;
use tfhe::core_crypto::prelude::{
CiphertextModulusLog, DecompositionBaseLog, DecompositionLevelCount, DynamicDistribution,
GlweDimension, LweCiphertextCount, LweDimension, PolynomialSize, StandardDev, TUniform,
UnsignedInteger,
};
use tfhe::shortint::parameters::{CompressionParameters, LweBskGroupingFactor};
use tfhe::shortint::{
CarryModulus, CiphertextModulus, ClassicPBSParameters, EncryptionKeyChoice, MaxNoiseLevel,
MessageModulus, MultiBitPBSParameters, PBSParameters,
};
use tfhe_versionable::Versionize;
use tfhe_backward_compat_data::generate::{
generic_store_versioned_auxiliary, generic_store_versioned_test,
};
use tfhe_backward_compat_data::{
TestClassicParameterSet, TestCompressionParameterSet, TestDistribution,
TestMultiBitParameterSet, TestParameterSet,
};
pub(crate) fn store_versioned_test<Data: Versionize + 'static, P: AsRef<Path>>(
msg: &Data,
dir: P,
test_filename: &str,
) {
generic_store_versioned_test(Versionize::versionize, msg, dir, test_filename)
}
pub(crate) fn store_versioned_auxiliary<Data: Versionize + 'static, P: AsRef<Path>>(
msg: &Data,
dir: P,
test_filename: &str,
) {
generic_store_versioned_auxiliary(Versionize::versionize, msg, dir, test_filename)
}
/// This trait allows converting the version-independent parameter types defined in
/// `tfhe-backward-compat-data` into the equivalent TFHE-rs parameters for this version.
///
/// This is similar to `Into`, but circumvents the orphan rule.
pub(crate) trait ConvertParams<TfheRsParams> {
fn convert(self) -> TfheRsParams;
}
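// Why a local trait rather than `From`: both the `Test*` parameter types (defined
// in `tfhe-backward-compat-data`) and the TFHE-rs parameter types are foreign to
// this crate, so `impl From<TestDistribution> for DynamicDistribution<u64>` would
// be rejected by the orphan rule, while a locally defined trait can be implemented
// for any pair of types. Illustrative use (a sketch; the value is made up):
//
//   let dist: DynamicDistribution<u64> =
//       TestDistribution::TUniform { bound_log2: 3 }.convert();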
impl<Scalar> ConvertParams<DynamicDistribution<Scalar>> for TestDistribution
where
Scalar: UnsignedInteger,
{
fn convert(self) -> DynamicDistribution<Scalar> {
match self {
TestDistribution::Gaussian { stddev } => {
DynamicDistribution::new_gaussian_from_std_dev(StandardDev(stddev))
}
TestDistribution::TUniform { bound_log2 } => {
DynamicDistribution::TUniform(TUniform::new(bound_log2))
}
}
}
}
impl ConvertParams<ClassicPBSParameters> for TestClassicParameterSet {
fn convert(self) -> ClassicPBSParameters {
let TestClassicParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
encryption_key_choice,
modulus_switch_noise_reduction_params: _,
} = self;
ClassicPBSParameters {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.convert(),
glwe_noise_distribution: glwe_noise_distribution.convert(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus),
carry_modulus: CarryModulus(carry_modulus),
max_noise_level: MaxNoiseLevel::new(max_noise_level),
log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
}
}
}
impl ConvertParams<MultiBitPBSParameters> for TestMultiBitParameterSet {
fn convert(self) -> MultiBitPBSParameters {
let TestMultiBitParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
encryption_key_choice,
grouping_factor,
} = self;
MultiBitPBSParameters {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.convert(),
glwe_noise_distribution: glwe_noise_distribution.convert(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus),
carry_modulus: CarryModulus(carry_modulus),
max_noise_level: MaxNoiseLevel::new(max_noise_level),
log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
grouping_factor: LweBskGroupingFactor(grouping_factor),
deterministic_execution: false,
}
}
}
impl ConvertParams<PBSParameters> for TestParameterSet {
fn convert(self) -> PBSParameters {
match self {
TestParameterSet::TestClassicParameterSet(test_classic_parameter_set) => {
PBSParameters::PBS(test_classic_parameter_set.convert())
}
TestParameterSet::TestMultiBitParameterSet(test_parameter_set_multi_bit) => {
PBSParameters::MultiBitPBS(test_parameter_set_multi_bit.convert())
}
TestParameterSet::TestKS32ParameterSet(_) => {
panic!("unsupported ks32 parameters for version")
}
}
}
}
impl ConvertParams<CompressionParameters> for TestCompressionParameterSet {
fn convert(self) -> CompressionParameters {
let TestCompressionParameterSet {
br_level,
br_base_log,
packing_ks_level,
packing_ks_base_log,
packing_ks_polynomial_size,
packing_ks_glwe_dimension,
lwe_per_glwe,
storage_log_modulus,
packing_ks_key_noise_distribution,
} = self;
CompressionParameters {
br_level: DecompositionLevelCount(br_level),
br_base_log: DecompositionBaseLog(br_base_log),
packing_ks_level: DecompositionLevelCount(packing_ks_level),
packing_ks_base_log: DecompositionBaseLog(packing_ks_base_log),
packing_ks_polynomial_size: PolynomialSize(packing_ks_polynomial_size),
packing_ks_glwe_dimension: GlweDimension(packing_ks_glwe_dimension),
lwe_per_glwe: LweCiphertextCount(lwe_per_glwe),
storage_log_modulus: CiphertextModulusLog(storage_log_modulus),
packing_ks_key_noise_distribution: packing_ks_key_noise_distribution.convert(),
}
}
}

View File

@@ -0,0 +1,20 @@
[package]
name = "generate_1_0"
edition = "2024"
version = "0.9.0"
license = "BSD-3-Clause-Clear"
[dependencies]
clap = { version = "4.5", features = ["derive"] }
# TFHE-rs
tfhe = { git = "https://github.com/zama-ai/tfhe-rs.git", tag = "tfhe-rs-1.0.0", features = [
"boolean",
"integer",
"shortint",
"zk-pok",
"experimental-force_fft_algo_dif4",
] }
tfhe-versionable = { git = "https://github.com/zama-ai/tfhe-rs.git", tag = "tfhe-rs-1.0.0" }
tfhe-backward-compat-data = { path = "../.." }

View File

@@ -0,0 +1,112 @@
mod utils;
use tfhe::Seed;
use tfhe::boolean::engine::BooleanEngine;
use tfhe::core_crypto::commons::generators::DeterministicSeeder;
use tfhe::core_crypto::commons::math::random::RandomGenerator;
use tfhe::core_crypto::prelude::{DefaultRandomGenerator, TUniform};
use tfhe::shortint::CiphertextModulus;
use tfhe::shortint::engine::ShortintEngine;
use tfhe::shortint::parameters::{LweCiphertextCount, LweDimension};
use tfhe::zk::{CompactPkeCrs, ZkMSBZeroPaddingBitCount};
use utils::*;
use std::borrow::Cow;
use std::fs::create_dir_all;
use std::path::Path;
use tfhe_backward_compat_data::generate::*;
use tfhe_backward_compat_data::*;
const HL_CLIENTKEY_MS_NOISE_REDUCTION_TEST: HlClientKeyTest = HlClientKeyTest {
test_filename: Cow::Borrowed("client_key_ms_noise_reduction"),
parameters: INSECURE_SMALL_TEST_PARAMS_MS_NOISE_REDUCTION,
};
const HL_SERVERKEY_MS_NOISE_REDUCTION_TEST: HlServerKeyTest = HlServerKeyTest {
test_filename: Cow::Borrowed("server_key_ms_noise_reduction"),
client_key_filename: Cow::Borrowed("client_key_ms_noise_reduction.cbor"),
rerand_cpk_filename: None,
compressed: false,
};
const ZK_PKEV2_CRS_TEST: ZkPkePublicParamsTest = ZkPkePublicParamsTest {
test_filename: Cow::Borrowed("zk_pkev2_crs"),
lwe_dimension: VALID_TEST_PARAMS_TUNIFORM.polynomial_size()
* VALID_TEST_PARAMS_TUNIFORM.glwe_dimension(), // Lwe dimension of the "big" key is glwe dimension * polynomial size
max_num_cleartext: 16,
noise_bound: match VALID_TEST_PARAMS_TUNIFORM.lwe_noise_distribution() {
TestDistribution::Gaussian { .. } => unreachable!(),
TestDistribution::TUniform { bound_log2 } => bound_log2 as usize,
},
ciphertext_modulus: VALID_TEST_PARAMS_TUNIFORM.ciphertext_modulus(),
plaintext_modulus: VALID_TEST_PARAMS_TUNIFORM.message_modulus()
* VALID_TEST_PARAMS_TUNIFORM.carry_modulus()
* 2, // *2 for padding bit
padding_bit_count: 1,
};
pub struct V1_0;
impl TfhersVersion for V1_0 {
const VERSION_NUMBER: &'static str = "1.0";
fn seed_prng(seed: u128) {
let mut seeder = DeterministicSeeder::<DefaultRandomGenerator>::new(Seed(seed));
let shortint_engine = ShortintEngine::new_from_seeder(&mut seeder);
ShortintEngine::with_thread_local_mut(|local_engine| {
let _ = std::mem::replace(local_engine, shortint_engine);
});
let boolean_engine = BooleanEngine::new_from_seeder(&mut seeder);
BooleanEngine::replace_thread_local(boolean_engine);
}
fn gen_shortint_data<P: AsRef<Path>>(_base_data_dir: P) -> Vec<TestMetadata> {
Vec::new()
}
fn gen_hl_data<P: AsRef<Path>>(base_data_dir: P) -> Vec<TestMetadata> {
let dir = Self::data_dir(base_data_dir).join(HL_MODULE_NAME);
create_dir_all(&dir).unwrap();
let config = tfhe::ConfigBuilder::with_custom_parameters(
HL_CLIENTKEY_MS_NOISE_REDUCTION_TEST.parameters.convert(),
)
.build();
let (hl_client_key, hl_server_key) = tfhe::generate_keys(config);
store_versioned_test(
&hl_client_key,
&dir,
&HL_CLIENTKEY_MS_NOISE_REDUCTION_TEST.test_filename,
);
store_versioned_test(
&hl_server_key,
&dir,
&HL_SERVERKEY_MS_NOISE_REDUCTION_TEST.test_filename,
);
let mut zk_rng: RandomGenerator<DefaultRandomGenerator> =
RandomGenerator::new(Seed(PRNG_SEED));
let zkv2_crs = CompactPkeCrs::new(
LweDimension(ZK_PKEV2_CRS_TEST.lwe_dimension),
LweCiphertextCount(ZK_PKEV2_CRS_TEST.max_num_cleartext),
TUniform::<u64>::new(ZK_PKEV2_CRS_TEST.noise_bound as u32),
CiphertextModulus::new(ZK_PKEV2_CRS_TEST.ciphertext_modulus),
ZK_PKEV2_CRS_TEST.plaintext_modulus as u64,
ZkMSBZeroPaddingBitCount(ZK_PKEV2_CRS_TEST.padding_bit_count as u64),
&mut zk_rng,
)
.unwrap();
store_versioned_test(&zkv2_crs, &dir, &ZK_PKEV2_CRS_TEST.test_filename);
vec![
TestMetadata::HlClientKey(HL_CLIENTKEY_MS_NOISE_REDUCTION_TEST),
TestMetadata::HlServerKey(HL_SERVERKEY_MS_NOISE_REDUCTION_TEST),
TestMetadata::ZkPkePublicParams(ZK_PKEV2_CRS_TEST),
]
}
}

View File

@@ -0,0 +1,36 @@
use std::fs::remove_dir_all;
use std::path::PathBuf;
use clap::Parser;
use generate_1_0::V1_0;
use tfhe_backward_compat_data::dir_for_version;
use tfhe_backward_compat_data::generate::{
display_metadata, gen_all_data, update_metadata_for_version,
};
#[derive(Parser, Debug)]
struct Args {
/// The path where the backward data should be stored
#[arg(long)]
data_path: PathBuf,
/// Output metadata to stdout instead of writing them to the ron file
#[arg(long, action)]
stdout: bool,
}
fn main() {
let args = Args::parse();
let version_dir = dir_for_version(&args.data_path, "1.0");
    // Ignore the error if the directory does not exist
let _ = remove_dir_all(&version_dir);
let data = gen_all_data::<V1_0>(&args.data_path);
if args.stdout {
display_metadata(&data);
} else {
update_metadata_for_version(data, args.data_path);
}
}

View File

@@ -0,0 +1,228 @@
use std::path::Path;
use tfhe::core_crypto::prelude::{
CiphertextModulusLog, LweCiphertextCount, TUniform, UnsignedInteger,
};
use tfhe::shortint::parameters::{
CarryModulus, CiphertextModulus, ClassicPBSParameters, CompressionParameters,
DecompositionBaseLog, DecompositionLevelCount, DynamicDistribution, EncryptionKeyChoice,
GlweDimension, LweBskGroupingFactor, LweDimension, MaxNoiseLevel, MessageModulus,
ModulusSwitchNoiseReductionParams, NoiseEstimationMeasureBound, PolynomialSize, RSigmaFactor,
StandardDev, Variance,
};
use tfhe::shortint::{MultiBitPBSParameters, PBSParameters};
use tfhe_versionable::Versionize;
use tfhe_backward_compat_data::generate::*;
use tfhe_backward_compat_data::*;
pub(crate) fn store_versioned_test<Data: Versionize + 'static, P: AsRef<Path>>(
msg: &Data,
dir: P,
test_filename: &str,
) {
generic_store_versioned_test(Versionize::versionize, msg, dir, test_filename)
}
#[allow(dead_code)]
pub(crate) fn store_versioned_auxiliary<Data: Versionize + 'static, P: AsRef<Path>>(
msg: &Data,
dir: P,
test_filename: &str,
) {
generic_store_versioned_auxiliary(Versionize::versionize, msg, dir, test_filename)
}
/// This trait allows converting the version-independent parameter types defined in
/// `tfhe-backward-compat-data` into the equivalent TFHE-rs parameters for this version.
///
/// This is similar to `Into`, but circumvents the orphan rule.
pub(crate) trait ConvertParams<TfheRsParams> {
fn convert(self) -> TfheRsParams;
}
impl<Scalar> ConvertParams<DynamicDistribution<Scalar>> for TestDistribution
where
Scalar: UnsignedInteger,
{
fn convert(self) -> DynamicDistribution<Scalar> {
match self {
TestDistribution::Gaussian { stddev } => {
DynamicDistribution::new_gaussian_from_std_dev(StandardDev(stddev))
}
TestDistribution::TUniform { bound_log2 } => {
DynamicDistribution::TUniform(TUniform::new(bound_log2))
}
}
}
}
impl ConvertParams<ModulusSwitchNoiseReductionParams> for TestModulusSwitchNoiseReductionParams {
fn convert(self) -> ModulusSwitchNoiseReductionParams {
let TestModulusSwitchNoiseReductionParams {
modulus_switch_zeros_count,
ms_bound,
ms_r_sigma_factor,
ms_input_variance,
} = self;
ModulusSwitchNoiseReductionParams {
modulus_switch_zeros_count: LweCiphertextCount(modulus_switch_zeros_count),
ms_bound: NoiseEstimationMeasureBound(ms_bound),
ms_r_sigma_factor: RSigmaFactor(ms_r_sigma_factor),
ms_input_variance: Variance(ms_input_variance),
}
}
}
impl ConvertParams<Option<ModulusSwitchNoiseReductionParams>> for TestModulusSwitchType {
fn convert(self) -> Option<ModulusSwitchNoiseReductionParams> {
match self {
TestModulusSwitchType::Standard => None,
TestModulusSwitchType::DriftTechniqueNoiseReduction(
test_modulus_switch_noise_reduction_params,
) => Some(test_modulus_switch_noise_reduction_params.convert()),
TestModulusSwitchType::CenteredMeanNoiseReduction => panic!("Not supported"),
}
}
}
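// Note on the panic above: for this generator, `ClassicPBSParameters` only carries
// an optional drift-technique parameter set (`Option<ModulusSwitchNoiseReductionParams>`),
// so `TestModulusSwitchType::CenteredMeanNoiseReduction` has no equivalent here and
// cannot be instantiated; the utilities for the 1.3 generator later in this change map
// that variant to `ModulusSwitchType::CenteredMeanNoiseReduction` instead. This reading
// is an inference from the surrounding code, not a statement taken from the diff itself.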
impl ConvertParams<ClassicPBSParameters> for TestClassicParameterSet {
fn convert(self) -> ClassicPBSParameters {
let TestClassicParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
encryption_key_choice,
modulus_switch_noise_reduction_params,
} = self;
ClassicPBSParameters {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.convert(),
glwe_noise_distribution: glwe_noise_distribution.convert(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
max_noise_level: MaxNoiseLevel::new(max_noise_level as u64),
log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
modulus_switch_noise_reduction_params: modulus_switch_noise_reduction_params.convert(),
}
}
}
impl ConvertParams<MultiBitPBSParameters> for TestMultiBitParameterSet {
fn convert(self) -> MultiBitPBSParameters {
let TestMultiBitParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
encryption_key_choice,
grouping_factor,
} = self;
MultiBitPBSParameters {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.convert(),
glwe_noise_distribution: glwe_noise_distribution.convert(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
max_noise_level: MaxNoiseLevel::new(max_noise_level as u64),
log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
grouping_factor: LweBskGroupingFactor(grouping_factor),
deterministic_execution: false,
}
}
}
impl ConvertParams<PBSParameters> for TestParameterSet {
fn convert(self) -> PBSParameters {
match self {
TestParameterSet::TestClassicParameterSet(test_classic_parameter_set) => {
PBSParameters::PBS(test_classic_parameter_set.convert())
}
TestParameterSet::TestMultiBitParameterSet(test_parameter_set_multi_bit) => {
PBSParameters::MultiBitPBS(test_parameter_set_multi_bit.convert())
}
TestParameterSet::TestKS32ParameterSet(_) => {
panic!("unsupported ks32 parameters for version")
}
}
}
}
impl ConvertParams<CompressionParameters> for TestCompressionParameterSet {
fn convert(self) -> CompressionParameters {
let TestCompressionParameterSet {
br_level,
br_base_log,
packing_ks_level,
packing_ks_base_log,
packing_ks_polynomial_size,
packing_ks_glwe_dimension,
lwe_per_glwe,
storage_log_modulus,
packing_ks_key_noise_distribution,
} = self;
CompressionParameters {
br_level: DecompositionLevelCount(br_level),
br_base_log: DecompositionBaseLog(br_base_log),
packing_ks_level: DecompositionLevelCount(packing_ks_level),
packing_ks_base_log: DecompositionBaseLog(packing_ks_base_log),
packing_ks_polynomial_size: PolynomialSize(packing_ks_polynomial_size),
packing_ks_glwe_dimension: GlweDimension(packing_ks_glwe_dimension),
lwe_per_glwe: LweCiphertextCount(lwe_per_glwe),
storage_log_modulus: CiphertextModulusLog(storage_log_modulus),
packing_ks_key_noise_distribution: packing_ks_key_noise_distribution.convert(),
}
}
}

View File

@@ -0,0 +1,20 @@
[package]
name = "generate_1_1"
edition = "2024"
version = "0.9.0"
license = "BSD-3-Clause-Clear"
[dependencies]
clap = { version = "4.5", features = ["derive"] }
# TFHE-rs
tfhe = { git = "https://github.com/zama-ai/tfhe-rs.git", tag = "tfhe-rs-1.1.0", features = [
"boolean",
"integer",
"shortint",
"zk-pok",
"experimental-force_fft_algo_dif4",
] }
tfhe-versionable = { git = "https://github.com/zama-ai/tfhe-rs.git", tag = "tfhe-rs-1.1.0" }
tfhe-backward-compat-data = { path = "../.." }

View File

@@ -0,0 +1,178 @@
mod utils;
use tfhe::boolean::engine::BooleanEngine;
use tfhe::core_crypto::commons::generators::DeterministicSeeder;
use tfhe::core_crypto::prelude::DefaultRandomGenerator;
use tfhe::prelude::{FheEncrypt, SquashNoise};
use tfhe::shortint::engine::ShortintEngine;
use tfhe::{CompressedServerKey, FheBool, FheInt64, FheUint64, Seed, set_server_key};
use utils::*;
use std::borrow::Cow;
use std::fs::create_dir_all;
use std::path::Path;
use tfhe_backward_compat_data::generate::*;
use tfhe_backward_compat_data::*;
const HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST: HlClientKeyTest = HlClientKeyTest {
test_filename: Cow::Borrowed("client_key_with_noise_squashing"),
parameters: INSECURE_SMALL_TEST_PARAMS_MS_NOISE_REDUCTION,
};
const HL_SERVERKEY_MS_NOISE_REDUCTION_TEST: HlServerKeyTest = HlServerKeyTest {
test_filename: Cow::Borrowed("server_key_with_noise_squashing"),
client_key_filename: HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST.test_filename,
rerand_cpk_filename: None,
compressed: false,
};
const HL_SERVERKEY_MS_NOISE_REDUCTION_COMPRESSED_TEST: HlServerKeyTest = HlServerKeyTest {
test_filename: Cow::Borrowed("server_key_with_noise_squashing_compressed"),
client_key_filename: HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST.test_filename,
rerand_cpk_filename: None,
compressed: true,
};
const HL_SQUASHED_NOISE_UNSIGNED_CIPHERTEXT_TEST: HlSquashedNoiseUnsignedCiphertextTest =
HlSquashedNoiseUnsignedCiphertextTest {
test_filename: Cow::Borrowed("squashed_noise_unsigned_ciphertext"),
key_filename: HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST.test_filename,
clear_value: 42,
};
const HL_SQUASHED_NOISE_SIGNED_CIPHERTEXT_TEST: HlSquashedNoiseSignedCiphertextTest =
HlSquashedNoiseSignedCiphertextTest {
test_filename: Cow::Borrowed("squashed_noise_signed_ciphertext"),
key_filename: HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST.test_filename,
clear_value: -37,
};
const HL_SQUASHED_NOISE_BOOL_FALSE_CIPHERTEXT_TEST: HlSquashedNoiseBoolCiphertextTest =
HlSquashedNoiseBoolCiphertextTest {
test_filename: Cow::Borrowed("squashed_noise_bool_false_ciphertext"),
key_filename: HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST.test_filename,
clear_value: false,
};
const HL_SQUASHED_NOISE_BOOL_TRUE_CIPHERTEXT_TEST: HlSquashedNoiseBoolCiphertextTest =
HlSquashedNoiseBoolCiphertextTest {
test_filename: Cow::Borrowed("squashed_noise_bool_true_ciphertext"),
key_filename: HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST.test_filename,
clear_value: true,
};
pub struct V1_1;
impl TfhersVersion for V1_1 {
const VERSION_NUMBER: &'static str = "1.1";
fn seed_prng(seed: u128) {
let mut seeder = DeterministicSeeder::<DefaultRandomGenerator>::new(Seed(seed));
let shortint_engine = ShortintEngine::new_from_seeder(&mut seeder);
ShortintEngine::with_thread_local_mut(|local_engine| {
let _ = std::mem::replace(local_engine, shortint_engine);
});
let boolean_engine = BooleanEngine::new_from_seeder(&mut seeder);
BooleanEngine::replace_thread_local(boolean_engine);
}
fn gen_shortint_data<P: AsRef<Path>>(_base_data_dir: P) -> Vec<TestMetadata> {
Vec::new()
}
fn gen_hl_data<P: AsRef<Path>>(base_data_dir: P) -> Vec<TestMetadata> {
let dir = Self::data_dir(base_data_dir).join(HL_MODULE_NAME);
create_dir_all(&dir).unwrap();
let config = tfhe::ConfigBuilder::with_custom_parameters(
HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST.parameters.convert(),
)
.enable_noise_squashing(
INSECURE_SMALL_TEST_NOISE_SQUASHING_PARAMS_MS_NOISE_REDUCTION.convert(),
)
.build();
let (hl_client_key, hl_server_key) = tfhe::generate_keys(config);
set_server_key(hl_server_key.clone());
let ct_unsigned = FheUint64::encrypt(
HL_SQUASHED_NOISE_UNSIGNED_CIPHERTEXT_TEST.clear_value,
&hl_client_key,
);
let ct_signed = FheInt64::encrypt(
HL_SQUASHED_NOISE_SIGNED_CIPHERTEXT_TEST.clear_value,
&hl_client_key,
);
let ct_false = FheBool::encrypt(
HL_SQUASHED_NOISE_BOOL_FALSE_CIPHERTEXT_TEST.clear_value,
&hl_client_key,
);
let ct_true = FheBool::encrypt(
HL_SQUASHED_NOISE_BOOL_TRUE_CIPHERTEXT_TEST.clear_value,
&hl_client_key,
);
let ct_unsigned = ct_unsigned.squash_noise().unwrap();
let ct_signed = ct_signed.squash_noise().unwrap();
let ct_false = ct_false.squash_noise().unwrap();
let ct_true = ct_true.squash_noise().unwrap();
store_versioned_test(
&hl_client_key,
&dir,
&HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST.test_filename,
);
store_versioned_test(
&hl_server_key,
&dir,
&HL_SERVERKEY_MS_NOISE_REDUCTION_TEST.test_filename,
);
store_versioned_test(
&ct_unsigned,
&dir,
&HL_SQUASHED_NOISE_UNSIGNED_CIPHERTEXT_TEST.test_filename,
);
store_versioned_test(
&ct_signed,
&dir,
&HL_SQUASHED_NOISE_SIGNED_CIPHERTEXT_TEST.test_filename,
);
store_versioned_test(
&ct_false,
&dir,
&HL_SQUASHED_NOISE_BOOL_FALSE_CIPHERTEXT_TEST.test_filename,
);
store_versioned_test(
&ct_true,
&dir,
&HL_SQUASHED_NOISE_BOOL_TRUE_CIPHERTEXT_TEST.test_filename,
);
let compressed_hl_server_key = CompressedServerKey::new(&hl_client_key);
store_versioned_test(
&compressed_hl_server_key,
&dir,
&HL_SERVERKEY_MS_NOISE_REDUCTION_COMPRESSED_TEST.test_filename,
);
vec![
TestMetadata::HlClientKey(HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST),
TestMetadata::HlServerKey(HL_SERVERKEY_MS_NOISE_REDUCTION_TEST),
TestMetadata::HlServerKey(HL_SERVERKEY_MS_NOISE_REDUCTION_COMPRESSED_TEST),
TestMetadata::HlSquashedNoiseUnsignedCiphertext(
HL_SQUASHED_NOISE_UNSIGNED_CIPHERTEXT_TEST,
),
TestMetadata::HlSquashedNoiseSignedCiphertext(HL_SQUASHED_NOISE_SIGNED_CIPHERTEXT_TEST),
TestMetadata::HlSquashedNoiseBoolCiphertext(
HL_SQUASHED_NOISE_BOOL_FALSE_CIPHERTEXT_TEST,
),
TestMetadata::HlSquashedNoiseBoolCiphertext(
HL_SQUASHED_NOISE_BOOL_TRUE_CIPHERTEXT_TEST,
),
]
}
}

View File

@@ -0,0 +1,36 @@
use std::fs::remove_dir_all;
use std::path::PathBuf;
use clap::Parser;
use generate_1_1::V1_1;
use tfhe_backward_compat_data::dir_for_version;
use tfhe_backward_compat_data::generate::{
display_metadata, gen_all_data, update_metadata_for_version,
};
#[derive(Parser, Debug)]
struct Args {
/// The path where the backward data should be stored
#[arg(long)]
data_path: PathBuf,
/// Output metadata to stdout instead of writing them to the ron file
#[arg(long, action)]
stdout: bool,
}
fn main() {
let args = Args::parse();
let version_dir = dir_for_version(&args.data_path, "1.1");
    // Ignore the error if the directory does not exist
let _ = remove_dir_all(&version_dir);
let data = gen_all_data::<V1_1>(&args.data_path);
if args.stdout {
display_metadata(&data);
} else {
update_metadata_for_version(data, args.data_path);
}
}

View File

@@ -0,0 +1,256 @@
use std::path::Path;
use tfhe::core_crypto::prelude::{
CiphertextModulusLog, LweCiphertextCount, TUniform, UnsignedInteger,
};
use tfhe::shortint::parameters::{
CarryModulus, CiphertextModulus, ClassicPBSParameters, CompressionParameters,
CoreCiphertextModulus, DecompositionBaseLog, DecompositionLevelCount, DynamicDistribution,
EncryptionKeyChoice, GlweDimension, LweBskGroupingFactor, LweDimension, MaxNoiseLevel,
MessageModulus, ModulusSwitchNoiseReductionParams, NoiseEstimationMeasureBound,
NoiseSquashingParameters, PolynomialSize, RSigmaFactor, StandardDev, Variance,
};
use tfhe::shortint::{MultiBitPBSParameters, PBSParameters};
use tfhe_versionable::Versionize;
use tfhe_backward_compat_data::generate::*;
use tfhe_backward_compat_data::*;
pub(crate) fn store_versioned_test<Data: Versionize + 'static, P: AsRef<Path>>(
msg: &Data,
dir: P,
test_filename: &str,
) {
generic_store_versioned_test(Versionize::versionize, msg, dir, test_filename)
}
#[allow(dead_code)]
pub(crate) fn store_versioned_auxiliary<Data: Versionize + 'static, P: AsRef<Path>>(
msg: &Data,
dir: P,
test_filename: &str,
) {
generic_store_versioned_auxiliary(Versionize::versionize, msg, dir, test_filename)
}
/// This trait allows converting the version-independent parameter types defined in
/// `tfhe-backward-compat-data` into the equivalent TFHE-rs parameters for this version.
///
/// This is similar to `Into`, but circumvents the orphan rule.
pub(crate) trait ConvertParams<TfheRsParams> {
fn convert(self) -> TfheRsParams;
}
impl<Scalar> ConvertParams<DynamicDistribution<Scalar>> for TestDistribution
where
Scalar: UnsignedInteger,
{
fn convert(self) -> DynamicDistribution<Scalar> {
match self {
TestDistribution::Gaussian { stddev } => {
DynamicDistribution::new_gaussian_from_std_dev(StandardDev(stddev))
}
TestDistribution::TUniform { bound_log2 } => {
DynamicDistribution::TUniform(TUniform::new(bound_log2))
}
}
}
}
impl ConvertParams<ModulusSwitchNoiseReductionParams> for TestModulusSwitchNoiseReductionParams {
fn convert(self) -> ModulusSwitchNoiseReductionParams {
let TestModulusSwitchNoiseReductionParams {
modulus_switch_zeros_count,
ms_bound,
ms_r_sigma_factor,
ms_input_variance,
} = self;
ModulusSwitchNoiseReductionParams {
modulus_switch_zeros_count: LweCiphertextCount(modulus_switch_zeros_count),
ms_bound: NoiseEstimationMeasureBound(ms_bound),
ms_r_sigma_factor: RSigmaFactor(ms_r_sigma_factor),
ms_input_variance: Variance(ms_input_variance),
}
}
}
impl ConvertParams<Option<ModulusSwitchNoiseReductionParams>> for TestModulusSwitchType {
fn convert(self) -> Option<ModulusSwitchNoiseReductionParams> {
match self {
TestModulusSwitchType::Standard => None,
TestModulusSwitchType::DriftTechniqueNoiseReduction(
test_modulus_switch_noise_reduction_params,
) => Some(test_modulus_switch_noise_reduction_params.convert()),
TestModulusSwitchType::CenteredMeanNoiseReduction => panic!("Not supported"),
}
}
}
impl ConvertParams<ClassicPBSParameters> for TestClassicParameterSet {
fn convert(self) -> ClassicPBSParameters {
let TestClassicParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
encryption_key_choice,
modulus_switch_noise_reduction_params,
} = self;
ClassicPBSParameters {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.convert(),
glwe_noise_distribution: glwe_noise_distribution.convert(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
max_noise_level: MaxNoiseLevel::new(max_noise_level as u64),
log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
modulus_switch_noise_reduction_params: modulus_switch_noise_reduction_params.convert(),
}
}
}
impl ConvertParams<MultiBitPBSParameters> for TestMultiBitParameterSet {
fn convert(self) -> MultiBitPBSParameters {
let TestMultiBitParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
encryption_key_choice,
grouping_factor,
} = self;
MultiBitPBSParameters {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.convert(),
glwe_noise_distribution: glwe_noise_distribution.convert(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
max_noise_level: MaxNoiseLevel::new(max_noise_level as u64),
log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
grouping_factor: LweBskGroupingFactor(grouping_factor),
deterministic_execution: false,
}
}
}
impl ConvertParams<PBSParameters> for TestParameterSet {
fn convert(self) -> PBSParameters {
match self {
TestParameterSet::TestClassicParameterSet(test_classic_parameter_set) => {
PBSParameters::PBS(test_classic_parameter_set.convert())
}
TestParameterSet::TestMultiBitParameterSet(test_parameter_set_multi_bit) => {
PBSParameters::MultiBitPBS(test_parameter_set_multi_bit.convert())
}
TestParameterSet::TestKS32ParameterSet(_) => {
panic!("unsupported ks32 parameters for version")
}
}
}
}
impl ConvertParams<CompressionParameters> for TestCompressionParameterSet {
fn convert(self) -> CompressionParameters {
let TestCompressionParameterSet {
br_level,
br_base_log,
packing_ks_level,
packing_ks_base_log,
packing_ks_polynomial_size,
packing_ks_glwe_dimension,
lwe_per_glwe,
storage_log_modulus,
packing_ks_key_noise_distribution,
} = self;
CompressionParameters {
br_level: DecompositionLevelCount(br_level),
br_base_log: DecompositionBaseLog(br_base_log),
packing_ks_level: DecompositionLevelCount(packing_ks_level),
packing_ks_base_log: DecompositionBaseLog(packing_ks_base_log),
packing_ks_polynomial_size: PolynomialSize(packing_ks_polynomial_size),
packing_ks_glwe_dimension: GlweDimension(packing_ks_glwe_dimension),
lwe_per_glwe: LweCiphertextCount(lwe_per_glwe),
storage_log_modulus: CiphertextModulusLog(storage_log_modulus),
packing_ks_key_noise_distribution: packing_ks_key_noise_distribution.convert(),
}
}
}
impl ConvertParams<NoiseSquashingParameters> for TestNoiseSquashingParams {
fn convert(self) -> NoiseSquashingParameters {
let TestNoiseSquashingParams {
glwe_dimension,
polynomial_size,
glwe_noise_distribution,
decomp_base_log,
decomp_level_count,
modulus_switch_noise_reduction_params,
message_modulus,
carry_modulus,
ciphertext_modulus,
} = self;
NoiseSquashingParameters {
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
glwe_noise_distribution: glwe_noise_distribution.convert(),
decomp_base_log: DecompositionBaseLog(decomp_base_log),
decomp_level_count: DecompositionLevelCount(decomp_level_count),
modulus_switch_noise_reduction_params: modulus_switch_noise_reduction_params.convert(),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
ciphertext_modulus: CoreCiphertextModulus::try_new(ciphertext_modulus).unwrap(),
}
}
}

View File

@@ -0,0 +1,20 @@
[package]
name = "generate_1_3"
edition = "2024"
license.workspace = true
version.workspace = true
[dependencies]
clap.workspace = true
# TFHE-rs
tfhe = { git = "https://github.com/zama-ai/tfhe-rs.git", tag = "tfhe-rs-1.3.0", features = [
"boolean",
"integer",
"shortint",
"zk-pok",
"experimental-force_fft_algo_dif4",
] }
tfhe-versionable = { git = "https://github.com/zama-ai/tfhe-rs.git", tag = "tfhe-rs-1.3.0" }
tfhe-backward-compat-data = { path = "../.." }

View File

@@ -0,0 +1,327 @@
mod utils;
use tfhe::boolean::engine::BooleanEngine;
use tfhe::boolean::prelude::LweDimension;
use tfhe::core_crypto::commons::generators::DeterministicSeeder;
use tfhe::core_crypto::commons::math::random::RandomGenerator;
use tfhe::core_crypto::prelude::{CiphertextModulus, DefaultRandomGenerator, TUniform};
use tfhe::prelude::{FheEncrypt, SquashNoise};
use tfhe::shortint::engine::ShortintEngine;
use tfhe::shortint::parameters::LweCiphertextCount;
use tfhe::zk::{CompactPkeCrs, ZkComputeLoad, ZkMSBZeroPaddingBitCount};
use tfhe::{
ClientKey, CompactPublicKey, CompressedServerKey, CompressedSquashedNoiseCiphertextList,
FheBool, FheInt32, FheUint8, FheUint32, ProvenCompactCiphertextList, Seed, ServerKey,
set_server_key,
};
use utils::*;
use std::borrow::Cow;
use std::fs::create_dir_all;
use std::path::Path;
use tfhe_backward_compat_data::generate::*;
use tfhe_backward_compat_data::*;
const ZK_PKE_CRS_TEST: ZkPkePublicParamsTest = ZkPkePublicParamsTest {
test_filename: Cow::Borrowed("zk_pke_crs"),
lwe_dimension: VALID_TEST_PARAMS_TUNIFORM.polynomial_size()
* VALID_TEST_PARAMS_TUNIFORM.glwe_dimension(), // Lwe dimension of the "big" key is glwe dimension * polynomial size
max_num_cleartext: 16,
noise_bound: match VALID_TEST_PARAMS_TUNIFORM.lwe_noise_distribution() {
TestDistribution::Gaussian { .. } => unreachable!(),
TestDistribution::TUniform { bound_log2 } => bound_log2 as usize,
},
ciphertext_modulus: VALID_TEST_PARAMS_TUNIFORM.ciphertext_modulus(),
plaintext_modulus: VALID_TEST_PARAMS_TUNIFORM.message_modulus()
* VALID_TEST_PARAMS_TUNIFORM.carry_modulus()
* 2, // *2 for padding bit
padding_bit_count: 1,
};
const HL_PROVEN_COMPACTLIST_TEST_ZKV2_FASTHASH: HlHeterogeneousCiphertextListTest =
HlHeterogeneousCiphertextListTest {
test_filename: Cow::Borrowed("hl_proven_heterogeneous_list_zkv2_fasthash"),
key_filename: Cow::Borrowed("client_key"),
clear_values: Cow::Borrowed(&[17u8 as u64, -12i8 as u64, false as u64, true as u64]),
data_kinds: Cow::Borrowed(&[
DataKind::Unsigned,
DataKind::Signed,
DataKind::Bool,
DataKind::Bool,
]),
compressed: false,
proof_info: Some(PkeZkProofAuxiliaryInfo {
public_key_filename: Cow::Borrowed("public_key"),
params_filename: ZK_PKE_CRS_TEST.test_filename,
metadata: Cow::Borrowed("2vdrawkcab"),
}),
};
const HL_CLIENTKEY_MS_MEAN_COMPENSATION: HlClientKeyTest = HlClientKeyTest {
test_filename: Cow::Borrowed("client_key_ms_mean_compensation"),
parameters: INSECURE_SMALL_TEST_PARAMS_MS_MEAN_COMPENSATION,
};
const HL_SERVERKEY_MS_MEAN_COMPENSATION: HlServerKeyTest = HlServerKeyTest {
test_filename: Cow::Borrowed("server_key_ms_mean_compensation"),
client_key_filename: Cow::Borrowed("client_key_ms_mean_compensation.cbor"),
rerand_cpk_filename: None,
compressed: false,
};
const HL_COMPRESSED_SQUASHED_NOISE_CIPHERTEXT_LIST: HlCompressedSquashedNoiseCiphertextListTest =
HlCompressedSquashedNoiseCiphertextListTest {
test_filename: Cow::Borrowed("hl_compressed_squashed_noise_ciphertext_list"),
key_filename: Cow::Borrowed("client_key_with_noise_squashing"),
clear_values: Cow::Borrowed(&[
54679568u32 as u64,
-12396372i32 as u64,
12396372i32 as u64,
false as u64,
true as u64,
]),
data_kinds: Cow::Borrowed(&[
DataKind::Unsigned,
DataKind::Signed,
DataKind::Signed,
DataKind::Bool,
DataKind::Bool,
]),
};
const CLIENT_KEY_KS32_FILENAME: &str = "client_key_ks32";
const CLIENT_KEY_KS32_TEST: HlClientKeyTest = HlClientKeyTest {
test_filename: Cow::Borrowed(CLIENT_KEY_KS32_FILENAME),
parameters: VALID_TEST_PARAMS_KS32_TUNIFORM,
};
const SERVER_KEY_KS32_TEST: HlServerKeyTest = HlServerKeyTest {
test_filename: Cow::Borrowed("server_key_ks32"),
client_key_filename: Cow::Borrowed(CLIENT_KEY_KS32_FILENAME),
rerand_cpk_filename: None,
compressed: false,
};
const COMPRESSED_SERVER_KEY_KS32_TEST: HlServerKeyTest = HlServerKeyTest {
test_filename: Cow::Borrowed("compressed_server_key_ks32"),
client_key_filename: Cow::Borrowed(CLIENT_KEY_KS32_FILENAME),
rerand_cpk_filename: None,
compressed: true,
};
const CT_KS32_TEST: HlCiphertextTest = HlCiphertextTest {
test_filename: Cow::Borrowed("ct_ks32"),
key_filename: Cow::Borrowed(CLIENT_KEY_KS32_FILENAME),
compressed: false,
clear_value: 25,
};
pub struct V1_3;
impl TfhersVersion for V1_3 {
const VERSION_NUMBER: &'static str = "1.3";
fn seed_prng(seed: u128) {
let mut seeder = DeterministicSeeder::<DefaultRandomGenerator>::new(Seed(seed));
let shortint_engine = ShortintEngine::new_from_seeder(&mut seeder);
ShortintEngine::with_thread_local_mut(|local_engine| {
let _ = std::mem::replace(local_engine, shortint_engine);
});
let boolean_engine = BooleanEngine::new_from_seeder(&mut seeder);
BooleanEngine::replace_thread_local(boolean_engine);
}
fn gen_shortint_data<P: AsRef<Path>>(_base_data_dir: P) -> Vec<TestMetadata> {
Vec::new()
}
fn gen_hl_data<P: AsRef<Path>>(base_data_dir: P) -> Vec<TestMetadata> {
let dir = Self::data_dir(base_data_dir).join(HL_MODULE_NAME);
create_dir_all(&dir).unwrap();
let mut zk_rng: RandomGenerator<DefaultRandomGenerator> =
RandomGenerator::new(Seed(PRNG_SEED));
// Generate a compact public key needed to create a compact list
let config =
tfhe::ConfigBuilder::with_custom_parameters(VALID_TEST_PARAMS_TUNIFORM.convert())
.build();
let hl_client_key = ClientKey::generate(config);
let hl_server_key = ServerKey::new(&hl_client_key);
set_server_key(hl_server_key.clone());
let compact_pub_key = CompactPublicKey::new(&hl_client_key);
let crs = CompactPkeCrs::new(
LweDimension(ZK_PKE_CRS_TEST.lwe_dimension),
LweCiphertextCount(ZK_PKE_CRS_TEST.max_num_cleartext),
TUniform::<u64>::new(ZK_PKE_CRS_TEST.noise_bound as u32),
CiphertextModulus::new(ZK_PKE_CRS_TEST.ciphertext_modulus),
ZK_PKE_CRS_TEST.plaintext_modulus as u64,
ZkMSBZeroPaddingBitCount(ZK_PKE_CRS_TEST.padding_bit_count as u64),
&mut zk_rng,
)
.unwrap();
// Store the crs
store_versioned_auxiliary(&crs, &dir, &ZK_PKE_CRS_TEST.test_filename);
// Store the associated client key to be able to decrypt the ciphertexts in the list
store_versioned_auxiliary(
&hl_client_key,
&dir,
&HL_PROVEN_COMPACTLIST_TEST_ZKV2_FASTHASH.key_filename,
);
store_versioned_auxiliary(
&compact_pub_key,
&dir,
&HL_PROVEN_COMPACTLIST_TEST_ZKV2_FASTHASH
.proof_info
.unwrap()
.public_key_filename,
);
let mut proven_builder = ProvenCompactCiphertextList::builder(&compact_pub_key);
proven_builder
.push(HL_PROVEN_COMPACTLIST_TEST_ZKV2_FASTHASH.clear_values[0] as u8)
.push(HL_PROVEN_COMPACTLIST_TEST_ZKV2_FASTHASH.clear_values[1] as i8)
.push(HL_PROVEN_COMPACTLIST_TEST_ZKV2_FASTHASH.clear_values[2] != 0)
.push(HL_PROVEN_COMPACTLIST_TEST_ZKV2_FASTHASH.clear_values[3] != 0);
let proven_list_packed = proven_builder
.build_with_proof_packed(
&crs,
HL_PROVEN_COMPACTLIST_TEST_ZKV2_FASTHASH
.proof_info
.unwrap()
.metadata
.as_bytes(),
ZkComputeLoad::Verify,
)
.unwrap();
store_versioned_test(
&proven_list_packed,
&dir,
&HL_PROVEN_COMPACTLIST_TEST_ZKV2_FASTHASH.test_filename,
);
let config = tfhe::ConfigBuilder::with_custom_parameters(
HL_CLIENTKEY_MS_MEAN_COMPENSATION.parameters.convert(),
)
.build();
let (hl_client_key, hl_server_key) = tfhe::generate_keys(config);
store_versioned_test(
&hl_client_key,
&dir,
&HL_CLIENTKEY_MS_MEAN_COMPENSATION.test_filename,
);
store_versioned_test(
&hl_server_key,
&dir,
&HL_SERVERKEY_MS_MEAN_COMPENSATION.test_filename,
);
// Generate data for the squashed noise compressed ciphertext list
{
let config = tfhe::ConfigBuilder::with_custom_parameters(
INSECURE_SMALL_TEST_PARAMS_MS_NOISE_REDUCTION.convert(),
)
.enable_noise_squashing(
INSECURE_SMALL_TEST_NOISE_SQUASHING_PARAMS_MS_NOISE_REDUCTION.convert(),
)
.enable_noise_squashing_compression(TEST_PARAMS_NOISE_SQUASHING_COMPRESSION.convert())
.build();
let hl_client_key = ClientKey::generate(config);
let hl_server_key = ServerKey::new(&hl_client_key);
set_server_key(hl_server_key.clone());
let input_a = FheUint32::encrypt(
HL_COMPRESSED_SQUASHED_NOISE_CIPHERTEXT_LIST.clear_values[0] as u32,
&hl_client_key,
);
let input_b = FheInt32::encrypt(
HL_COMPRESSED_SQUASHED_NOISE_CIPHERTEXT_LIST.clear_values[1] as i32,
&hl_client_key,
);
let input_c = FheInt32::encrypt(
HL_COMPRESSED_SQUASHED_NOISE_CIPHERTEXT_LIST.clear_values[2] as i32,
&hl_client_key,
);
let input_d = FheBool::encrypt(
HL_COMPRESSED_SQUASHED_NOISE_CIPHERTEXT_LIST.clear_values[3] != 0,
&hl_client_key,
);
let input_e = FheBool::encrypt(
HL_COMPRESSED_SQUASHED_NOISE_CIPHERTEXT_LIST.clear_values[4] != 0,
&hl_client_key,
);
let ns_a = input_a.squash_noise().unwrap();
let ns_b = input_b.squash_noise().unwrap();
let ns_c = input_c.squash_noise().unwrap();
let ns_d = input_d.squash_noise().unwrap();
let ns_e = input_e.squash_noise().unwrap();
let compressed_list = CompressedSquashedNoiseCiphertextList::builder()
.push(ns_a)
.push(ns_b)
.push(ns_c)
.push(ns_d)
.push(ns_e)
.build()
.unwrap();
store_versioned_auxiliary(
&hl_client_key,
&dir,
&HL_COMPRESSED_SQUASHED_NOISE_CIPHERTEXT_LIST.key_filename,
);
store_versioned_test(
&compressed_list,
&dir,
&HL_COMPRESSED_SQUASHED_NOISE_CIPHERTEXT_LIST.test_filename,
);
};
        // Generate data for the KS32 atomic pattern (AP)
{
let config = tfhe::ConfigBuilder::default()
.use_custom_parameters(CLIENT_KEY_KS32_TEST.parameters.convert())
.build();
let hl_client_key = ClientKey::generate(config);
let compressed_server_key = CompressedServerKey::new(&hl_client_key);
let hl_server_key = compressed_server_key.decompress();
let ct = FheUint8::encrypt(CT_KS32_TEST.clear_value, &hl_client_key);
store_versioned_test(&hl_client_key, &dir, &CLIENT_KEY_KS32_TEST.test_filename);
store_versioned_test(&hl_server_key, &dir, &SERVER_KEY_KS32_TEST.test_filename);
store_versioned_test(
&compressed_server_key,
&dir,
&COMPRESSED_SERVER_KEY_KS32_TEST.test_filename,
);
store_versioned_test(&ct, &dir, &CT_KS32_TEST.test_filename);
}
vec![
TestMetadata::HlHeterogeneousCiphertextList(HL_PROVEN_COMPACTLIST_TEST_ZKV2_FASTHASH),
TestMetadata::HlClientKey(HL_CLIENTKEY_MS_MEAN_COMPENSATION),
TestMetadata::HlServerKey(HL_SERVERKEY_MS_MEAN_COMPENSATION),
TestMetadata::HlCompressedSquashedNoiseCiphertextList(
HL_COMPRESSED_SQUASHED_NOISE_CIPHERTEXT_LIST,
),
TestMetadata::HlClientKey(CLIENT_KEY_KS32_TEST),
TestMetadata::HlServerKey(SERVER_KEY_KS32_TEST),
TestMetadata::HlServerKey(COMPRESSED_SERVER_KEY_KS32_TEST),
TestMetadata::HlCiphertext(CT_KS32_TEST),
]
}
}

View File

@@ -0,0 +1,36 @@
use std::fs::remove_dir_all;
use std::path::PathBuf;
use clap::Parser;
use generate_1_3::V1_3;
use tfhe_backward_compat_data::dir_for_version;
use tfhe_backward_compat_data::generate::{
display_metadata, gen_all_data, update_metadata_for_version,
};
#[derive(Parser, Debug)]
struct Args {
/// The path where the backward data should be stored
#[arg(long)]
data_path: PathBuf,
/// Output metadata to stdout instead of writing them to the ron file
#[arg(long, action)]
stdout: bool,
}
fn main() {
let args = Args::parse();
let version_dir = dir_for_version(&args.data_path, "1.3");
    // Ignore the error if the directory does not exist
let _ = remove_dir_all(&version_dir);
let data = gen_all_data::<V1_3>(&args.data_path);
if args.stdout {
display_metadata(&data);
} else {
update_metadata_for_version(data, args.data_path);
}
}

View File

@@ -0,0 +1,338 @@
use std::path::Path;
use tfhe::core_crypto::prelude::{
CiphertextModulusLog, LweCiphertextCount, TUniform, UnsignedInteger,
};
use tfhe::shortint::parameters::{
CarryModulus, CiphertextModulus, CiphertextModulus32, ClassicPBSParameters,
CompressionParameters, CoreCiphertextModulus, DecompositionBaseLog, DecompositionLevelCount,
DynamicDistribution, EncryptionKeyChoice, GlweDimension, KeySwitch32PBSParameters,
LweBskGroupingFactor, LweDimension, MaxNoiseLevel, MessageModulus,
ModulusSwitchNoiseReductionParams, NoiseEstimationMeasureBound,
NoiseSquashingCompressionParameters, NoiseSquashingParameters, PolynomialSize, RSigmaFactor,
StandardDev, Variance,
};
use tfhe::shortint::prelude::ModulusSwitchType;
use tfhe::shortint::{AtomicPatternParameters, MultiBitPBSParameters};
use tfhe_versionable::Versionize;
use tfhe_backward_compat_data::generate::*;
use tfhe_backward_compat_data::*;
pub(crate) fn store_versioned_test<Data: Versionize + 'static, P: AsRef<Path>>(
msg: &Data,
dir: P,
test_filename: &str,
) {
generic_store_versioned_test(Versionize::versionize, msg, dir, test_filename)
}
#[allow(dead_code)]
pub(crate) fn store_versioned_auxiliary<Data: Versionize + 'static, P: AsRef<Path>>(
msg: &Data,
dir: P,
test_filename: &str,
) {
generic_store_versioned_auxiliary(Versionize::versionize, msg, dir, test_filename)
}
/// This trait converts the version-independent parameter types defined in
/// `tfhe-backward-compat-data` into the equivalent TFHE-rs parameters for this version.
///
/// It is similar to `Into`, but being defined locally lets us circumvent the orphan rule.
pub(crate) trait ConvertParams<TfheRsParams> {
fn convert(self) -> TfheRsParams;
}
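// Illustrative sketch only (kept as a comment, not compiled): a local trait lets us write
// conversions where both sides are foreign types, which the orphan rule would reject for a
// direct `impl From<TestDistribution> for DynamicDistribution<u64>` since neither type is
// defined in this crate. A hypothetical impl would look like:
//
// impl ConvertParams<LweDimension> for usize {
//     fn convert(self) -> LweDimension {
//         LweDimension(self)
//     }
// }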
impl<Scalar> ConvertParams<DynamicDistribution<Scalar>> for TestDistribution
where
Scalar: UnsignedInteger,
{
fn convert(self) -> DynamicDistribution<Scalar> {
match self {
TestDistribution::Gaussian { stddev } => {
DynamicDistribution::new_gaussian_from_std_dev(StandardDev(stddev))
}
TestDistribution::TUniform { bound_log2 } => {
DynamicDistribution::TUniform(TUniform::new(bound_log2))
}
}
}
}
impl ConvertParams<ModulusSwitchNoiseReductionParams> for TestModulusSwitchNoiseReductionParams {
fn convert(self) -> ModulusSwitchNoiseReductionParams {
let TestModulusSwitchNoiseReductionParams {
modulus_switch_zeros_count,
ms_bound,
ms_r_sigma_factor,
ms_input_variance,
} = self;
ModulusSwitchNoiseReductionParams {
modulus_switch_zeros_count: LweCiphertextCount(modulus_switch_zeros_count),
ms_bound: NoiseEstimationMeasureBound(ms_bound),
ms_r_sigma_factor: RSigmaFactor(ms_r_sigma_factor),
ms_input_variance: Variance(ms_input_variance),
}
}
}
impl ConvertParams<ModulusSwitchType> for TestModulusSwitchType {
fn convert(self) -> ModulusSwitchType {
match self {
TestModulusSwitchType::Standard => ModulusSwitchType::Standard,
TestModulusSwitchType::DriftTechniqueNoiseReduction(
test_modulus_switch_noise_reduction_params,
) => ModulusSwitchType::DriftTechniqueNoiseReduction(
test_modulus_switch_noise_reduction_params.convert(),
),
TestModulusSwitchType::CenteredMeanNoiseReduction => {
ModulusSwitchType::CenteredMeanNoiseReduction
}
}
}
}
impl ConvertParams<ClassicPBSParameters> for TestClassicParameterSet {
fn convert(self) -> ClassicPBSParameters {
let TestClassicParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
encryption_key_choice,
modulus_switch_noise_reduction_params,
} = self;
ClassicPBSParameters {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.convert(),
glwe_noise_distribution: glwe_noise_distribution.convert(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
max_noise_level: MaxNoiseLevel::new(max_noise_level as u64),
log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
modulus_switch_noise_reduction_params: modulus_switch_noise_reduction_params.convert(),
}
}
}
impl ConvertParams<MultiBitPBSParameters> for TestMultiBitParameterSet {
fn convert(self) -> MultiBitPBSParameters {
let TestMultiBitParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
encryption_key_choice,
grouping_factor,
} = self;
MultiBitPBSParameters {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.convert(),
glwe_noise_distribution: glwe_noise_distribution.convert(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
max_noise_level: MaxNoiseLevel::new(max_noise_level as u64),
log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
grouping_factor: LweBskGroupingFactor(grouping_factor),
deterministic_execution: false,
}
}
}
impl ConvertParams<KeySwitch32PBSParameters> for TestKS32ParameterSet {
fn convert(self) -> KeySwitch32PBSParameters {
let TestKS32ParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
modulus_switch_noise_reduction_params,
post_keyswitch_ciphertext_modulus,
} = self;
KeySwitch32PBSParameters {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.convert(),
glwe_noise_distribution: glwe_noise_distribution.convert(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
max_noise_level: MaxNoiseLevel::new(max_noise_level as u64),
log2_p_fail,
post_keyswitch_ciphertext_modulus: CiphertextModulus32::try_new(
post_keyswitch_ciphertext_modulus,
)
.unwrap(),
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
modulus_switch_noise_reduction_params: modulus_switch_noise_reduction_params.convert(),
}
}
}
impl ConvertParams<AtomicPatternParameters> for TestParameterSet {
fn convert(self) -> AtomicPatternParameters {
match self {
TestParameterSet::TestClassicParameterSet(test_classic_parameter_set) => {
AtomicPatternParameters::Standard(test_classic_parameter_set.convert().into())
}
TestParameterSet::TestMultiBitParameterSet(test_parameter_set_multi_bit) => {
AtomicPatternParameters::Standard(test_parameter_set_multi_bit.convert().into())
}
TestParameterSet::TestKS32ParameterSet(test_parameter_set_ks32) => {
AtomicPatternParameters::KeySwitch32(test_parameter_set_ks32.convert())
}
}
}
}
impl ConvertParams<CompressionParameters> for TestCompressionParameterSet {
fn convert(self) -> CompressionParameters {
let TestCompressionParameterSet {
br_level,
br_base_log,
packing_ks_level,
packing_ks_base_log,
packing_ks_polynomial_size,
packing_ks_glwe_dimension,
lwe_per_glwe,
storage_log_modulus,
packing_ks_key_noise_distribution,
} = self;
CompressionParameters {
br_level: DecompositionLevelCount(br_level),
br_base_log: DecompositionBaseLog(br_base_log),
packing_ks_level: DecompositionLevelCount(packing_ks_level),
packing_ks_base_log: DecompositionBaseLog(packing_ks_base_log),
packing_ks_polynomial_size: PolynomialSize(packing_ks_polynomial_size),
packing_ks_glwe_dimension: GlweDimension(packing_ks_glwe_dimension),
lwe_per_glwe: LweCiphertextCount(lwe_per_glwe),
storage_log_modulus: CiphertextModulusLog(storage_log_modulus),
packing_ks_key_noise_distribution: packing_ks_key_noise_distribution.convert(),
}
}
}
impl ConvertParams<NoiseSquashingParameters> for TestNoiseSquashingParams {
fn convert(self) -> NoiseSquashingParameters {
let TestNoiseSquashingParams {
glwe_dimension,
polynomial_size,
glwe_noise_distribution,
decomp_base_log,
decomp_level_count,
modulus_switch_noise_reduction_params,
message_modulus,
carry_modulus,
ciphertext_modulus,
} = self;
NoiseSquashingParameters {
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
glwe_noise_distribution: glwe_noise_distribution.convert(),
decomp_base_log: DecompositionBaseLog(decomp_base_log),
decomp_level_count: DecompositionLevelCount(decomp_level_count),
modulus_switch_noise_reduction_params: modulus_switch_noise_reduction_params.convert(),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
ciphertext_modulus: CoreCiphertextModulus::try_new(ciphertext_modulus).unwrap(),
}
}
}
impl ConvertParams<NoiseSquashingCompressionParameters>
for TestNoiseSquashingCompressionParameters
{
fn convert(self) -> NoiseSquashingCompressionParameters {
let TestNoiseSquashingCompressionParameters {
packing_ks_level,
packing_ks_base_log,
packing_ks_polynomial_size,
packing_ks_glwe_dimension,
lwe_per_glwe,
packing_ks_key_noise_distribution,
message_modulus,
carry_modulus,
ciphertext_modulus,
} = self;
NoiseSquashingCompressionParameters {
packing_ks_level: DecompositionLevelCount(packing_ks_level),
packing_ks_base_log: DecompositionBaseLog(packing_ks_base_log),
packing_ks_polynomial_size: PolynomialSize(packing_ks_polynomial_size),
packing_ks_glwe_dimension: GlweDimension(packing_ks_glwe_dimension),
lwe_per_glwe: LweCiphertextCount(lwe_per_glwe),
packing_ks_key_noise_distribution: packing_ks_key_noise_distribution.convert(),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
ciphertext_modulus: CoreCiphertextModulus::try_new(ciphertext_modulus).unwrap(),
}
}
}

View File

@@ -0,0 +1,20 @@
[package]
name = "generate_1_4"
edition = "2024"
license.workspace = true
version.workspace = true
[dependencies]
clap.workspace = true
# TFHE-rs
tfhe = { git = "https://github.com/zama-ai/tfhe-rs.git", tag = "tfhe-rs-1.4.1", features = [
"boolean",
"integer",
"shortint",
"zk-pok",
"experimental-force_fft_algo_dif4",
] }
tfhe-versionable = { git = "https://github.com/zama-ai/tfhe-rs.git", tag = "tfhe-rs-1.4.1" }
tfhe-backward-compat-data = { path = "../.." }

View File

@@ -0,0 +1,248 @@
mod utils;
use tfhe::boolean::engine::BooleanEngine;
use tfhe::core_crypto::commons::generators::DeterministicSeeder;
use tfhe::core_crypto::prelude::DefaultRandomGenerator;
use tfhe::prelude::{FheEncrypt, SquashNoise};
use tfhe::shortint::engine::ShortintEngine;
use tfhe::{
ClientKey, CompressedCompactPublicKey, CompressedServerKey, ConfigBuilder, FheUint32,
FheUint64, KVStore, Seed, ServerKey, set_server_key,
};
use utils::*;
use std::borrow::Cow;
use std::collections::HashMap;
use std::fs::create_dir_all;
use std::path::Path;
use tfhe_backward_compat_data::generate::*;
use tfhe_backward_compat_data::*;
const TEST_FILENAME: Cow<'static, str> = Cow::Borrowed("client_key_with_noise_squashing");
const HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST: HlClientKeyTest = HlClientKeyTest {
test_filename: TEST_FILENAME,
parameters: INSECURE_SMALL_TEST_PARAMS_MULTI_BIT,
};
const HL_SERVERKEY_TEST: HlServerKeyTest = HlServerKeyTest {
test_filename: Cow::Borrowed("server_key_with_noise_squashing"),
client_key_filename: TEST_FILENAME,
rerand_cpk_filename: None,
compressed: false,
};
const HL_SQUASHED_NOISE_UNSIGNED_CIPHERTEXT_TEST: HlSquashedNoiseUnsignedCiphertextTest =
HlSquashedNoiseUnsignedCiphertextTest {
test_filename: Cow::Borrowed("squashed_noise_unsigned_ciphertext"),
key_filename: TEST_FILENAME,
clear_value: 42,
};
const HL_SERVERKEY_RERAND_TEST: HlServerKeyTest = HlServerKeyTest {
test_filename: Cow::Borrowed("server_key_for_rerand"),
client_key_filename: Cow::Borrowed("client_key_for_rerand"),
rerand_cpk_filename: Some(Cow::Borrowed("cpk_for_rerand")),
compressed: false,
};
const HL_COMPRESSED_KV_STORE_TEST: HlCompressedKVStoreTest = HlCompressedKVStoreTest {
kv_store_file_name: Cow::Borrowed("compressed_kv_store"),
client_key_file_name: Cow::Borrowed("client_key_for_kv_store"),
server_key_file_name: Cow::Borrowed("server_key_for_kv_store"),
num_elements: 512,
};
const HL_SERVERKEY_KS32_NOISE_SQUASHING_TEST: HlServerKeyTest = HlServerKeyTest {
test_filename: Cow::Borrowed("server_key_ks32_noise_squashing"),
client_key_filename: Cow::Borrowed("client_key_ks32_noise_squashing"),
rerand_cpk_filename: None,
compressed: false,
};
const HL_COMPRESSED_SERVERKEY_KS32_NOISE_SQUASHING_TEST: HlServerKeyTest = HlServerKeyTest {
test_filename: Cow::Borrowed("compressed_server_key_ks32_noise_squashing"),
client_key_filename: Cow::Borrowed("client_key_ks32_noise_squashing"),
rerand_cpk_filename: None,
compressed: true,
};
pub struct V1_4;
impl TfhersVersion for V1_4 {
const VERSION_NUMBER: &'static str = "1.4";
fn seed_prng(seed: u128) {
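// Replace the thread-local shortint and boolean engines with deterministically
// seeded ones so that the generated data is reproducible.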
let mut seeder = DeterministicSeeder::<DefaultRandomGenerator>::new(Seed(seed));
let shortint_engine = ShortintEngine::new_from_seeder(&mut seeder);
ShortintEngine::with_thread_local_mut(|local_engine| {
let _ = std::mem::replace(local_engine, shortint_engine);
});
let boolean_engine = BooleanEngine::new_from_seeder(&mut seeder);
BooleanEngine::replace_thread_local(boolean_engine);
}
fn gen_shortint_data<P: AsRef<Path>>(_base_data_dir: P) -> Vec<TestMetadata> {
Vec::new()
}
fn gen_hl_data<P: AsRef<Path>>(base_data_dir: P) -> Vec<TestMetadata> {
let dir = Self::data_dir(base_data_dir).join(HL_MODULE_NAME);
create_dir_all(&dir).unwrap();
// Test noise squashing with multi-bit parameters
{
let config = ConfigBuilder::with_custom_parameters(
HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST.parameters.convert(),
)
.enable_noise_squashing(INSECURE_SMALL_TEST_NOISE_SQUASHING_PARAMS_MULTI_BIT.convert())
.build();
let hl_client_key = ClientKey::generate(config);
let hl_server_key = ServerKey::new(&hl_client_key);
set_server_key(hl_server_key.clone());
let input = FheUint32::encrypt(
HL_SQUASHED_NOISE_UNSIGNED_CIPHERTEXT_TEST.clear_value as u32,
&hl_client_key,
);
let ns = input.squash_noise().unwrap();
store_versioned_test(
&hl_client_key,
&dir,
&HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST.test_filename,
);
store_versioned_test(&hl_server_key, &dir, &HL_SERVERKEY_TEST.test_filename);
store_versioned_test(
&ns,
&dir,
&HL_SQUASHED_NOISE_UNSIGNED_CIPHERTEXT_TEST.test_filename,
);
}
// Test re-randomization
{
let params = INSECURE_SMALL_TEST_PARAMS_MS_MEAN_COMPENSATION;
let cpk_params = (
INSECURE_DEDICATED_CPK_TEST_PARAMS.convert(),
KS_TO_SMALL_TEST_PARAMS.convert(),
);
let re_rand_ks_params = KS_TO_BIG_TEST_PARAMS;
let config = ConfigBuilder::with_custom_parameters(params.convert())
.use_dedicated_compact_public_key_parameters(cpk_params)
.enable_ciphertext_re_randomization(re_rand_ks_params.convert())
.build();
let hl_client_key = ClientKey::generate(config);
let hl_server_key = ServerKey::new(&hl_client_key);
let hl_public_key = CompressedCompactPublicKey::new(&hl_client_key);
store_versioned_auxiliary(
&hl_client_key,
&dir,
&HL_SERVERKEY_RERAND_TEST.client_key_filename,
);
store_versioned_auxiliary(
&hl_public_key,
&dir,
&HL_SERVERKEY_RERAND_TEST.rerand_cpk_filename.unwrap(),
);
store_versioned_test(
&hl_server_key,
&dir,
&HL_SERVERKEY_RERAND_TEST.test_filename,
);
}
// Test CompressedKVStore
{
let config =
ConfigBuilder::with_custom_parameters(VALID_TEST_PARAMS_TUNIFORM.convert())
.enable_compression(VALID_TEST_PARAMS_TUNIFORM_COMPRESSION.convert())
.build();
let hl_client_key = ClientKey::generate(config);
let hl_server_key = ServerKey::new(&hl_client_key);
set_server_key(hl_server_key.clone());
let mut clear_store = HashMap::new();
let mut store = KVStore::new();
for key in 0..HL_COMPRESSED_KV_STORE_TEST.num_elements as u32 {
let value = u64::MAX - u64::from(key);
let encrypted = FheUint64::encrypt(value, &hl_client_key);
let _ = clear_store.insert(key, value);
let _ = store.insert_with_clear_key(key, encrypted);
}
let compressed_kv_store = store.compress().unwrap();
store_versioned_auxiliary(
&hl_client_key,
&dir,
&HL_COMPRESSED_KV_STORE_TEST.client_key_file_name,
);
store_versioned_auxiliary(
&hl_server_key,
&dir,
&HL_COMPRESSED_KV_STORE_TEST.server_key_file_name,
);
store_versioned_test(
&compressed_kv_store,
&dir,
&HL_COMPRESSED_KV_STORE_TEST.kv_store_file_name,
);
}
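// Test noise squashing with the KS32 atomic pattern (compressed and decompressed server keys)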
{
let config = tfhe::ConfigBuilder::with_custom_parameters(
INSECURE_SMALL_TEST_PARAMS_KS32.convert(),
)
.enable_noise_squashing(
INSECURE_SMALL_TEST_NOISE_SQUASHING_PARAMS_MS_NOISE_REDUCTION.convert(),
)
.build();
let client_key = ClientKey::generate(config);
let compressed_server_key = CompressedServerKey::new(&client_key);
let server_key = compressed_server_key.decompress();
store_versioned_auxiliary(
&client_key,
&dir,
&HL_SERVERKEY_KS32_NOISE_SQUASHING_TEST.client_key_filename,
);
store_versioned_test(
&compressed_server_key,
&dir,
&HL_COMPRESSED_SERVERKEY_KS32_NOISE_SQUASHING_TEST.test_filename,
);
store_versioned_test(
&server_key,
&dir,
&HL_SERVERKEY_KS32_NOISE_SQUASHING_TEST.test_filename,
);
}
vec![
TestMetadata::HlClientKey(HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST),
TestMetadata::HlServerKey(HL_SERVERKEY_TEST),
TestMetadata::HlSquashedNoiseUnsignedCiphertext(
HL_SQUASHED_NOISE_UNSIGNED_CIPHERTEXT_TEST,
),
TestMetadata::HlServerKey(HL_SERVERKEY_RERAND_TEST),
TestMetadata::HlCompressedKVStoreTest(HL_COMPRESSED_KV_STORE_TEST),
TestMetadata::HlServerKey(HL_COMPRESSED_SERVERKEY_KS32_NOISE_SQUASHING_TEST),
TestMetadata::HlServerKey(HL_SERVERKEY_KS32_NOISE_SQUASHING_TEST),
]
}
}

View File

@@ -0,0 +1,36 @@
use std::fs::remove_dir_all;
use std::path::PathBuf;
use clap::Parser;
use generate_1_4::V1_4;
use tfhe_backward_compat_data::dir_for_version;
use tfhe_backward_compat_data::generate::{
display_metadata, gen_all_data, update_metadata_for_version,
};
#[derive(Parser, Debug)]
struct Args {
/// The path where the backward data should be stored
#[arg(long)]
data_path: PathBuf,
/// Output the metadata to stdout instead of writing it to the ron file
#[arg(long, action)]
stdout: bool,
}
fn main() {
let args = Args::parse();
let version_dir = dir_for_version(&args.data_path, "1.4");
// Ignore the error if the directory does not exist
let _ = remove_dir_all(&version_dir);
let data = gen_all_data::<V1_4>(&args.data_path);
if args.stdout {
display_metadata(&data);
} else {
update_metadata_for_version(data, args.data_path);
}
}

View File

@@ -0,0 +1,407 @@
use std::path::Path;
use tfhe::core_crypto::prelude::{
CiphertextModulusLog, LweCiphertextCount, TUniform, UnsignedInteger,
};
use tfhe::shortint::parameters::noise_squashing::NoiseSquashingMultiBitParameters;
use tfhe::shortint::parameters::{
CarryModulus, CiphertextModulus, CiphertextModulus32, ClassicPBSParameters,
CompactCiphertextListExpansionKind, CompactPublicKeyEncryptionParameters,
CompressionParameters, CoreCiphertextModulus, DecompositionBaseLog, DecompositionLevelCount,
DynamicDistribution, EncryptionKeyChoice, GlweDimension, KeySwitch32PBSParameters,
LweBskGroupingFactor, LweDimension, MaxNoiseLevel, MessageModulus,
ModulusSwitchNoiseReductionParams, NoiseEstimationMeasureBound,
NoiseSquashingClassicParameters, NoiseSquashingCompressionParameters, NoiseSquashingParameters,
PolynomialSize, RSigmaFactor, ShortintKeySwitchingParameters, StandardDev,
SupportedCompactPkeZkScheme, Variance,
};
use tfhe::shortint::prelude::ModulusSwitchType;
use tfhe::shortint::{AtomicPatternParameters, MultiBitPBSParameters};
use tfhe_versionable::Versionize;
use tfhe_backward_compat_data::generate::*;
use tfhe_backward_compat_data::*;
pub(crate) fn store_versioned_test<Data: Versionize + 'static, P: AsRef<Path>>(
msg: &Data,
dir: P,
test_filename: &str,
) {
generic_store_versioned_test(Versionize::versionize, msg, dir, test_filename)
}
#[allow(dead_code)]
pub(crate) fn store_versioned_auxiliary<Data: Versionize + 'static, P: AsRef<Path>>(
msg: &Data,
dir: P,
test_filename: &str,
) {
generic_store_versioned_auxiliary(Versionize::versionize, msg, dir, test_filename)
}
/// This trait converts the version-independent parameter types defined in
/// `tfhe-backward-compat-data` into the equivalent TFHE-rs parameters for this version.
///
/// It is similar to `Into`, but being defined locally lets us circumvent the orphan rule.
pub(crate) trait ConvertParams<TfheRsParams> {
fn convert(self) -> TfheRsParams;
}
impl<Scalar> ConvertParams<DynamicDistribution<Scalar>> for TestDistribution
where
Scalar: UnsignedInteger,
{
fn convert(self) -> DynamicDistribution<Scalar> {
match self {
TestDistribution::Gaussian { stddev } => {
DynamicDistribution::new_gaussian_from_std_dev(StandardDev(stddev))
}
TestDistribution::TUniform { bound_log2 } => {
DynamicDistribution::TUniform(TUniform::new(bound_log2))
}
}
}
}
impl ConvertParams<ModulusSwitchNoiseReductionParams> for TestModulusSwitchNoiseReductionParams {
fn convert(self) -> ModulusSwitchNoiseReductionParams {
let TestModulusSwitchNoiseReductionParams {
modulus_switch_zeros_count,
ms_bound,
ms_r_sigma_factor,
ms_input_variance,
} = self;
ModulusSwitchNoiseReductionParams {
modulus_switch_zeros_count: LweCiphertextCount(modulus_switch_zeros_count),
ms_bound: NoiseEstimationMeasureBound(ms_bound),
ms_r_sigma_factor: RSigmaFactor(ms_r_sigma_factor),
ms_input_variance: Variance(ms_input_variance),
}
}
}
impl ConvertParams<ModulusSwitchType> for TestModulusSwitchType {
fn convert(self) -> ModulusSwitchType {
match self {
TestModulusSwitchType::Standard => ModulusSwitchType::Standard,
TestModulusSwitchType::DriftTechniqueNoiseReduction(
test_modulus_switch_noise_reduction_params,
) => ModulusSwitchType::DriftTechniqueNoiseReduction(
test_modulus_switch_noise_reduction_params.convert(),
),
TestModulusSwitchType::CenteredMeanNoiseReduction => {
ModulusSwitchType::CenteredMeanNoiseReduction
}
}
}
}
impl ConvertParams<ClassicPBSParameters> for TestClassicParameterSet {
fn convert(self) -> ClassicPBSParameters {
let TestClassicParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
encryption_key_choice,
modulus_switch_noise_reduction_params,
} = self;
ClassicPBSParameters {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.convert(),
glwe_noise_distribution: glwe_noise_distribution.convert(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
max_noise_level: MaxNoiseLevel::new(max_noise_level as u64),
log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
modulus_switch_noise_reduction_params: modulus_switch_noise_reduction_params.convert(),
}
}
}
impl ConvertParams<MultiBitPBSParameters> for TestMultiBitParameterSet {
fn convert(self) -> MultiBitPBSParameters {
let TestMultiBitParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
encryption_key_choice,
grouping_factor,
} = self;
MultiBitPBSParameters {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.convert(),
glwe_noise_distribution: glwe_noise_distribution.convert(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
max_noise_level: MaxNoiseLevel::new(max_noise_level as u64),
log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
grouping_factor: LweBskGroupingFactor(grouping_factor),
deterministic_execution: false,
}
}
}
impl ConvertParams<KeySwitch32PBSParameters> for TestKS32ParameterSet {
fn convert(self) -> KeySwitch32PBSParameters {
let TestKS32ParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
modulus_switch_noise_reduction_params,
post_keyswitch_ciphertext_modulus,
} = self;
KeySwitch32PBSParameters {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.convert(),
glwe_noise_distribution: glwe_noise_distribution.convert(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
max_noise_level: MaxNoiseLevel::new(max_noise_level as u64),
log2_p_fail,
post_keyswitch_ciphertext_modulus: CiphertextModulus32::try_new(
post_keyswitch_ciphertext_modulus,
)
.unwrap(),
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
modulus_switch_noise_reduction_params: modulus_switch_noise_reduction_params.convert(),
}
}
}
impl ConvertParams<AtomicPatternParameters> for TestParameterSet {
fn convert(self) -> AtomicPatternParameters {
match self {
TestParameterSet::TestClassicParameterSet(test_classic_parameter_set) => {
AtomicPatternParameters::Standard(test_classic_parameter_set.convert().into())
}
TestParameterSet::TestMultiBitParameterSet(test_parameter_set_multi_bit) => {
AtomicPatternParameters::Standard(test_parameter_set_multi_bit.convert().into())
}
TestParameterSet::TestKS32ParameterSet(test_parameter_set_ks32) => {
AtomicPatternParameters::KeySwitch32(test_parameter_set_ks32.convert())
}
}
}
}
impl ConvertParams<CompressionParameters> for TestCompressionParameterSet {
fn convert(self) -> CompressionParameters {
let TestCompressionParameterSet {
br_level,
br_base_log,
packing_ks_level,
packing_ks_base_log,
packing_ks_polynomial_size,
packing_ks_glwe_dimension,
lwe_per_glwe,
storage_log_modulus,
packing_ks_key_noise_distribution,
} = self;
CompressionParameters {
br_level: DecompositionLevelCount(br_level),
br_base_log: DecompositionBaseLog(br_base_log),
packing_ks_level: DecompositionLevelCount(packing_ks_level),
packing_ks_base_log: DecompositionBaseLog(packing_ks_base_log),
packing_ks_polynomial_size: PolynomialSize(packing_ks_polynomial_size),
packing_ks_glwe_dimension: GlweDimension(packing_ks_glwe_dimension),
lwe_per_glwe: LweCiphertextCount(lwe_per_glwe),
storage_log_modulus: CiphertextModulusLog(storage_log_modulus),
packing_ks_key_noise_distribution: packing_ks_key_noise_distribution.convert(),
}
}
}
impl ConvertParams<NoiseSquashingParameters> for TestNoiseSquashingParams {
fn convert(self) -> NoiseSquashingParameters {
let TestNoiseSquashingParams {
glwe_dimension,
polynomial_size,
glwe_noise_distribution,
decomp_base_log,
decomp_level_count,
modulus_switch_noise_reduction_params,
message_modulus,
carry_modulus,
ciphertext_modulus,
} = self;
NoiseSquashingParameters::Classic(NoiseSquashingClassicParameters {
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
glwe_noise_distribution: glwe_noise_distribution.convert(),
decomp_base_log: DecompositionBaseLog(decomp_base_log),
decomp_level_count: DecompositionLevelCount(decomp_level_count),
modulus_switch_noise_reduction_params: modulus_switch_noise_reduction_params.convert(),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
ciphertext_modulus: CoreCiphertextModulus::try_new(ciphertext_modulus).unwrap(),
})
}
}
impl ConvertParams<NoiseSquashingParameters> for TestNoiseSquashingParamsMultiBit {
fn convert(self) -> NoiseSquashingParameters {
let TestNoiseSquashingParamsMultiBit {
glwe_dimension,
polynomial_size,
glwe_noise_distribution,
decomp_base_log,
decomp_level_count,
grouping_factor,
message_modulus,
carry_modulus,
ciphertext_modulus,
} = self;
NoiseSquashingParameters::MultiBit(NoiseSquashingMultiBitParameters {
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
glwe_noise_distribution: glwe_noise_distribution.convert(),
decomp_base_log: DecompositionBaseLog(decomp_base_log),
decomp_level_count: DecompositionLevelCount(decomp_level_count),
grouping_factor: LweBskGroupingFactor(grouping_factor),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
ciphertext_modulus: CoreCiphertextModulus::try_new(ciphertext_modulus).unwrap(),
deterministic_execution: false,
})
}
}
impl ConvertParams<ShortintKeySwitchingParameters> for TestKeySwitchingParams {
fn convert(self) -> ShortintKeySwitchingParameters {
ShortintKeySwitchingParameters {
ks_level: DecompositionLevelCount(self.ks_level),
ks_base_log: DecompositionBaseLog(self.ks_base_log),
destination_key: match &*self.destination_key {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
},
}
}
}
impl ConvertParams<CompactPublicKeyEncryptionParameters>
for TestCompactPublicKeyEncryptionParameters
{
fn convert(self) -> CompactPublicKeyEncryptionParameters {
CompactPublicKeyEncryptionParameters {
encryption_lwe_dimension: LweDimension(self.encryption_lwe_dimension),
encryption_noise_distribution: self.encryption_noise_distribution.convert(),
message_modulus: MessageModulus(self.message_modulus as u64),
carry_modulus: CarryModulus(self.carry_modulus as u64),
ciphertext_modulus: CoreCiphertextModulus::try_new(self.ciphertext_modulus).unwrap(),
expansion_kind: match &*self.expansion_kind {
"requires_casting" => CompactCiphertextListExpansionKind::RequiresCasting,
_ => panic!("Invalid expansion kind"),
},
zk_scheme: match &*self.zk_scheme {
"zkv1" => SupportedCompactPkeZkScheme::V1,
"zkv2" => SupportedCompactPkeZkScheme::V2,
_ => panic!("Invalid zk scheme"),
},
}
}
}
impl ConvertParams<NoiseSquashingCompressionParameters>
for TestNoiseSquashingCompressionParameters
{
fn convert(self) -> NoiseSquashingCompressionParameters {
let TestNoiseSquashingCompressionParameters {
packing_ks_level,
packing_ks_base_log,
packing_ks_polynomial_size,
packing_ks_glwe_dimension,
lwe_per_glwe,
packing_ks_key_noise_distribution,
message_modulus,
carry_modulus,
ciphertext_modulus,
} = self;
NoiseSquashingCompressionParameters {
packing_ks_level: DecompositionLevelCount(packing_ks_level),
packing_ks_base_log: DecompositionBaseLog(packing_ks_base_log),
packing_ks_polynomial_size: PolynomialSize(packing_ks_polynomial_size),
packing_ks_glwe_dimension: GlweDimension(packing_ks_glwe_dimension),
lwe_per_glwe: LweCiphertextCount(lwe_per_glwe),
packing_ks_key_noise_distribution: packing_ks_key_noise_distribution.convert(),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
ciphertext_modulus: CoreCiphertextModulus::try_new(ciphertext_modulus).unwrap(),
}
}
}

View File

@@ -0,0 +1,60 @@
#!/usr/bin/env bash
# This script generates backward compatibility data, with an optional version argument.
# If no argument is provided, it re-generates all data.
# If a version (e.g., "1.2") is provided, it only generates data for that version.
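#
# Examples (the version below is illustrative):
#   ./gen_data.sh        # re-generate data for every supported version
#   ./gen_data.sh 1.4    # generate data only for TFHE-rs 1.4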
set -e
WORKSPACE_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
VERSION_ARG=$1
DATA_DIR=$WORKSPACE_DIR/data
show_help() {
echo "Usage: $0 [version]"
echo
echo "Generate backward compatibility data."
echo
echo "[version] specifies a TFHE-rs version to generate data for."
echo "This should be provided in 'major.minor' format (e.g., '1.4')."
echo
echo "If no argument is provided, the script will generate data for all supported versions."
}
cd "$WORKSPACE_DIR"
# If no argument is provided, generate all data
if [ -z "$VERSION_ARG" ]; then
echo "No version provided."
echo "Re-generating all data"
(set -x; cargo run --release -- --data-path "$DATA_DIR")
exit
fi
# Display help if argument is -h or --help
if [[ "$VERSION_ARG" == "-h" ]] || [[ "$VERSION_ARG" == "--help" ]]; then
show_help
exit 0
fi
# Check that the provided version is X.Y
if [[ ! "$VERSION_ARG" =~ ^[0-9]+\.[0-9]+$ ]]; then
echo "Error: Invalid version format." >&2
echo "Please use the format 'major.minor', for example: '1.4' or '0.8'." >&2
echo
show_help
exit 1
fi
PACKAGE_VERSION=$(echo "$VERSION_ARG" | tr '.' '_')
if [[ ! -d "crates/generate_$PACKAGE_VERSION" ]]; then
echo "Error: Data generation code for TFHE-rs $VERSION_ARG not found." >&2
echo "Please TODO GENERATE FROM TEMPLATE." >&2
exit 1
fi
echo "Generating data for TFHE-rs $VERSION_ARG"
# print the command and run it
(set -x; cargo run --manifest-path "crates/generate_$PACKAGE_VERSION/Cargo.toml" --release -- --data-path "$DATA_DIR")

View File

@@ -1,230 +0,0 @@
use crate::generate::{
store_versioned_test_tfhe_0_10, TfhersVersion, VALID_TEST_PARAMS_TUNIFORM,
VALID_TEST_PARAMS_TUNIFORM_COMPRESSION,
};
use crate::{
HlClientKeyTest, HlServerKeyTest, TestClassicParameterSet, TestCompressionParameterSet,
TestDistribution, TestMetadata, TestMultiBitParameterSet, TestParameterSet, HL_MODULE_NAME,
};
use std::borrow::Cow;
use std::fs::create_dir_all;
use tfhe_0_10::boolean::engine::BooleanEngine;
use tfhe_0_10::core_crypto::commons::generators::DeterministicSeeder;
use tfhe_0_10::core_crypto::commons::math::random::ActivatedRandomGenerator;
use tfhe_0_10::core_crypto::prelude::{CiphertextModulusLog, LweCiphertextCount};
use tfhe_0_10::shortint::engine::ShortintEngine;
use tfhe_0_10::shortint::parameters::{
CarryModulus, CiphertextModulus, ClassicPBSParameters, CompressionParameters,
DecompositionBaseLog, DecompositionLevelCount, DynamicDistribution, EncryptionKeyChoice,
GlweDimension, LweBskGroupingFactor, LweDimension, MaxNoiseLevel, MessageModulus,
MultiBitPBSParameters, PBSParameters, PolynomialSize, StandardDev,
};
use tfhe_0_10::{CompressedServerKey, Seed};
macro_rules! store_versioned_test {
($msg:expr, $dir:expr, $test_filename:expr $(,)? ) => {
store_versioned_test_tfhe_0_10($msg, $dir, $test_filename)
};
}
impl From<TestDistribution> for DynamicDistribution<u64> {
fn from(value: TestDistribution) -> Self {
match value {
TestDistribution::Gaussian { stddev } => {
DynamicDistribution::new_gaussian_from_std_dev(StandardDev(stddev))
}
TestDistribution::TUniform { bound_log2 } => {
DynamicDistribution::new_t_uniform(bound_log2)
}
}
}
}
impl From<TestClassicParameterSet> for ClassicPBSParameters {
fn from(value: TestClassicParameterSet) -> Self {
ClassicPBSParameters {
lwe_dimension: LweDimension(value.lwe_dimension),
glwe_dimension: GlweDimension(value.glwe_dimension),
polynomial_size: PolynomialSize(value.polynomial_size),
lwe_noise_distribution: value.lwe_noise_distribution.into(),
glwe_noise_distribution: value.glwe_noise_distribution.into(),
pbs_base_log: DecompositionBaseLog(value.pbs_base_log),
pbs_level: DecompositionLevelCount(value.pbs_level),
ks_base_log: DecompositionBaseLog(value.ks_base_log),
ks_level: DecompositionLevelCount(value.ks_level),
message_modulus: MessageModulus(value.message_modulus),
carry_modulus: CarryModulus(value.carry_modulus),
max_noise_level: MaxNoiseLevel::new(value.max_noise_level),
log2_p_fail: value.log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(value.ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*value.encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
}
}
}
impl From<TestMultiBitParameterSet> for MultiBitPBSParameters {
fn from(value: TestMultiBitParameterSet) -> Self {
let TestMultiBitParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
encryption_key_choice,
grouping_factor,
} = value;
MultiBitPBSParameters {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.into(),
glwe_noise_distribution: glwe_noise_distribution.into(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus),
carry_modulus: CarryModulus(carry_modulus),
max_noise_level: MaxNoiseLevel::new(max_noise_level),
log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
grouping_factor: LweBskGroupingFactor(grouping_factor),
deterministic_execution: false,
}
}
}
impl From<TestParameterSet> for PBSParameters {
fn from(value: TestParameterSet) -> Self {
match value {
TestParameterSet::TestClassicParameterSet(test_classic_parameter_set) => {
PBSParameters::PBS(test_classic_parameter_set.into())
}
TestParameterSet::TestMultiBitParameterSet(test_parameter_set_multi_bit) => {
PBSParameters::MultiBitPBS(test_parameter_set_multi_bit.into())
}
TestParameterSet::TestKS32ParameterSet(_) => {
panic!("unsupported ks32 parameters for version")
}
}
}
}
impl From<TestCompressionParameterSet> for CompressionParameters {
fn from(value: TestCompressionParameterSet) -> Self {
let TestCompressionParameterSet {
br_level,
br_base_log,
packing_ks_level,
packing_ks_base_log,
packing_ks_polynomial_size,
packing_ks_glwe_dimension,
lwe_per_glwe,
storage_log_modulus,
packing_ks_key_noise_distribution,
} = value;
Self {
br_level: DecompositionLevelCount(br_level),
br_base_log: DecompositionBaseLog(br_base_log),
packing_ks_level: DecompositionLevelCount(packing_ks_level),
packing_ks_base_log: DecompositionBaseLog(packing_ks_base_log),
packing_ks_polynomial_size: PolynomialSize(packing_ks_polynomial_size),
packing_ks_glwe_dimension: GlweDimension(packing_ks_glwe_dimension),
lwe_per_glwe: LweCiphertextCount(lwe_per_glwe),
storage_log_modulus: CiphertextModulusLog(storage_log_modulus),
packing_ks_key_noise_distribution: packing_ks_key_noise_distribution.into(),
}
}
}
const HL_CLIENTKEY_TEST: HlClientKeyTest = HlClientKeyTest {
test_filename: Cow::Borrowed("client_key"),
parameters: VALID_TEST_PARAMS_TUNIFORM,
};
const HL_COMPRESSED_SERVERKEY_TEST: HlServerKeyTest = HlServerKeyTest {
test_filename: Cow::Borrowed("compressed_server_key"),
client_key_filename: Cow::Borrowed("client_key.cbor"),
rerand_cpk_filename: None,
compressed: true,
};
const HL_SERVERKEY_WITH_COMPRESSION_TEST: HlServerKeyTest = HlServerKeyTest {
test_filename: Cow::Borrowed("server_key_with_compression"),
client_key_filename: Cow::Borrowed("client_key.cbor"),
rerand_cpk_filename: None,
compressed: false,
};
pub struct V0_10;
impl TfhersVersion for V0_10 {
const VERSION_NUMBER: &'static str = "0.10";
fn seed_prng(seed: u128) {
let mut seeder = DeterministicSeeder::<ActivatedRandomGenerator>::new(Seed(seed));
let shortint_engine = ShortintEngine::new_from_seeder(&mut seeder);
ShortintEngine::with_thread_local_mut(|local_engine| {
let _ = std::mem::replace(local_engine, shortint_engine);
});
let boolean_engine = BooleanEngine::new_from_seeder(&mut seeder);
BooleanEngine::replace_thread_local(boolean_engine);
}
fn gen_shortint_data() -> Vec<crate::TestMetadata> {
Vec::new()
}
fn gen_hl_data() -> Vec<crate::TestMetadata> {
let dir = Self::data_dir().join(HL_MODULE_NAME);
create_dir_all(&dir).unwrap();
let config = tfhe_0_10::ConfigBuilder::with_custom_parameters(HL_CLIENTKEY_TEST.parameters)
.enable_compression(VALID_TEST_PARAMS_TUNIFORM_COMPRESSION.into())
.build();
let (hl_client_key, hl_server_key) = tfhe_0_10::generate_keys(config);
let compressed_server_key = CompressedServerKey::new(&hl_client_key);
store_versioned_test!(&hl_client_key, &dir, &HL_CLIENTKEY_TEST.test_filename);
store_versioned_test!(
&compressed_server_key,
&dir,
&HL_COMPRESSED_SERVERKEY_TEST.test_filename,
);
store_versioned_test!(
&hl_server_key,
&dir,
&HL_SERVERKEY_WITH_COMPRESSION_TEST.test_filename,
);
vec![
TestMetadata::HlClientKey(HL_CLIENTKEY_TEST),
TestMetadata::HlServerKey(HL_COMPRESSED_SERVERKEY_TEST),
TestMetadata::HlServerKey(HL_SERVERKEY_WITH_COMPRESSION_TEST),
]
}
}

View File

@@ -1,281 +0,0 @@
use crate::generate::{
store_versioned_auxiliary_tfhe_0_11, store_versioned_test_tfhe_0_11, TfhersVersion, PRNG_SEED,
VALID_TEST_PARAMS_TUNIFORM,
};
use crate::{
DataKind, HlClientKeyTest, HlHeterogeneousCiphertextListTest, PkeZkProofAuxiliaryInfo,
TestClassicParameterSet, TestDistribution, TestMetadata, TestMultiBitParameterSet,
TestParameterSet, ZkPkePublicParamsTest, HL_MODULE_NAME,
};
use std::borrow::Cow;
use std::fs::create_dir_all;
use tfhe_0_11::boolean::engine::BooleanEngine;
use tfhe_0_11::core_crypto::commons::generators::DeterministicSeeder;
use tfhe_0_11::core_crypto::commons::math::random::{DefaultRandomGenerator, RandomGenerator};
use tfhe_0_11::core_crypto::prelude::{LweCiphertextCount, TUniform};
use tfhe_0_11::shortint::engine::ShortintEngine;
use tfhe_0_11::shortint::parameters::{
CarryModulus, CiphertextModulus, ClassicPBSParameters, DecompositionBaseLog,
DecompositionLevelCount, DynamicDistribution, EncryptionKeyChoice, GlweDimension,
LweBskGroupingFactor, LweDimension, MaxNoiseLevel, MessageModulus, MultiBitPBSParameters,
PBSParameters, PolynomialSize, StandardDev,
};
use tfhe_0_11::zk::{CompactPkeCrs, ZkComputeLoad, ZkMSBZeroPaddingBitCount};
use tfhe_0_11::{
set_server_key, ClientKey, CompactPublicKey, ProvenCompactCiphertextList, Seed, ServerKey,
};
macro_rules! store_versioned_test {
($msg:expr, $dir:expr, $test_filename:expr $(,)? ) => {
store_versioned_test_tfhe_0_11($msg, $dir, $test_filename)
};
}
macro_rules! store_versioned_auxiliary {
($msg:expr, $dir:expr, $test_filename:expr $(,)? ) => {
store_versioned_auxiliary_tfhe_0_11($msg, $dir, $test_filename)
};
}
impl From<TestDistribution> for DynamicDistribution<u64> {
fn from(value: TestDistribution) -> Self {
match value {
TestDistribution::Gaussian { stddev } => {
DynamicDistribution::new_gaussian_from_std_dev(StandardDev(stddev))
}
TestDistribution::TUniform { bound_log2 } => {
DynamicDistribution::new_t_uniform(bound_log2)
}
}
}
}
impl From<TestClassicParameterSet> for ClassicPBSParameters {
fn from(value: TestClassicParameterSet) -> Self {
ClassicPBSParameters {
lwe_dimension: LweDimension(value.lwe_dimension),
glwe_dimension: GlweDimension(value.glwe_dimension),
polynomial_size: PolynomialSize(value.polynomial_size),
lwe_noise_distribution: value.lwe_noise_distribution.into(),
glwe_noise_distribution: value.glwe_noise_distribution.into(),
pbs_base_log: DecompositionBaseLog(value.pbs_base_log),
pbs_level: DecompositionLevelCount(value.pbs_level),
ks_base_log: DecompositionBaseLog(value.ks_base_log),
ks_level: DecompositionLevelCount(value.ks_level),
message_modulus: MessageModulus(value.message_modulus as u64),
carry_modulus: CarryModulus(value.carry_modulus as u64),
max_noise_level: MaxNoiseLevel::new(value.max_noise_level as u64),
log2_p_fail: value.log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(value.ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*value.encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
}
}
}
impl From<TestMultiBitParameterSet> for MultiBitPBSParameters {
fn from(value: TestMultiBitParameterSet) -> Self {
let TestMultiBitParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
encryption_key_choice,
grouping_factor,
} = value;
MultiBitPBSParameters {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.into(),
glwe_noise_distribution: glwe_noise_distribution.into(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
max_noise_level: MaxNoiseLevel::new(max_noise_level as u64),
log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
grouping_factor: LweBskGroupingFactor(grouping_factor),
deterministic_execution: false,
}
}
}
impl From<TestParameterSet> for PBSParameters {
fn from(value: TestParameterSet) -> Self {
match value {
TestParameterSet::TestClassicParameterSet(test_classic_parameter_set) => {
PBSParameters::PBS(test_classic_parameter_set.into())
}
TestParameterSet::TestMultiBitParameterSet(test_parameter_set_multi_bit) => {
PBSParameters::MultiBitPBS(test_parameter_set_multi_bit.into())
}
TestParameterSet::TestKS32ParameterSet(_) => {
panic!("unsupported ks32 parameters for version")
}
}
}
}
const HL_CLIENTKEY_TEST: HlClientKeyTest = HlClientKeyTest {
test_filename: Cow::Borrowed("client_key"),
parameters: VALID_TEST_PARAMS_TUNIFORM,
};
// The CRS is structurally equivalent to the public params type so we reuse the test
const ZK_PKE_CRS_TEST: ZkPkePublicParamsTest = ZkPkePublicParamsTest {
test_filename: Cow::Borrowed("zk_pke_crs"),
lwe_dimension: VALID_TEST_PARAMS_TUNIFORM.polynomial_size()
* VALID_TEST_PARAMS_TUNIFORM.glwe_dimension(), // Lwe dimension of the "big" key is glwe dimension * polynomial size
max_num_cleartext: 16,
noise_bound: match VALID_TEST_PARAMS_TUNIFORM.lwe_noise_distribution() {
TestDistribution::Gaussian { .. } => unreachable!(),
TestDistribution::TUniform { bound_log2 } => bound_log2 as usize,
},
ciphertext_modulus: VALID_TEST_PARAMS_TUNIFORM.ciphertext_modulus(),
plaintext_modulus: VALID_TEST_PARAMS_TUNIFORM.message_modulus()
* VALID_TEST_PARAMS_TUNIFORM.carry_modulus()
* 2, // *2 for padding bit
padding_bit_count: 1,
};
const HL_PROVEN_COMPACTLIST_TEST_ZKV2: HlHeterogeneousCiphertextListTest =
HlHeterogeneousCiphertextListTest {
test_filename: Cow::Borrowed("hl_proven_heterogeneous_list_zkv2"),
key_filename: HL_CLIENTKEY_TEST.test_filename,
clear_values: Cow::Borrowed(&[17u8 as u64, -12i8 as u64, false as u64, true as u64]),
data_kinds: Cow::Borrowed(&[
DataKind::Unsigned,
DataKind::Signed,
DataKind::Bool,
DataKind::Bool,
]),
compressed: false,
proof_info: Some(PkeZkProofAuxiliaryInfo {
public_key_filename: Cow::Borrowed("public_key"),
params_filename: Cow::Borrowed("zk_pke_crs"),
metadata: Cow::Borrowed("2vdrawkcab"),
}),
};
pub struct V0_11;
impl TfhersVersion for V0_11 {
const VERSION_NUMBER: &'static str = "0.11";
fn seed_prng(seed: u128) {
let mut seeder = DeterministicSeeder::<DefaultRandomGenerator>::new(Seed(seed));
let shortint_engine = ShortintEngine::new_from_seeder(&mut seeder);
ShortintEngine::with_thread_local_mut(|local_engine| {
let _ = std::mem::replace(local_engine, shortint_engine);
});
let boolean_engine = BooleanEngine::new_from_seeder(&mut seeder);
BooleanEngine::replace_thread_local(boolean_engine);
}
fn gen_shortint_data() -> Vec<TestMetadata> {
Vec::new()
}
fn gen_hl_data() -> Vec<TestMetadata> {
let dir = Self::data_dir().join(HL_MODULE_NAME);
create_dir_all(&dir).unwrap();
let mut zk_rng: RandomGenerator<DefaultRandomGenerator> =
RandomGenerator::new(Seed(PRNG_SEED));
// Generate a compact public key needed to create a compact list
let config =
tfhe_0_11::ConfigBuilder::with_custom_parameters(VALID_TEST_PARAMS_TUNIFORM).build();
let hl_client_key = ClientKey::generate(config);
let hl_server_key = ServerKey::new(&hl_client_key);
set_server_key(hl_server_key.clone());
let compact_pub_key = CompactPublicKey::new(&hl_client_key);
let crs = CompactPkeCrs::new(
LweDimension(ZK_PKE_CRS_TEST.lwe_dimension),
LweCiphertextCount(ZK_PKE_CRS_TEST.max_num_cleartext),
TUniform::<u64>::new(ZK_PKE_CRS_TEST.noise_bound as u32),
CiphertextModulus::new(ZK_PKE_CRS_TEST.ciphertext_modulus),
ZK_PKE_CRS_TEST.plaintext_modulus as u64,
ZkMSBZeroPaddingBitCount(ZK_PKE_CRS_TEST.padding_bit_count as u64),
&mut zk_rng,
)
.unwrap();
store_versioned_test!(&crs, &dir, &ZK_PKE_CRS_TEST.test_filename,);
// Store the associated client key to be able to decrypt the ciphertexts in the list
store_versioned_auxiliary!(
&hl_client_key,
&dir,
&HL_PROVEN_COMPACTLIST_TEST_ZKV2.key_filename
);
store_versioned_auxiliary!(
&compact_pub_key,
&dir,
&HL_PROVEN_COMPACTLIST_TEST_ZKV2
.proof_info
.unwrap()
.public_key_filename
);
let mut proven_builder = ProvenCompactCiphertextList::builder(&compact_pub_key);
proven_builder
.push(HL_PROVEN_COMPACTLIST_TEST_ZKV2.clear_values[0] as u8)
.push(HL_PROVEN_COMPACTLIST_TEST_ZKV2.clear_values[1] as i8)
.push(HL_PROVEN_COMPACTLIST_TEST_ZKV2.clear_values[2] != 0)
.push(HL_PROVEN_COMPACTLIST_TEST_ZKV2.clear_values[3] != 0);
let proven_list_packed = proven_builder
.build_with_proof_packed(
&crs,
HL_PROVEN_COMPACTLIST_TEST_ZKV2
.proof_info
.unwrap()
.metadata
.as_bytes(),
ZkComputeLoad::Proof,
)
.unwrap();
store_versioned_test!(
&proven_list_packed,
&dir,
&HL_PROVEN_COMPACTLIST_TEST_ZKV2.test_filename,
);
vec![
TestMetadata::ZkPkePublicParams(ZK_PKE_CRS_TEST),
TestMetadata::HlHeterogeneousCiphertextList(HL_PROVEN_COMPACTLIST_TEST_ZKV2),
]
}
}

View File

@@ -1,261 +0,0 @@
use crate::generate::{
store_versioned_test_tfhe_1_0, TfhersVersion, INSECURE_SMALL_TEST_PARAMS_MS_NOISE_REDUCTION,
PRNG_SEED, VALID_TEST_PARAMS_TUNIFORM,
};
use crate::{
HlClientKeyTest, HlServerKeyTest, TestClassicParameterSet, TestDistribution, TestMetadata,
TestModulusSwitchNoiseReductionParams, TestModulusSwitchType, TestMultiBitParameterSet,
TestParameterSet, ZkPkePublicParamsTest, HL_MODULE_NAME,
};
use std::borrow::Cow;
use std::fs::create_dir_all;
use tfhe_1_0::boolean::engine::BooleanEngine;
use tfhe_1_0::core_crypto::commons::generators::DeterministicSeeder;
use tfhe_1_0::core_crypto::commons::math::random::{DefaultRandomGenerator, RandomGenerator};
use tfhe_1_0::core_crypto::prelude::{
LweCiphertextCount, NoiseEstimationMeasureBound, RSigmaFactor, TUniform, Variance,
};
use tfhe_1_0::shortint::engine::ShortintEngine;
use tfhe_1_0::shortint::parameters::{
CarryModulus, CiphertextModulus, ClassicPBSParameters, DecompositionBaseLog,
DecompositionLevelCount, DynamicDistribution, EncryptionKeyChoice, GlweDimension,
LweBskGroupingFactor, LweDimension, MaxNoiseLevel, MessageModulus,
ModulusSwitchNoiseReductionParams, MultiBitPBSParameters, PBSParameters, PolynomialSize,
StandardDev,
};
use tfhe_1_0::zk::{CompactPkeCrs, ZkMSBZeroPaddingBitCount};
use tfhe_1_0::Seed;
macro_rules! store_versioned_test {
($msg:expr, $dir:expr, $test_filename:expr $(,)? ) => {
store_versioned_test_tfhe_1_0($msg, $dir, $test_filename)
};
}
impl From<TestDistribution> for DynamicDistribution<u64> {
fn from(value: TestDistribution) -> Self {
match value {
TestDistribution::Gaussian { stddev } => {
DynamicDistribution::new_gaussian_from_std_dev(StandardDev(stddev))
}
TestDistribution::TUniform { bound_log2 } => {
DynamicDistribution::new_t_uniform(bound_log2)
}
}
}
}
impl From<TestModulusSwitchNoiseReductionParams> for ModulusSwitchNoiseReductionParams {
fn from(value: TestModulusSwitchNoiseReductionParams) -> Self {
let TestModulusSwitchNoiseReductionParams {
modulus_switch_zeros_count,
ms_bound,
ms_r_sigma_factor,
ms_input_variance,
} = value;
ModulusSwitchNoiseReductionParams {
modulus_switch_zeros_count: LweCiphertextCount(modulus_switch_zeros_count),
ms_bound: NoiseEstimationMeasureBound(ms_bound),
ms_r_sigma_factor: RSigmaFactor(ms_r_sigma_factor),
ms_input_variance: Variance(ms_input_variance),
}
}
}
impl From<TestClassicParameterSet> for ClassicPBSParameters {
fn from(value: TestClassicParameterSet) -> Self {
let modulus_switch_noise_reduction_params =
match value.modulus_switch_noise_reduction_params {
TestModulusSwitchType::Standard => None,
TestModulusSwitchType::DriftTechniqueNoiseReduction(
test_modulus_switch_noise_reduction_params,
) => Some(test_modulus_switch_noise_reduction_params.into()),
TestModulusSwitchType::CenteredMeanNoiseReduction => panic!("Not supported"),
};
ClassicPBSParameters {
lwe_dimension: LweDimension(value.lwe_dimension),
glwe_dimension: GlweDimension(value.glwe_dimension),
polynomial_size: PolynomialSize(value.polynomial_size),
lwe_noise_distribution: value.lwe_noise_distribution.into(),
glwe_noise_distribution: value.glwe_noise_distribution.into(),
pbs_base_log: DecompositionBaseLog(value.pbs_base_log),
pbs_level: DecompositionLevelCount(value.pbs_level),
ks_base_log: DecompositionBaseLog(value.ks_base_log),
ks_level: DecompositionLevelCount(value.ks_level),
message_modulus: MessageModulus(value.message_modulus as u64),
carry_modulus: CarryModulus(value.carry_modulus as u64),
max_noise_level: MaxNoiseLevel::new(value.max_noise_level as u64),
log2_p_fail: value.log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(value.ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*value.encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
modulus_switch_noise_reduction_params,
}
}
}
impl From<TestMultiBitParameterSet> for MultiBitPBSParameters {
fn from(value: TestMultiBitParameterSet) -> Self {
let TestMultiBitParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
encryption_key_choice,
grouping_factor,
} = value;
MultiBitPBSParameters {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.into(),
glwe_noise_distribution: glwe_noise_distribution.into(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
max_noise_level: MaxNoiseLevel::new(max_noise_level as u64),
log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
grouping_factor: LweBskGroupingFactor(grouping_factor),
deterministic_execution: false,
}
}
}
impl From<TestParameterSet> for PBSParameters {
fn from(value: TestParameterSet) -> Self {
match value {
TestParameterSet::TestClassicParameterSet(test_classic_parameter_set) => {
PBSParameters::PBS(test_classic_parameter_set.into())
}
TestParameterSet::TestMultiBitParameterSet(test_parameter_set_multi_bit) => {
PBSParameters::MultiBitPBS(test_parameter_set_multi_bit.into())
}
TestParameterSet::TestKS32ParameterSet(_) => {
panic!("unsupported ks32 parameters for version")
}
}
}
}
const HL_CLIENTKEY_MS_NOISE_REDUCTION_TEST: HlClientKeyTest = HlClientKeyTest {
test_filename: Cow::Borrowed("client_key_ms_noise_reduction"),
parameters: INSECURE_SMALL_TEST_PARAMS_MS_NOISE_REDUCTION,
};
const HL_SERVERKEY_MS_NOISE_REDUCTION_TEST: HlServerKeyTest = HlServerKeyTest {
test_filename: Cow::Borrowed("server_key_ms_noise_reduction"),
client_key_filename: Cow::Borrowed("client_key_ms_noise_reduction.cbor"),
rerand_cpk_filename: None,
compressed: false,
};
const ZK_PKEV2_CRS_TEST: ZkPkePublicParamsTest = ZkPkePublicParamsTest {
test_filename: Cow::Borrowed("zk_pkev2_crs"),
lwe_dimension: VALID_TEST_PARAMS_TUNIFORM.polynomial_size()
        * VALID_TEST_PARAMS_TUNIFORM.glwe_dimension(), // LWE dimension of the "big" key is GLWE dimension * polynomial size
max_num_cleartext: 16,
noise_bound: match VALID_TEST_PARAMS_TUNIFORM.lwe_noise_distribution() {
TestDistribution::Gaussian { .. } => unreachable!(),
TestDistribution::TUniform { bound_log2 } => bound_log2 as usize,
},
ciphertext_modulus: VALID_TEST_PARAMS_TUNIFORM.ciphertext_modulus(),
plaintext_modulus: VALID_TEST_PARAMS_TUNIFORM.message_modulus()
* VALID_TEST_PARAMS_TUNIFORM.carry_modulus()
* 2, // *2 for padding bit
padding_bit_count: 1,
};
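// Worked example with purely illustrative numbers (the real values come from
// VALID_TEST_PARAMS_TUNIFORM): a GLWE dimension of 1 with polynomial size 2048 gives a "big" LWE
// dimension of 1 * 2048 = 2048, and a message modulus of 4 times a carry modulus of 4 times 2
// (padding bit) gives a plaintext modulus of 32.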
pub struct V1_0;
impl TfhersVersion for V1_0 {
const VERSION_NUMBER: &'static str = "1.0";
fn seed_prng(seed: u128) {
let mut seeder = DeterministicSeeder::<DefaultRandomGenerator>::new(Seed(seed));
let shortint_engine = ShortintEngine::new_from_seeder(&mut seeder);
ShortintEngine::with_thread_local_mut(|local_engine| {
let _ = std::mem::replace(local_engine, shortint_engine);
});
let boolean_engine = BooleanEngine::new_from_seeder(&mut seeder);
BooleanEngine::replace_thread_local(boolean_engine);
}
fn gen_shortint_data() -> Vec<TestMetadata> {
Vec::new()
}
fn gen_hl_data() -> Vec<TestMetadata> {
let dir = Self::data_dir().join(HL_MODULE_NAME);
create_dir_all(&dir).unwrap();
let config = tfhe_1_0::ConfigBuilder::with_custom_parameters(
HL_CLIENTKEY_MS_NOISE_REDUCTION_TEST.parameters,
)
.build();
let (hl_client_key, hl_server_key) = tfhe_1_0::generate_keys(config);
store_versioned_test!(
&hl_client_key,
&dir,
&HL_CLIENTKEY_MS_NOISE_REDUCTION_TEST.test_filename
);
store_versioned_test!(
&hl_server_key,
&dir,
&HL_SERVERKEY_MS_NOISE_REDUCTION_TEST.test_filename,
);
let mut zk_rng: RandomGenerator<DefaultRandomGenerator> =
RandomGenerator::new(Seed(PRNG_SEED));
let zkv2_crs = CompactPkeCrs::new(
LweDimension(ZK_PKEV2_CRS_TEST.lwe_dimension),
LweCiphertextCount(ZK_PKEV2_CRS_TEST.max_num_cleartext),
TUniform::<u64>::new(ZK_PKEV2_CRS_TEST.noise_bound as u32),
CiphertextModulus::new(ZK_PKEV2_CRS_TEST.ciphertext_modulus),
ZK_PKEV2_CRS_TEST.plaintext_modulus as u64,
ZkMSBZeroPaddingBitCount(ZK_PKEV2_CRS_TEST.padding_bit_count as u64),
&mut zk_rng,
)
.unwrap();
store_versioned_test!(&zkv2_crs, &dir, &ZK_PKEV2_CRS_TEST.test_filename,);
vec![
TestMetadata::HlClientKey(HL_CLIENTKEY_MS_NOISE_REDUCTION_TEST),
TestMetadata::HlServerKey(HL_SERVERKEY_MS_NOISE_REDUCTION_TEST),
TestMetadata::ZkPkePublicParams(ZK_PKEV2_CRS_TEST),
]
}
}
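// Hedged sketch of how a version is expected to be driven by the generator (the actual driver
// lives elsewhere in this crate and may differ in detail):
//     V1_0::seed_prng(PRNG_SEED);
//     let hl_metadata = V1_0::gen_hl_data();
//     // hl_metadata is then merged into the per-module ron metadata files.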

View File

@@ -1,366 +0,0 @@
use crate::generate::{
store_versioned_test_tfhe_1_1, TfhersVersion,
INSECURE_SMALL_TEST_NOISE_SQUASHING_PARAMS_MS_NOISE_REDUCTION,
INSECURE_SMALL_TEST_PARAMS_MS_NOISE_REDUCTION,
};
use crate::{
HlClientKeyTest, HlServerKeyTest, HlSquashedNoiseBoolCiphertextTest,
HlSquashedNoiseSignedCiphertextTest, HlSquashedNoiseUnsignedCiphertextTest,
TestClassicParameterSet, TestDistribution, TestMetadata, TestModulusSwitchNoiseReductionParams,
TestModulusSwitchType, TestMultiBitParameterSet, TestNoiseSquashingParams, TestParameterSet,
HL_MODULE_NAME,
};
use std::borrow::Cow;
use std::fs::create_dir_all;
use tfhe_1_1::boolean::engine::BooleanEngine;
use tfhe_1_1::core_crypto::commons::generators::DeterministicSeeder;
use tfhe_1_1::core_crypto::commons::math::random::DefaultRandomGenerator;
use tfhe_1_1::core_crypto::prelude::{
LweCiphertextCount, NoiseEstimationMeasureBound, RSigmaFactor, UnsignedInteger, Variance,
};
use tfhe_1_1::prelude::*;
use tfhe_1_1::shortint::engine::ShortintEngine;
use tfhe_1_1::shortint::parameters::{
CarryModulus, CiphertextModulus, ClassicPBSParameters, CoreCiphertextModulus,
DecompositionBaseLog, DecompositionLevelCount, DynamicDistribution, EncryptionKeyChoice,
GlweDimension, LweBskGroupingFactor, LweDimension, MaxNoiseLevel, MessageModulus,
ModulusSwitchNoiseReductionParams, MultiBitPBSParameters, NoiseSquashingParameters,
PBSParameters, PolynomialSize, StandardDev,
};
use tfhe_1_1::{set_server_key, CompressedServerKey, FheBool, FheInt64, FheUint64, Seed};
macro_rules! store_versioned_test {
($msg:expr, $dir:expr, $test_filename:expr $(,)? ) => {
store_versioned_test_tfhe_1_1($msg, $dir, $test_filename)
};
}
impl<Scalar: UnsignedInteger> From<TestDistribution> for DynamicDistribution<Scalar> {
fn from(value: TestDistribution) -> Self {
match value {
TestDistribution::Gaussian { stddev } => {
DynamicDistribution::new_gaussian_from_std_dev(StandardDev(stddev))
}
TestDistribution::TUniform { bound_log2 } => {
DynamicDistribution::new_t_uniform(bound_log2)
}
}
}
}
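// tfhe 1.1 represents the modulus switch configuration as an `Option`: `Standard` means no noise
// reduction (`None`), the drift technique carries explicit parameters, and the centered-mean
// variant only appears in later versions, hence the panic below.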
impl From<TestModulusSwitchType> for Option<ModulusSwitchNoiseReductionParams> {
fn from(value: TestModulusSwitchType) -> Self {
let modulus_switch_noise_reduction_params = match value {
TestModulusSwitchType::Standard => return None,
TestModulusSwitchType::DriftTechniqueNoiseReduction(
test_modulus_switch_noise_reduction_params,
) => test_modulus_switch_noise_reduction_params,
TestModulusSwitchType::CenteredMeanNoiseReduction => panic!("Not supported"),
};
let TestModulusSwitchNoiseReductionParams {
modulus_switch_zeros_count,
ms_bound,
ms_r_sigma_factor,
ms_input_variance,
} = modulus_switch_noise_reduction_params;
Some(ModulusSwitchNoiseReductionParams {
modulus_switch_zeros_count: LweCiphertextCount(modulus_switch_zeros_count),
ms_bound: NoiseEstimationMeasureBound(ms_bound),
ms_r_sigma_factor: RSigmaFactor(ms_r_sigma_factor),
ms_input_variance: Variance(ms_input_variance),
})
}
}
impl From<TestClassicParameterSet> for ClassicPBSParameters {
fn from(value: TestClassicParameterSet) -> Self {
ClassicPBSParameters {
lwe_dimension: LweDimension(value.lwe_dimension),
glwe_dimension: GlweDimension(value.glwe_dimension),
polynomial_size: PolynomialSize(value.polynomial_size),
lwe_noise_distribution: value.lwe_noise_distribution.into(),
glwe_noise_distribution: value.glwe_noise_distribution.into(),
pbs_base_log: DecompositionBaseLog(value.pbs_base_log),
pbs_level: DecompositionLevelCount(value.pbs_level),
ks_base_log: DecompositionBaseLog(value.ks_base_log),
ks_level: DecompositionLevelCount(value.ks_level),
message_modulus: MessageModulus(value.message_modulus as u64),
carry_modulus: CarryModulus(value.carry_modulus as u64),
max_noise_level: MaxNoiseLevel::new(value.max_noise_level as u64),
log2_p_fail: value.log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(value.ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*value.encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
modulus_switch_noise_reduction_params: value
.modulus_switch_noise_reduction_params
.into(),
}
}
}
impl From<TestMultiBitParameterSet> for MultiBitPBSParameters {
fn from(value: TestMultiBitParameterSet) -> Self {
let TestMultiBitParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
encryption_key_choice,
grouping_factor,
} = value;
MultiBitPBSParameters {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.into(),
glwe_noise_distribution: glwe_noise_distribution.into(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
max_noise_level: MaxNoiseLevel::new(max_noise_level as u64),
log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
grouping_factor: LweBskGroupingFactor(grouping_factor),
deterministic_execution: false,
}
}
}
impl From<TestParameterSet> for PBSParameters {
fn from(value: TestParameterSet) -> Self {
match value {
TestParameterSet::TestClassicParameterSet(test_classic_parameter_set) => {
PBSParameters::PBS(test_classic_parameter_set.into())
}
TestParameterSet::TestMultiBitParameterSet(test_parameter_set_multi_bit) => {
PBSParameters::MultiBitPBS(test_parameter_set_multi_bit.into())
}
TestParameterSet::TestKS32ParameterSet(_) => {
panic!("unsupported ks32 parameters for version")
}
}
}
}
impl From<TestNoiseSquashingParams> for NoiseSquashingParameters {
fn from(value: TestNoiseSquashingParams) -> Self {
let TestNoiseSquashingParams {
glwe_dimension,
polynomial_size,
glwe_noise_distribution,
decomp_base_log,
decomp_level_count,
modulus_switch_noise_reduction_params,
message_modulus,
carry_modulus,
ciphertext_modulus,
} = value;
Self {
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
glwe_noise_distribution: glwe_noise_distribution.into(),
decomp_base_log: DecompositionBaseLog(decomp_base_log),
decomp_level_count: DecompositionLevelCount(decomp_level_count),
modulus_switch_noise_reduction_params: modulus_switch_noise_reduction_params.into(),
message_modulus: MessageModulus(message_modulus.try_into().unwrap()),
carry_modulus: CarryModulus(carry_modulus.try_into().unwrap()),
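            // Convention used by the test metadata: a stored ciphertext modulus of 0 means the
            // native modulus.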
ciphertext_modulus: if ciphertext_modulus == 0 {
CoreCiphertextModulus::new_native()
} else {
CoreCiphertextModulus::try_new(ciphertext_modulus).unwrap()
},
}
}
}
const HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST: HlClientKeyTest = HlClientKeyTest {
test_filename: Cow::Borrowed("client_key_with_noise_squashing"),
parameters: INSECURE_SMALL_TEST_PARAMS_MS_NOISE_REDUCTION,
};
const HL_SERVERKEY_MS_NOISE_REDUCTION_TEST: HlServerKeyTest = HlServerKeyTest {
test_filename: Cow::Borrowed("server_key_with_noise_squashing"),
client_key_filename: HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST.test_filename,
rerand_cpk_filename: None,
compressed: false,
};
const HL_SERVERKEY_MS_NOISE_REDUCTION_COMPRESSED_TEST: HlServerKeyTest = HlServerKeyTest {
test_filename: Cow::Borrowed("server_key_with_noise_squashing_compressed"),
client_key_filename: HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST.test_filename,
rerand_cpk_filename: None,
compressed: true,
};
const HL_SQUASHED_NOISE_UNSIGNED_CIPHERTEXT_TEST: HlSquashedNoiseUnsignedCiphertextTest =
HlSquashedNoiseUnsignedCiphertextTest {
test_filename: Cow::Borrowed("squashed_noise_unsigned_ciphertext"),
key_filename: HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST.test_filename,
clear_value: 42,
};
const HL_SQUASHED_NOISE_SIGNED_CIPHERTEXT_TEST: HlSquashedNoiseSignedCiphertextTest =
HlSquashedNoiseSignedCiphertextTest {
test_filename: Cow::Borrowed("squashed_noise_signed_ciphertext"),
key_filename: HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST.test_filename,
clear_value: -37,
};
const HL_SQUASHED_NOISE_BOOL_FALSE_CIPHERTEXT_TEST: HlSquashedNoiseBoolCiphertextTest =
HlSquashedNoiseBoolCiphertextTest {
test_filename: Cow::Borrowed("squashed_noise_bool_false_ciphertext"),
key_filename: HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST.test_filename,
clear_value: false,
};
const HL_SQUASHED_NOISE_BOOL_TRUE_CIPHERTEXT_TEST: HlSquashedNoiseBoolCiphertextTest =
HlSquashedNoiseBoolCiphertextTest {
test_filename: Cow::Borrowed("squashed_noise_bool_true_ciphertext"),
key_filename: HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST.test_filename,
clear_value: true,
};
pub struct V1_1;
impl TfhersVersion for V1_1 {
const VERSION_NUMBER: &'static str = "1.1";
fn seed_prng(seed: u128) {
let mut seeder = DeterministicSeeder::<DefaultRandomGenerator>::new(Seed(seed));
let shortint_engine = ShortintEngine::new_from_seeder(&mut seeder);
ShortintEngine::with_thread_local_mut(|local_engine| {
let _ = std::mem::replace(local_engine, shortint_engine);
});
let boolean_engine = BooleanEngine::new_from_seeder(&mut seeder);
BooleanEngine::replace_thread_local(boolean_engine);
}
fn gen_shortint_data() -> Vec<TestMetadata> {
Vec::new()
}
fn gen_hl_data() -> Vec<TestMetadata> {
let dir = Self::data_dir().join(HL_MODULE_NAME);
create_dir_all(&dir).unwrap();
let config = tfhe_1_1::ConfigBuilder::with_custom_parameters(
HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST.parameters,
)
.enable_noise_squashing(
INSECURE_SMALL_TEST_NOISE_SQUASHING_PARAMS_MS_NOISE_REDUCTION.into(),
)
.build();
let (hl_client_key, hl_server_key) = tfhe_1_1::generate_keys(config);
set_server_key(hl_server_key.clone());
let ct_unsigned = FheUint64::encrypt(
HL_SQUASHED_NOISE_UNSIGNED_CIPHERTEXT_TEST.clear_value,
&hl_client_key,
);
let ct_signed = FheInt64::encrypt(
HL_SQUASHED_NOISE_SIGNED_CIPHERTEXT_TEST.clear_value,
&hl_client_key,
);
let ct_false = FheBool::encrypt(
HL_SQUASHED_NOISE_BOOL_FALSE_CIPHERTEXT_TEST.clear_value,
&hl_client_key,
);
let ct_true = FheBool::encrypt(
HL_SQUASHED_NOISE_BOOL_TRUE_CIPHERTEXT_TEST.clear_value,
&hl_client_key,
);
let ct_unsigned = ct_unsigned.squash_noise().unwrap();
let ct_signed = ct_signed.squash_noise().unwrap();
let ct_false = ct_false.squash_noise().unwrap();
let ct_true = ct_true.squash_noise().unwrap();
store_versioned_test!(
&hl_client_key,
&dir,
&HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST.test_filename
);
store_versioned_test!(
&hl_server_key,
&dir,
&HL_SERVERKEY_MS_NOISE_REDUCTION_TEST.test_filename,
);
store_versioned_test!(
&ct_unsigned,
&dir,
&HL_SQUASHED_NOISE_UNSIGNED_CIPHERTEXT_TEST.test_filename,
);
store_versioned_test!(
&ct_signed,
&dir,
&HL_SQUASHED_NOISE_SIGNED_CIPHERTEXT_TEST.test_filename,
);
store_versioned_test!(
&ct_false,
&dir,
&HL_SQUASHED_NOISE_BOOL_FALSE_CIPHERTEXT_TEST.test_filename,
);
store_versioned_test!(
&ct_true,
&dir,
&HL_SQUASHED_NOISE_BOOL_TRUE_CIPHERTEXT_TEST.test_filename,
);
let compressed_hl_server_key = CompressedServerKey::new(&hl_client_key);
store_versioned_test!(
&compressed_hl_server_key,
&dir,
&HL_SERVERKEY_MS_NOISE_REDUCTION_COMPRESSED_TEST.test_filename,
);
vec![
TestMetadata::HlClientKey(HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST),
TestMetadata::HlServerKey(HL_SERVERKEY_MS_NOISE_REDUCTION_TEST),
TestMetadata::HlServerKey(HL_SERVERKEY_MS_NOISE_REDUCTION_COMPRESSED_TEST),
TestMetadata::HlSquashedNoiseUnsignedCiphertext(
HL_SQUASHED_NOISE_UNSIGNED_CIPHERTEXT_TEST,
),
TestMetadata::HlSquashedNoiseSignedCiphertext(HL_SQUASHED_NOISE_SIGNED_CIPHERTEXT_TEST),
TestMetadata::HlSquashedNoiseBoolCiphertext(
HL_SQUASHED_NOISE_BOOL_FALSE_CIPHERTEXT_TEST,
),
TestMetadata::HlSquashedNoiseBoolCiphertext(
HL_SQUASHED_NOISE_BOOL_TRUE_CIPHERTEXT_TEST,
),
]
}
}

View File

@@ -1,612 +0,0 @@
use crate::generate::{
store_versioned_auxiliary_tfhe_1_3, store_versioned_test_tfhe_1_3, TfhersVersion,
INSECURE_SMALL_TEST_PARAMS_MS_MEAN_COMPENSATION, PRNG_SEED, VALID_TEST_PARAMS_KS32_TUNIFORM,
VALID_TEST_PARAMS_TUNIFORM,
};
use crate::{
DataKind, HlCiphertextTest, HlClientKeyTest, HlCompressedSquashedNoiseCiphertextListTest,
HlHeterogeneousCiphertextListTest, HlServerKeyTest, PkeZkProofAuxiliaryInfo,
TestClassicParameterSet, TestDistribution, TestKS32ParameterSet, TestMetadata,
TestModulusSwitchNoiseReductionParams, TestModulusSwitchType, TestMultiBitParameterSet,
TestNoiseSquashingCompressionParameters, TestNoiseSquashingParams, TestParameterSet,
ZkPkePublicParamsTest, HL_MODULE_NAME,
};
use std::borrow::Cow;
use std::fs::create_dir_all;
use crate::generate::{
INSECURE_SMALL_TEST_NOISE_SQUASHING_PARAMS_MS_NOISE_REDUCTION,
INSECURE_SMALL_TEST_PARAMS_MS_NOISE_REDUCTION, TEST_PARAMS_NOISE_SQUASHING_COMPRESSION,
};
use tfhe_1_3::boolean::engine::BooleanEngine;
use tfhe_1_3::core_crypto::commons::generators::DeterministicSeeder;
use tfhe_1_3::core_crypto::commons::math::random::RandomGenerator;
use tfhe_1_3::core_crypto::prelude::{DefaultRandomGenerator, TUniform, UnsignedInteger};
use tfhe_1_3::prelude::*;
use tfhe_1_3::shortint::engine::ShortintEngine;
use tfhe_1_3::shortint::parameters::{
CarryModulus, CiphertextModulus, CiphertextModulus32, ClassicPBSParameters,
CoreCiphertextModulus, DecompositionBaseLog, DecompositionLevelCount, DynamicDistribution,
EncryptionKeyChoice, GlweDimension, KeySwitch32PBSParameters, LweBskGroupingFactor,
LweCiphertextCount, LweDimension, MaxNoiseLevel, MessageModulus,
ModulusSwitchNoiseReductionParams, MultiBitPBSParameters, NoiseEstimationMeasureBound,
NoiseSquashingCompressionParameters, NoiseSquashingParameters, PolynomialSize, RSigmaFactor,
StandardDev, Variance,
};
use tfhe_1_3::shortint::prelude::ModulusSwitchType;
use tfhe_1_3::shortint::AtomicPatternParameters;
use tfhe_1_3::zk::{CompactPkeCrs, ZkComputeLoad, ZkMSBZeroPaddingBitCount};
use tfhe_1_3::{
set_server_key, ClientKey, CompactPublicKey, CompressedServerKey,
CompressedSquashedNoiseCiphertextList, FheBool, FheInt32, FheUint32, FheUint8,
ProvenCompactCiphertextList, Seed, ServerKey,
};
macro_rules! store_versioned_test {
($msg:expr, $dir:expr, $test_filename:expr $(,)? ) => {
store_versioned_test_tfhe_1_3($msg, $dir, $test_filename)
};
}
macro_rules! store_versioned_auxiliary {
($msg:expr, $dir:expr, $test_filename:expr $(,)? ) => {
store_versioned_auxiliary_tfhe_1_3($msg, $dir, $test_filename)
};
}
impl<T> From<TestDistribution> for DynamicDistribution<T>
where
T: UnsignedInteger,
{
fn from(value: TestDistribution) -> Self {
match value {
TestDistribution::Gaussian { stddev } => {
DynamicDistribution::new_gaussian_from_std_dev(StandardDev(stddev))
}
TestDistribution::TUniform { bound_log2 } => {
DynamicDistribution::new_t_uniform(bound_log2)
}
}
}
}
impl From<TestModulusSwitchNoiseReductionParams> for ModulusSwitchNoiseReductionParams {
fn from(value: TestModulusSwitchNoiseReductionParams) -> Self {
let TestModulusSwitchNoiseReductionParams {
modulus_switch_zeros_count,
ms_bound,
ms_r_sigma_factor,
ms_input_variance,
} = value;
ModulusSwitchNoiseReductionParams {
modulus_switch_zeros_count: LweCiphertextCount(modulus_switch_zeros_count),
ms_bound: NoiseEstimationMeasureBound(ms_bound),
ms_r_sigma_factor: RSigmaFactor(ms_r_sigma_factor),
ms_input_variance: Variance(ms_input_variance),
}
}
}
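// Unlike tfhe 1.0/1.1, tfhe 1.3 exposes a dedicated `ModulusSwitchType` enum, so every test
// variant (including the centered-mean technique) converts directly instead of panicking.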
impl From<TestModulusSwitchType> for ModulusSwitchType {
fn from(value: TestModulusSwitchType) -> Self {
match value {
TestModulusSwitchType::Standard => ModulusSwitchType::Standard,
TestModulusSwitchType::DriftTechniqueNoiseReduction(
test_modulus_switch_noise_reduction_params,
) => ModulusSwitchType::DriftTechniqueNoiseReduction(
test_modulus_switch_noise_reduction_params.into(),
),
TestModulusSwitchType::CenteredMeanNoiseReduction => {
ModulusSwitchType::CenteredMeanNoiseReduction
}
}
}
}
impl From<TestClassicParameterSet> for ClassicPBSParameters {
fn from(value: TestClassicParameterSet) -> Self {
ClassicPBSParameters {
lwe_dimension: LweDimension(value.lwe_dimension),
glwe_dimension: GlweDimension(value.glwe_dimension),
polynomial_size: PolynomialSize(value.polynomial_size),
lwe_noise_distribution: value.lwe_noise_distribution.into(),
glwe_noise_distribution: value.glwe_noise_distribution.into(),
pbs_base_log: DecompositionBaseLog(value.pbs_base_log),
pbs_level: DecompositionLevelCount(value.pbs_level),
ks_base_log: DecompositionBaseLog(value.ks_base_log),
ks_level: DecompositionLevelCount(value.ks_level),
message_modulus: MessageModulus(value.message_modulus as u64),
carry_modulus: CarryModulus(value.carry_modulus as u64),
max_noise_level: MaxNoiseLevel::new(value.max_noise_level as u64),
log2_p_fail: value.log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(value.ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*value.encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
modulus_switch_noise_reduction_params: value
.modulus_switch_noise_reduction_params
.into(),
}
}
}
impl From<TestMultiBitParameterSet> for MultiBitPBSParameters {
fn from(value: TestMultiBitParameterSet) -> Self {
let TestMultiBitParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
encryption_key_choice,
grouping_factor,
} = value;
MultiBitPBSParameters {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.into(),
glwe_noise_distribution: glwe_noise_distribution.into(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
max_noise_level: MaxNoiseLevel::new(max_noise_level as u64),
log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
grouping_factor: LweBskGroupingFactor(grouping_factor),
deterministic_execution: false,
}
}
}
impl From<TestKS32ParameterSet> for KeySwitch32PBSParameters {
fn from(value: TestKS32ParameterSet) -> Self {
let TestKS32ParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
modulus_switch_noise_reduction_params,
post_keyswitch_ciphertext_modulus,
} = value;
Self {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.into(),
glwe_noise_distribution: glwe_noise_distribution.into(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
max_noise_level: MaxNoiseLevel::new(max_noise_level as u64),
log2_p_fail,
post_keyswitch_ciphertext_modulus: CiphertextModulus32::try_new(
post_keyswitch_ciphertext_modulus,
)
.unwrap(),
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
modulus_switch_noise_reduction_params: modulus_switch_noise_reduction_params.into(),
}
}
}
impl From<TestParameterSet> for AtomicPatternParameters {
fn from(value: TestParameterSet) -> Self {
match value {
TestParameterSet::TestClassicParameterSet(test_classic_parameter_set) => {
let classic = ClassicPBSParameters::from(test_classic_parameter_set);
classic.into()
}
TestParameterSet::TestMultiBitParameterSet(test_parameter_set_multi_bit) => {
let multibit = MultiBitPBSParameters::from(test_parameter_set_multi_bit);
multibit.into()
}
TestParameterSet::TestKS32ParameterSet(test_ks32_parameter_set) => {
let ks32 = KeySwitch32PBSParameters::from(test_ks32_parameter_set);
ks32.into()
}
}
}
}
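// Illustrative sketch (not part of the generation flow below): any `TestParameterSet` variant,
// including the KS32 atomic pattern, converts into `AtomicPatternParameters` and can back a
// config, mirroring the calls made in `gen_hl_data`.
#[allow(dead_code)]
fn example_config_from_test_params(params: TestParameterSet) -> tfhe_1_3::Config {
    let ap_params: AtomicPatternParameters = params.into();
    tfhe_1_3::ConfigBuilder::with_custom_parameters(ap_params).build()
}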
impl From<TestNoiseSquashingParams> for NoiseSquashingParameters {
fn from(value: TestNoiseSquashingParams) -> Self {
let TestNoiseSquashingParams {
glwe_dimension,
polynomial_size,
glwe_noise_distribution,
decomp_base_log,
decomp_level_count,
modulus_switch_noise_reduction_params,
message_modulus,
carry_modulus,
ciphertext_modulus,
} = value;
Self {
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
glwe_noise_distribution: glwe_noise_distribution.into(),
decomp_base_log: DecompositionBaseLog(decomp_base_log),
decomp_level_count: DecompositionLevelCount(decomp_level_count),
modulus_switch_noise_reduction_params: modulus_switch_noise_reduction_params.into(),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
ciphertext_modulus: CoreCiphertextModulus::try_new(ciphertext_modulus).unwrap(),
}
}
}
impl From<TestNoiseSquashingCompressionParameters> for NoiseSquashingCompressionParameters {
fn from(value: TestNoiseSquashingCompressionParameters) -> Self {
let TestNoiseSquashingCompressionParameters {
packing_ks_level,
packing_ks_base_log,
packing_ks_polynomial_size,
packing_ks_glwe_dimension,
lwe_per_glwe,
packing_ks_key_noise_distribution,
message_modulus,
carry_modulus,
ciphertext_modulus,
} = value;
Self {
packing_ks_level: DecompositionLevelCount(packing_ks_level),
packing_ks_base_log: DecompositionBaseLog(packing_ks_base_log),
packing_ks_polynomial_size: PolynomialSize(packing_ks_polynomial_size),
packing_ks_glwe_dimension: GlweDimension(packing_ks_glwe_dimension),
lwe_per_glwe: LweCiphertextCount(lwe_per_glwe),
packing_ks_key_noise_distribution: packing_ks_key_noise_distribution.into(),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
ciphertext_modulus: CoreCiphertextModulus::try_new(ciphertext_modulus).unwrap(),
}
}
}
const ZK_PKE_CRS_TEST: ZkPkePublicParamsTest = ZkPkePublicParamsTest {
test_filename: Cow::Borrowed("zk_pke_crs"),
lwe_dimension: VALID_TEST_PARAMS_TUNIFORM.polynomial_size()
        * VALID_TEST_PARAMS_TUNIFORM.glwe_dimension(), // LWE dimension of the "big" key is GLWE dimension * polynomial size
max_num_cleartext: 16,
noise_bound: match VALID_TEST_PARAMS_TUNIFORM.lwe_noise_distribution() {
TestDistribution::Gaussian { .. } => unreachable!(),
TestDistribution::TUniform { bound_log2 } => bound_log2 as usize,
},
ciphertext_modulus: VALID_TEST_PARAMS_TUNIFORM.ciphertext_modulus(),
plaintext_modulus: VALID_TEST_PARAMS_TUNIFORM.message_modulus()
* VALID_TEST_PARAMS_TUNIFORM.carry_modulus()
* 2, // *2 for padding bit
padding_bit_count: 1,
};
const HL_PROVEN_COMPACTLIST_TEST_ZKV2_FASTHASH: HlHeterogeneousCiphertextListTest =
HlHeterogeneousCiphertextListTest {
test_filename: Cow::Borrowed("hl_proven_heterogeneous_list_zkv2_fasthash"),
key_filename: Cow::Borrowed("client_key"),
clear_values: Cow::Borrowed(&[17u8 as u64, -12i8 as u64, false as u64, true as u64]),
data_kinds: Cow::Borrowed(&[
DataKind::Unsigned,
DataKind::Signed,
DataKind::Bool,
DataKind::Bool,
]),
compressed: false,
proof_info: Some(PkeZkProofAuxiliaryInfo {
public_key_filename: Cow::Borrowed("public_key"),
params_filename: ZK_PKE_CRS_TEST.test_filename,
metadata: Cow::Borrowed("2vdrawkcab"),
}),
};
const HL_CLIENTKEY_MS_MEAN_COMPENSATION: HlClientKeyTest = HlClientKeyTest {
test_filename: Cow::Borrowed("client_key_ms_mean_compensation"),
parameters: INSECURE_SMALL_TEST_PARAMS_MS_MEAN_COMPENSATION,
};
const HL_SERVERKEY_MS_MEAN_COMPENSATION: HlServerKeyTest = HlServerKeyTest {
test_filename: Cow::Borrowed("server_key_ms_mean_compensation"),
client_key_filename: Cow::Borrowed("client_key_ms_mean_compensation.cbor"),
rerand_cpk_filename: None,
compressed: false,
};
const HL_COMPRESSED_SQUASHED_NOISE_CIPHERTEXT_LIST: HlCompressedSquashedNoiseCiphertextListTest =
HlCompressedSquashedNoiseCiphertextListTest {
test_filename: Cow::Borrowed("hl_compressed_squashed_noise_ciphertext_list"),
key_filename: Cow::Borrowed("client_key_with_noise_squashing"),
clear_values: Cow::Borrowed(&[
54679568u32 as u64,
-12396372i32 as u64,
12396372i32 as u64,
false as u64,
true as u64,
]),
data_kinds: Cow::Borrowed(&[
DataKind::Unsigned,
DataKind::Signed,
DataKind::Signed,
DataKind::Bool,
DataKind::Bool,
]),
};
const CLIENT_KEY_KS32_FILENAME: &str = "client_key_ks32";
const CLIENT_KEY_KS32_TEST: HlClientKeyTest = HlClientKeyTest {
test_filename: Cow::Borrowed(CLIENT_KEY_KS32_FILENAME),
parameters: VALID_TEST_PARAMS_KS32_TUNIFORM,
};
const SERVER_KEY_KS32_TEST: HlServerKeyTest = HlServerKeyTest {
test_filename: Cow::Borrowed("server_key_ks32"),
client_key_filename: Cow::Borrowed(CLIENT_KEY_KS32_FILENAME),
rerand_cpk_filename: None,
compressed: false,
};
const COMPRESSED_SERVER_KEY_KS32_TEST: HlServerKeyTest = HlServerKeyTest {
test_filename: Cow::Borrowed("compressed_server_key_ks32"),
client_key_filename: Cow::Borrowed(CLIENT_KEY_KS32_FILENAME),
rerand_cpk_filename: None,
compressed: true,
};
const CT_KS32_TEST: HlCiphertextTest = HlCiphertextTest {
test_filename: Cow::Borrowed("ct_ks32"),
key_filename: Cow::Borrowed(CLIENT_KEY_KS32_FILENAME),
compressed: false,
clear_value: 25,
};
pub struct V1_3;
impl TfhersVersion for V1_3 {
const VERSION_NUMBER: &'static str = "1.3";
fn seed_prng(seed: u128) {
let mut seeder = DeterministicSeeder::<DefaultRandomGenerator>::new(Seed(seed));
let shortint_engine = ShortintEngine::new_from_seeder(&mut seeder);
ShortintEngine::with_thread_local_mut(|local_engine| {
let _ = std::mem::replace(local_engine, shortint_engine);
});
let boolean_engine = BooleanEngine::new_from_seeder(&mut seeder);
BooleanEngine::replace_thread_local(boolean_engine);
}
fn gen_shortint_data() -> Vec<TestMetadata> {
Vec::new()
}
fn gen_hl_data() -> Vec<TestMetadata> {
let dir = Self::data_dir().join(HL_MODULE_NAME);
create_dir_all(&dir).unwrap();
let mut zk_rng: RandomGenerator<DefaultRandomGenerator> =
RandomGenerator::new(Seed(PRNG_SEED));
// Generate a compact public key needed to create a compact list
let config =
tfhe_1_3::ConfigBuilder::with_custom_parameters(VALID_TEST_PARAMS_TUNIFORM).build();
let hl_client_key = ClientKey::generate(config);
let hl_server_key = ServerKey::new(&hl_client_key);
set_server_key(hl_server_key.clone());
let compact_pub_key = CompactPublicKey::new(&hl_client_key);
let crs = CompactPkeCrs::new(
LweDimension(ZK_PKE_CRS_TEST.lwe_dimension),
LweCiphertextCount(ZK_PKE_CRS_TEST.max_num_cleartext),
TUniform::<u64>::new(ZK_PKE_CRS_TEST.noise_bound as u32),
CiphertextModulus::new(ZK_PKE_CRS_TEST.ciphertext_modulus),
ZK_PKE_CRS_TEST.plaintext_modulus as u64,
ZkMSBZeroPaddingBitCount(ZK_PKE_CRS_TEST.padding_bit_count as u64),
&mut zk_rng,
)
.unwrap();
// Store the crs
store_versioned_auxiliary!(&crs, &dir, &ZK_PKE_CRS_TEST.test_filename);
// Store the associated client key to be able to decrypt the ciphertexts in the list
store_versioned_auxiliary!(
&hl_client_key,
&dir,
&HL_PROVEN_COMPACTLIST_TEST_ZKV2_FASTHASH.key_filename
);
store_versioned_auxiliary!(
&compact_pub_key,
&dir,
&HL_PROVEN_COMPACTLIST_TEST_ZKV2_FASTHASH
.proof_info
.unwrap()
.public_key_filename
);
let mut proven_builder = ProvenCompactCiphertextList::builder(&compact_pub_key);
proven_builder
.push(HL_PROVEN_COMPACTLIST_TEST_ZKV2_FASTHASH.clear_values[0] as u8)
.push(HL_PROVEN_COMPACTLIST_TEST_ZKV2_FASTHASH.clear_values[1] as i8)
.push(HL_PROVEN_COMPACTLIST_TEST_ZKV2_FASTHASH.clear_values[2] != 0)
.push(HL_PROVEN_COMPACTLIST_TEST_ZKV2_FASTHASH.clear_values[3] != 0);
let proven_list_packed = proven_builder
.build_with_proof_packed(
&crs,
HL_PROVEN_COMPACTLIST_TEST_ZKV2_FASTHASH
.proof_info
.unwrap()
.metadata
.as_bytes(),
ZkComputeLoad::Verify,
)
.unwrap();
store_versioned_test!(
&proven_list_packed,
&dir,
&HL_PROVEN_COMPACTLIST_TEST_ZKV2_FASTHASH.test_filename,
);
let config = tfhe_1_3::ConfigBuilder::with_custom_parameters(
HL_CLIENTKEY_MS_MEAN_COMPENSATION.parameters,
)
.build();
let (hl_client_key, hl_server_key) = tfhe_1_3::generate_keys(config);
store_versioned_test!(
&hl_client_key,
&dir,
&HL_CLIENTKEY_MS_MEAN_COMPENSATION.test_filename
);
store_versioned_test!(
&hl_server_key,
&dir,
&HL_SERVERKEY_MS_MEAN_COMPENSATION.test_filename,
);
// Generate data for the squashed noise compressed ciphertext list
{
let config = tfhe_1_3::ConfigBuilder::with_custom_parameters(
INSECURE_SMALL_TEST_PARAMS_MS_NOISE_REDUCTION,
)
.enable_noise_squashing(
INSECURE_SMALL_TEST_NOISE_SQUASHING_PARAMS_MS_NOISE_REDUCTION.into(),
)
.enable_noise_squashing_compression(TEST_PARAMS_NOISE_SQUASHING_COMPRESSION.into())
.build();
let hl_client_key = ClientKey::generate(config);
let hl_server_key = ServerKey::new(&hl_client_key);
set_server_key(hl_server_key.clone());
let input_a = FheUint32::encrypt(
HL_COMPRESSED_SQUASHED_NOISE_CIPHERTEXT_LIST.clear_values[0] as u32,
&hl_client_key,
);
let input_b = FheInt32::encrypt(
HL_COMPRESSED_SQUASHED_NOISE_CIPHERTEXT_LIST.clear_values[1] as i32,
&hl_client_key,
);
let input_c = FheInt32::encrypt(
HL_COMPRESSED_SQUASHED_NOISE_CIPHERTEXT_LIST.clear_values[2] as i32,
&hl_client_key,
);
let input_d = FheBool::encrypt(
HL_COMPRESSED_SQUASHED_NOISE_CIPHERTEXT_LIST.clear_values[3] != 0,
&hl_client_key,
);
let input_e = FheBool::encrypt(
HL_COMPRESSED_SQUASHED_NOISE_CIPHERTEXT_LIST.clear_values[4] != 0,
&hl_client_key,
);
let ns_a = input_a.squash_noise().unwrap();
let ns_b = input_b.squash_noise().unwrap();
let ns_c = input_c.squash_noise().unwrap();
let ns_d = input_d.squash_noise().unwrap();
let ns_e = input_e.squash_noise().unwrap();
let compressed_list = CompressedSquashedNoiseCiphertextList::builder()
.push(ns_a)
.push(ns_b)
.push(ns_c)
.push(ns_d)
.push(ns_e)
.build()
.unwrap();
store_versioned_auxiliary!(
&hl_client_key,
&dir,
&HL_COMPRESSED_SQUASHED_NOISE_CIPHERTEXT_LIST.key_filename
);
store_versioned_test!(
&compressed_list,
&dir,
&HL_COMPRESSED_SQUASHED_NOISE_CIPHERTEXT_LIST.test_filename,
);
};
// Generate data for the KS32 AP
{
let config = tfhe_1_3::ConfigBuilder::default()
.use_custom_parameters(CLIENT_KEY_KS32_TEST.parameters)
.build();
let hl_client_key = ClientKey::generate(config);
let compressed_server_key = CompressedServerKey::new(&hl_client_key);
let hl_server_key = compressed_server_key.decompress();
let ct = FheUint8::encrypt(CT_KS32_TEST.clear_value, &hl_client_key);
store_versioned_test!(&hl_client_key, &dir, &CLIENT_KEY_KS32_TEST.test_filename);
store_versioned_test!(&hl_server_key, &dir, &SERVER_KEY_KS32_TEST.test_filename);
store_versioned_test!(
&compressed_server_key,
&dir,
&COMPRESSED_SERVER_KEY_KS32_TEST.test_filename
);
store_versioned_test!(&ct, &dir, &CT_KS32_TEST.test_filename);
}
vec![
TestMetadata::HlHeterogeneousCiphertextList(HL_PROVEN_COMPACTLIST_TEST_ZKV2_FASTHASH),
TestMetadata::HlClientKey(HL_CLIENTKEY_MS_MEAN_COMPENSATION),
TestMetadata::HlServerKey(HL_SERVERKEY_MS_MEAN_COMPENSATION),
TestMetadata::HlCompressedSquashedNoiseCiphertextList(
HL_COMPRESSED_SQUASHED_NOISE_CIPHERTEXT_LIST,
),
TestMetadata::HlClientKey(CLIENT_KEY_KS32_TEST),
TestMetadata::HlServerKey(SERVER_KEY_KS32_TEST),
TestMetadata::HlServerKey(COMPRESSED_SERVER_KEY_KS32_TEST),
TestMetadata::HlCiphertext(CT_KS32_TEST),
]
}
}

View File

@@ -1,602 +0,0 @@
use crate::generate::{
store_versioned_auxiliary_tfhe_1_4, store_versioned_test_tfhe_1_4, TfhersVersion,
INSECURE_DEDICATED_CPK_TEST_PARAMS,
INSECURE_SMALL_TEST_NOISE_SQUASHING_PARAMS_MS_NOISE_REDUCTION,
INSECURE_SMALL_TEST_NOISE_SQUASHING_PARAMS_MULTI_BIT, INSECURE_SMALL_TEST_PARAMS_KS32,
INSECURE_SMALL_TEST_PARAMS_MS_MEAN_COMPENSATION, INSECURE_SMALL_TEST_PARAMS_MULTI_BIT,
KS_TO_BIG_TEST_PARAMS, KS_TO_SMALL_TEST_PARAMS, VALID_TEST_PARAMS_TUNIFORM,
VALID_TEST_PARAMS_TUNIFORM_COMPRESSION,
};
use crate::{
HlClientKeyTest, HlCompressedKVStoreTest, HlServerKeyTest,
HlSquashedNoiseUnsignedCiphertextTest, TestClassicParameterSet,
TestCompactPublicKeyEncryptionParameters, TestCompressionParameterSet, TestDistribution,
TestKS32ParameterSet, TestKeySwitchingParams, TestMetadata,
TestModulusSwitchNoiseReductionParams, TestModulusSwitchType, TestMultiBitParameterSet,
TestNoiseSquashingParams, TestNoiseSquashingParamsMultiBit, TestParameterSet, HL_MODULE_NAME,
};
use std::borrow::Cow;
use std::collections::HashMap;
use std::fs::create_dir_all;
use tfhe_1_4::boolean::engine::BooleanEngine;
use tfhe_1_4::core_crypto::commons::generators::DeterministicSeeder;
use tfhe_1_4::core_crypto::prelude::{DefaultRandomGenerator, UnsignedInteger};
use tfhe_1_4::prelude::*;
use tfhe_1_4::shortint::engine::ShortintEngine;
use tfhe_1_4::shortint::parameters::noise_squashing::NoiseSquashingMultiBitParameters;
use tfhe_1_4::shortint::parameters::{
CarryModulus, CiphertextModulus, CiphertextModulus32, CiphertextModulusLog,
CompactCiphertextListExpansionKind, CompactPublicKeyEncryptionParameters,
CompressionParameters, CoreCiphertextModulus, DecompositionBaseLog, DecompositionLevelCount,
DynamicDistribution, EncryptionKeyChoice, GlweDimension, KeySwitch32PBSParameters,
LweBskGroupingFactor, LweCiphertextCount, LweDimension, MaxNoiseLevel, MessageModulus,
ModulusSwitchNoiseReductionParams, ModulusSwitchType, NoiseEstimationMeasureBound,
NoiseSquashingClassicParameters, NoiseSquashingParameters, PolynomialSize, RSigmaFactor,
ShortintKeySwitchingParameters, StandardDev, SupportedCompactPkeZkScheme, Variance,
};
use tfhe_1_4::shortint::{AtomicPatternParameters, ClassicPBSParameters, MultiBitPBSParameters};
use tfhe_1_4::{
set_server_key, ClientKey, CompressedCompactPublicKey, CompressedServerKey, ConfigBuilder,
FheUint32, FheUint64, KVStore, Seed, ServerKey,
};
macro_rules! store_versioned_test {
($msg:expr, $dir:expr, $test_filename:expr $(,)? ) => {
store_versioned_test_tfhe_1_4($msg, $dir, $test_filename)
};
}
macro_rules! store_versioned_auxiliary {
($msg:expr, $dir:expr, $test_filename:expr $(,)? ) => {
store_versioned_auxiliary_tfhe_1_4($msg, $dir, $test_filename)
};
}
impl<T> From<TestDistribution> for DynamicDistribution<T>
where
T: UnsignedInteger,
{
fn from(value: TestDistribution) -> Self {
match value {
TestDistribution::Gaussian { stddev } => {
DynamicDistribution::new_gaussian_from_std_dev(StandardDev(stddev))
}
TestDistribution::TUniform { bound_log2 } => {
DynamicDistribution::new_t_uniform(bound_log2)
}
}
}
}
impl From<TestModulusSwitchNoiseReductionParams> for ModulusSwitchNoiseReductionParams {
fn from(value: TestModulusSwitchNoiseReductionParams) -> Self {
let TestModulusSwitchNoiseReductionParams {
modulus_switch_zeros_count,
ms_bound,
ms_r_sigma_factor,
ms_input_variance,
} = value;
ModulusSwitchNoiseReductionParams {
modulus_switch_zeros_count: LweCiphertextCount(modulus_switch_zeros_count),
ms_bound: NoiseEstimationMeasureBound(ms_bound),
ms_r_sigma_factor: RSigmaFactor(ms_r_sigma_factor),
ms_input_variance: Variance(ms_input_variance),
}
}
}
impl From<TestModulusSwitchType> for ModulusSwitchType {
fn from(value: TestModulusSwitchType) -> Self {
match value {
TestModulusSwitchType::Standard => ModulusSwitchType::Standard,
TestModulusSwitchType::DriftTechniqueNoiseReduction(
test_modulus_switch_noise_reduction_params,
) => ModulusSwitchType::DriftTechniqueNoiseReduction(
test_modulus_switch_noise_reduction_params.into(),
),
TestModulusSwitchType::CenteredMeanNoiseReduction => {
ModulusSwitchType::CenteredMeanNoiseReduction
}
}
}
}
impl From<TestClassicParameterSet> for ClassicPBSParameters {
fn from(value: TestClassicParameterSet) -> Self {
ClassicPBSParameters {
lwe_dimension: LweDimension(value.lwe_dimension),
glwe_dimension: GlweDimension(value.glwe_dimension),
polynomial_size: PolynomialSize(value.polynomial_size),
lwe_noise_distribution: value.lwe_noise_distribution.into(),
glwe_noise_distribution: value.glwe_noise_distribution.into(),
pbs_base_log: DecompositionBaseLog(value.pbs_base_log),
pbs_level: DecompositionLevelCount(value.pbs_level),
ks_base_log: DecompositionBaseLog(value.ks_base_log),
ks_level: DecompositionLevelCount(value.ks_level),
message_modulus: MessageModulus(value.message_modulus as u64),
carry_modulus: CarryModulus(value.carry_modulus as u64),
max_noise_level: MaxNoiseLevel::new(value.max_noise_level as u64),
log2_p_fail: value.log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(value.ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*value.encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
modulus_switch_noise_reduction_params: value
.modulus_switch_noise_reduction_params
.into(),
}
}
}
impl From<TestMultiBitParameterSet> for MultiBitPBSParameters {
fn from(value: TestMultiBitParameterSet) -> Self {
let TestMultiBitParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
encryption_key_choice,
grouping_factor,
} = value;
MultiBitPBSParameters {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.into(),
glwe_noise_distribution: glwe_noise_distribution.into(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
max_noise_level: MaxNoiseLevel::new(max_noise_level as u64),
log2_p_fail,
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
encryption_key_choice: {
match &*encryption_key_choice {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
}
},
grouping_factor: LweBskGroupingFactor(grouping_factor),
deterministic_execution: false,
}
}
}
impl From<TestKS32ParameterSet> for KeySwitch32PBSParameters {
fn from(value: TestKS32ParameterSet) -> Self {
let TestKS32ParameterSet {
lwe_dimension,
glwe_dimension,
polynomial_size,
lwe_noise_distribution,
glwe_noise_distribution,
pbs_base_log,
pbs_level,
ks_base_log,
ks_level,
message_modulus,
ciphertext_modulus,
carry_modulus,
max_noise_level,
log2_p_fail,
modulus_switch_noise_reduction_params,
post_keyswitch_ciphertext_modulus,
} = value;
Self {
lwe_dimension: LweDimension(lwe_dimension),
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
lwe_noise_distribution: lwe_noise_distribution.into(),
glwe_noise_distribution: glwe_noise_distribution.into(),
pbs_base_log: DecompositionBaseLog(pbs_base_log),
pbs_level: DecompositionLevelCount(pbs_level),
ks_base_log: DecompositionBaseLog(ks_base_log),
ks_level: DecompositionLevelCount(ks_level),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
max_noise_level: MaxNoiseLevel::new(max_noise_level as u64),
log2_p_fail,
post_keyswitch_ciphertext_modulus: CiphertextModulus32::try_new(
post_keyswitch_ciphertext_modulus,
)
.unwrap(),
ciphertext_modulus: CiphertextModulus::try_new(ciphertext_modulus).unwrap(),
modulus_switch_noise_reduction_params: modulus_switch_noise_reduction_params.into(),
}
}
}
impl From<TestParameterSet> for AtomicPatternParameters {
fn from(value: TestParameterSet) -> Self {
match value {
TestParameterSet::TestClassicParameterSet(test_classic_parameter_set) => {
let classic = ClassicPBSParameters::from(test_classic_parameter_set);
classic.into()
}
TestParameterSet::TestMultiBitParameterSet(test_parameter_set_multi_bit) => {
let classic = MultiBitPBSParameters::from(test_parameter_set_multi_bit);
classic.into()
}
TestParameterSet::TestKS32ParameterSet(test_ks32_parameter_set) => {
let ks32 = KeySwitch32PBSParameters::from(test_ks32_parameter_set);
ks32.into()
}
}
}
}
impl From<TestNoiseSquashingParams> for NoiseSquashingParameters {
fn from(value: TestNoiseSquashingParams) -> Self {
let TestNoiseSquashingParams {
glwe_dimension,
polynomial_size,
glwe_noise_distribution,
decomp_base_log,
decomp_level_count,
modulus_switch_noise_reduction_params,
message_modulus,
carry_modulus,
ciphertext_modulus,
} = value;
Self::Classic(NoiseSquashingClassicParameters {
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
glwe_noise_distribution: glwe_noise_distribution.into(),
decomp_base_log: DecompositionBaseLog(decomp_base_log),
decomp_level_count: DecompositionLevelCount(decomp_level_count),
modulus_switch_noise_reduction_params: modulus_switch_noise_reduction_params.into(),
message_modulus: MessageModulus(message_modulus.try_into().unwrap()),
carry_modulus: CarryModulus(carry_modulus.try_into().unwrap()),
ciphertext_modulus: if ciphertext_modulus == 0 {
CoreCiphertextModulus::new_native()
} else {
CoreCiphertextModulus::try_new(ciphertext_modulus).unwrap()
},
})
}
}
impl From<TestNoiseSquashingParamsMultiBit> for NoiseSquashingParameters {
fn from(value: TestNoiseSquashingParamsMultiBit) -> Self {
let TestNoiseSquashingParamsMultiBit {
glwe_dimension,
polynomial_size,
glwe_noise_distribution,
decomp_base_log,
decomp_level_count,
grouping_factor,
message_modulus,
carry_modulus,
ciphertext_modulus,
} = value;
Self::MultiBit(NoiseSquashingMultiBitParameters {
glwe_dimension: GlweDimension(glwe_dimension),
polynomial_size: PolynomialSize(polynomial_size),
glwe_noise_distribution: glwe_noise_distribution.into(),
decomp_base_log: DecompositionBaseLog(decomp_base_log),
decomp_level_count: DecompositionLevelCount(decomp_level_count),
grouping_factor: LweBskGroupingFactor(grouping_factor),
message_modulus: MessageModulus(message_modulus as u64),
carry_modulus: CarryModulus(carry_modulus as u64),
ciphertext_modulus: CoreCiphertextModulus::try_new(ciphertext_modulus).unwrap(),
deterministic_execution: false,
})
}
}
impl From<TestKeySwitchingParams> for ShortintKeySwitchingParameters {
fn from(value: TestKeySwitchingParams) -> Self {
Self {
ks_level: DecompositionLevelCount(value.ks_level),
ks_base_log: DecompositionBaseLog(value.ks_base_log),
destination_key: match &*value.destination_key {
"big" => EncryptionKeyChoice::Big,
"small" => EncryptionKeyChoice::Small,
_ => panic!("Invalid encryption key choice"),
},
}
}
}
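// Enum-like metadata fields are stored as plain strings ("big"/"small", "requires_casting",
// "zkv1"/"zkv2") and are matched strictly here: any unknown value aborts data generation rather
// than being silently defaulted.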
impl From<TestCompactPublicKeyEncryptionParameters> for CompactPublicKeyEncryptionParameters {
fn from(value: TestCompactPublicKeyEncryptionParameters) -> Self {
Self {
encryption_lwe_dimension: LweDimension(value.encryption_lwe_dimension),
encryption_noise_distribution: value.encryption_noise_distribution.into(),
message_modulus: MessageModulus(value.message_modulus as u64),
carry_modulus: CarryModulus(value.carry_modulus as u64),
ciphertext_modulus: CoreCiphertextModulus::try_new(value.ciphertext_modulus).unwrap(),
expansion_kind: match &*value.expansion_kind {
"requires_casting" => CompactCiphertextListExpansionKind::RequiresCasting,
_ => panic!("Invalid expansion kind"),
},
zk_scheme: match &*value.zk_scheme {
"zkv1" => SupportedCompactPkeZkScheme::V1,
"zkv2" => SupportedCompactPkeZkScheme::V2,
_ => panic!("Invalid zk scheme"),
},
}
}
}
impl From<TestCompressionParameterSet> for CompressionParameters {
fn from(value: TestCompressionParameterSet) -> Self {
let TestCompressionParameterSet {
br_level,
br_base_log,
packing_ks_level,
packing_ks_base_log,
packing_ks_polynomial_size,
packing_ks_glwe_dimension,
lwe_per_glwe,
storage_log_modulus,
packing_ks_key_noise_distribution,
} = value;
Self {
br_level: DecompositionLevelCount(br_level),
br_base_log: DecompositionBaseLog(br_base_log),
packing_ks_level: DecompositionLevelCount(packing_ks_level),
packing_ks_base_log: DecompositionBaseLog(packing_ks_base_log),
packing_ks_polynomial_size: PolynomialSize(packing_ks_polynomial_size),
packing_ks_glwe_dimension: GlweDimension(packing_ks_glwe_dimension),
lwe_per_glwe: LweCiphertextCount(lwe_per_glwe),
storage_log_modulus: CiphertextModulusLog(storage_log_modulus),
packing_ks_key_noise_distribution: packing_ks_key_noise_distribution.into(),
}
}
}
const TEST_FILENAME: Cow<'static, str> = Cow::Borrowed("client_key_with_noise_squashing");
const HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST: HlClientKeyTest = HlClientKeyTest {
test_filename: TEST_FILENAME,
parameters: INSECURE_SMALL_TEST_PARAMS_MULTI_BIT,
};
const HL_SERVERKEY_TEST: HlServerKeyTest = HlServerKeyTest {
test_filename: Cow::Borrowed("server_key_with_noise_squashing"),
client_key_filename: TEST_FILENAME,
rerand_cpk_filename: None,
compressed: false,
};
const HL_SQUASHED_NOISE_UNSIGNED_CIPHERTEXT_TEST: HlSquashedNoiseUnsignedCiphertextTest =
HlSquashedNoiseUnsignedCiphertextTest {
test_filename: Cow::Borrowed("squashed_noise_unsigned_ciphertext"),
key_filename: TEST_FILENAME,
clear_value: 42,
};
const HL_SERVERKEY_RERAND_TEST: HlServerKeyTest = HlServerKeyTest {
test_filename: Cow::Borrowed("server_key_for_rerand"),
client_key_filename: Cow::Borrowed("client_key_for_rerand"),
rerand_cpk_filename: Some(Cow::Borrowed("cpk_for_rerand")),
compressed: false,
};
const HL_COMPRESSED_KV_STORE_TEST: HlCompressedKVStoreTest = HlCompressedKVStoreTest {
kv_store_file_name: Cow::Borrowed("compressed_kv_store"),
client_key_file_name: Cow::Borrowed("client_key_for_kv_store"),
server_key_file_name: Cow::Borrowed("server_key_for_kv_store"),
num_elements: 512,
};
const HL_SERVERKEY_KS32_NOISE_SQUASHING_TEST: HlServerKeyTest = HlServerKeyTest {
test_filename: Cow::Borrowed("server_key_ks32_noise_squashing"),
client_key_filename: Cow::Borrowed("client_key_ks32_noise_squashing"),
rerand_cpk_filename: None,
compressed: false,
};
const HL_COMPRESSED_SERVERKEY_KS32_NOISE_SQUASHING_TEST: HlServerKeyTest = HlServerKeyTest {
test_filename: Cow::Borrowed("compressed_server_key_ks32_noise_squashing"),
client_key_filename: Cow::Borrowed("client_key_ks32_noise_squashing"),
rerand_cpk_filename: None,
compressed: true,
};
pub struct V1_4;
impl TfhersVersion for V1_4 {
const VERSION_NUMBER: &'static str = "1.4";
fn seed_prng(seed: u128) {
let mut seeder = DeterministicSeeder::<DefaultRandomGenerator>::new(Seed(seed));
let shortint_engine = ShortintEngine::new_from_seeder(&mut seeder);
ShortintEngine::with_thread_local_mut(|local_engine| {
let _ = std::mem::replace(local_engine, shortint_engine);
});
let boolean_engine = BooleanEngine::new_from_seeder(&mut seeder);
BooleanEngine::replace_thread_local(boolean_engine);
}
fn gen_shortint_data() -> Vec<TestMetadata> {
Vec::new()
}
fn gen_hl_data() -> Vec<TestMetadata> {
let dir = Self::data_dir().join(HL_MODULE_NAME);
create_dir_all(&dir).unwrap();
        // Test multi-bit noise squashing
{
let config = ConfigBuilder::with_custom_parameters(
HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST.parameters,
)
.enable_noise_squashing(INSECURE_SMALL_TEST_NOISE_SQUASHING_PARAMS_MULTI_BIT.into())
.build();
let hl_client_key = ClientKey::generate(config);
let hl_server_key = ServerKey::new(&hl_client_key);
set_server_key(hl_server_key.clone());
let input = FheUint32::encrypt(
HL_SQUASHED_NOISE_UNSIGNED_CIPHERTEXT_TEST.clear_value as u32,
&hl_client_key,
);
let ns = input.squash_noise().unwrap();
store_versioned_test!(
&hl_client_key,
&dir,
&HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST.test_filename
);
store_versioned_test!(&hl_server_key, &dir, &HL_SERVERKEY_TEST.test_filename,);
store_versioned_test!(
&ns,
&dir,
&HL_SQUASHED_NOISE_UNSIGNED_CIPHERTEXT_TEST.test_filename,
);
}
// Test re-randomization
{
let params = INSECURE_SMALL_TEST_PARAMS_MS_MEAN_COMPENSATION;
let cpk_params = (
INSECURE_DEDICATED_CPK_TEST_PARAMS.into(),
KS_TO_SMALL_TEST_PARAMS.into(),
);
let re_rand_ks_params = KS_TO_BIG_TEST_PARAMS;
let config = ConfigBuilder::with_custom_parameters(params)
.use_dedicated_compact_public_key_parameters(cpk_params)
.enable_ciphertext_re_randomization(re_rand_ks_params.into())
.build();
let hl_client_key = ClientKey::generate(config);
let hl_server_key = ServerKey::new(&hl_client_key);
let hl_public_key = CompressedCompactPublicKey::new(&hl_client_key);
store_versioned_auxiliary!(
&hl_client_key,
&dir,
&HL_SERVERKEY_RERAND_TEST.client_key_filename
);
store_versioned_auxiliary!(
&hl_public_key,
&dir,
&HL_SERVERKEY_RERAND_TEST.rerand_cpk_filename.unwrap()
);
store_versioned_test!(
&hl_server_key,
&dir,
&HL_SERVERKEY_RERAND_TEST.test_filename,
);
}
// Test CompressedKVStore
{
let config = ConfigBuilder::with_custom_parameters(VALID_TEST_PARAMS_TUNIFORM)
.enable_compression(VALID_TEST_PARAMS_TUNIFORM_COMPRESSION.into())
.build();
let hl_client_key = ClientKey::generate(config);
let hl_server_key = ServerKey::new(&hl_client_key);
set_server_key(hl_server_key.clone());
let mut clear_store = HashMap::new();
let mut store = KVStore::new();
for key in 0..HL_COMPRESSED_KV_STORE_TEST.num_elements as u32 {
let value = u64::MAX - u64::from(key);
let encrypted = FheUint64::encrypt(value, &hl_client_key);
let _ = clear_store.insert(key, value);
let _ = store.insert_with_clear_key(key, encrypted);
}
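            // `clear_store` only mirrors what was encrypted; the expected values can be re-derived
            // from `num_elements` since each entry is value = u64::MAX - key.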
let compressed_kv_store = store.compress().unwrap();
store_versioned_auxiliary!(
&hl_client_key,
&dir,
&HL_COMPRESSED_KV_STORE_TEST.client_key_file_name
);
store_versioned_auxiliary!(
&hl_server_key,
&dir,
&HL_COMPRESSED_KV_STORE_TEST.server_key_file_name
);
store_versioned_test!(
&compressed_kv_store,
&dir,
&HL_COMPRESSED_KV_STORE_TEST.kv_store_file_name,
);
}
{
let config =
tfhe_1_4::ConfigBuilder::with_custom_parameters(INSECURE_SMALL_TEST_PARAMS_KS32)
.enable_noise_squashing(
INSECURE_SMALL_TEST_NOISE_SQUASHING_PARAMS_MS_NOISE_REDUCTION.into(),
)
.build();
let client_key = ClientKey::generate(config);
let compressed_server_key = CompressedServerKey::new(&client_key);
let server_key = compressed_server_key.decompress();
store_versioned_auxiliary!(
&client_key,
&dir,
&HL_SERVERKEY_KS32_NOISE_SQUASHING_TEST.client_key_filename
);
store_versioned_test!(
&compressed_server_key,
&dir,
&HL_COMPRESSED_SERVERKEY_KS32_NOISE_SQUASHING_TEST.test_filename
);
store_versioned_test!(
&server_key,
&dir,
&HL_SERVERKEY_KS32_NOISE_SQUASHING_TEST.test_filename
);
}
vec![
TestMetadata::HlClientKey(HL_CLIENTKEY_WITH_NOISE_SQUASHING_TEST),
TestMetadata::HlServerKey(HL_SERVERKEY_TEST),
TestMetadata::HlSquashedNoiseUnsignedCiphertext(
HL_SQUASHED_NOISE_UNSIGNED_CIPHERTEXT_TEST,
),
TestMetadata::HlServerKey(HL_SERVERKEY_RERAND_TEST),
TestMetadata::HlCompressedKVStoreTest(HL_COMPRESSED_KV_STORE_TEST),
TestMetadata::HlServerKey(HL_COMPRESSED_SERVERKEY_KS32_NOISE_SQUASHING_TEST),
TestMetadata::HlServerKey(HL_SERVERKEY_KS32_NOISE_SQUASHING_TEST),
]
}
}

View File

@@ -3,21 +3,11 @@ use std::fs::{self, File};
use std::path::{Path, PathBuf};
use bincode::Options;
use semver::Version;
use serde::Serialize;
use tfhe_0_11_versionable::Versionize as VersionizeTfhe_0_11;
use tfhe_1_0_versionable::Versionize as VersionizeTfhe_1_0;
use tfhe_1_1_versionable::Versionize as VersionizeTfhe_1_1;
use tfhe_1_3_versionable::Versionize as VersionizeTfhe_1_3;
use tfhe_1_4_versionable::Versionize as VersionizeTfhe_1_4;
use tfhe_versionable::{Versionize as VersionizeTfhe_0_10, Versionize as VersionizeTfhe_0_8};
use serde::de::DeserializeOwned;
use crate::{
data_dir, dir_for_version, TestClassicParameterSet, TestCompactPublicKeyEncryptionParameters,
TestCompressionParameterSet, TestDistribution, TestKS32ParameterSet, TestKeySwitchingParams,
TestMetadata, TestModulusSwitchNoiseReductionParams, TestModulusSwitchType,
TestMultiBitParameterSet, TestNoiseSquashingCompressionParameters, TestNoiseSquashingParams,
TestNoiseSquashingParamsMultiBit, TestParameterSet,
};
use crate::*;
pub const PRNG_SEED: u128 = 0xdeadbeef;
@@ -344,67 +334,195 @@ pub fn save_bcode<Data: Serialize, P: AsRef<Path>>(msg: &Data, path: P) {
options.serialize_into(&mut file, msg).unwrap();
}
/// Stores the test data in `dir`, encoded in both cbor and bincode, using the right
/// tfhe-versionable version
macro_rules! define_store_versioned_test_fn {
($fn_name:ident, $versionize_trait:ident) => {
pub fn $fn_name<Data: $versionize_trait, P: AsRef<Path>>(
msg: &Data,
dir: P,
test_filename: &str,
) {
let versioned = msg.versionize();
/// Stores the test data in `dir`, encoded in both cbor and bincode, using the provided versionize
/// function
pub fn generic_store_versioned_test<'a, Data: 'a, Vers: Serialize + 'a, P: AsRef<Path>>(
versionize: impl FnOnce(&'a Data) -> Vers,
msg: &'a Data,
dir: P,
test_filename: &str,
) {
let versioned = versionize(msg);
// Store in cbor
let filename_cbor = format!("{}.cbor", test_filename);
save_cbor(&versioned, dir.as_ref().join(filename_cbor));
// Store in cbor
let filename_cbor = format!("{}.cbor", test_filename);
save_cbor(&versioned, dir.as_ref().join(filename_cbor));
// Store in bincode
let filename_bincode = format!("{}.bcode", test_filename);
save_bcode(&versioned, dir.as_ref().join(filename_bincode));
}
};
// Store in bincode
let filename_bincode = format!("{}.bcode", test_filename);
save_bcode(&versioned, dir.as_ref().join(filename_bincode));
}
define_store_versioned_test_fn!(store_versioned_test_tfhe_0_8, VersionizeTfhe_0_8);
define_store_versioned_test_fn!(store_versioned_test_tfhe_0_10, VersionizeTfhe_0_10);
define_store_versioned_test_fn!(store_versioned_test_tfhe_0_11, VersionizeTfhe_0_11);
define_store_versioned_test_fn!(store_versioned_test_tfhe_1_0, VersionizeTfhe_1_0);
define_store_versioned_test_fn!(store_versioned_test_tfhe_1_1, VersionizeTfhe_1_1);
define_store_versioned_test_fn!(store_versioned_test_tfhe_1_3, VersionizeTfhe_1_3);
define_store_versioned_test_fn!(store_versioned_test_tfhe_1_4, VersionizeTfhe_1_4);
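// Hypothetical usage sketch (not part of this diff): instead of one macro expansion per
// historical release, callers now pass the matching `Versionize::versionize` of that
// release's tfhe-versionable crate as a closure. The names reused below (`server_key`,
// `dir`, `HL_SERVERKEY_TEST`, `VersionizeTfhe_1_4`) come from this patch; the closure
// wiring itself is an assumption for illustration.
//
// generic_store_versioned_test(
//     |key| VersionizeTfhe_1_4::versionize(key),
//     &server_key,
//     &dir,
//     &HL_SERVERKEY_TEST.test_filename,
// );
//
// `generic_store_versioned_auxiliary` below follows the same pattern but only emits the
// cbor file.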
/// Stores the auxiliary data in `dir`, encoded in cbor, using the right tfhe-versionable version
macro_rules! define_store_versioned_auxiliary_fn {
($fn_name:ident, $versionize_trait:ident) => {
pub fn $fn_name<Data: $versionize_trait, P: AsRef<Path>>(
msg: &Data,
dir: P,
test_filename: &str,
) {
let versioned = msg.versionize();
/// Stores the auxiliary data in `dir`, encoded in cbor, using the provided versionize function
pub fn generic_store_versioned_auxiliary<'a, Data: 'a, Vers: Serialize + 'a, P: AsRef<Path>>(
versionize: impl FnOnce(&'a Data) -> Vers,
msg: &'a Data,
dir: P,
test_filename: &str,
) {
let versioned = versionize(msg);
// Store in cbor
let filename_cbor = format!("{}.cbor", test_filename);
save_cbor(&versioned, dir.as_ref().join(filename_cbor));
}
};
// Store in cbor
let filename_cbor = format!("{}.cbor", test_filename);
save_cbor(&versioned, dir.as_ref().join(filename_cbor));
}
define_store_versioned_auxiliary_fn!(store_versioned_auxiliary_tfhe_0_8, VersionizeTfhe_0_8);
define_store_versioned_auxiliary_fn!(store_versioned_auxiliary_tfhe_0_10, VersionizeTfhe_0_10);
define_store_versioned_auxiliary_fn!(store_versioned_auxiliary_tfhe_0_11, VersionizeTfhe_0_11);
define_store_versioned_auxiliary_fn!(store_versioned_auxiliary_tfhe_1_3, VersionizeTfhe_1_3);
define_store_versioned_auxiliary_fn!(store_versioned_auxiliary_tfhe_1_4, VersionizeTfhe_1_4);
pub fn store_metadata<Meta: Serialize, P: AsRef<Path>>(value: &Meta, path: P) {
/// Stores the test metadata for all modules into module-specific ron files
pub fn store_metadata<P: AsRef<Path>>(testcases: Vec<Testcase>, base_data_dir: P) {
let mut sorted: Vec<_> = testcases
.iter()
.map(|data| {
let vers = major_minor_parse(&data.tfhe_version_min);
(vers, data)
})
.collect();
sorted.sort_by_key(|(vers, _)| vers.clone());
let sorted = sorted.iter().map(|(_, data)| *data);
let base_data_dir = base_data_dir.as_ref();
let shortint_testcases: Vec<Testcase> = sorted
.clone()
.filter(|test| test.tfhe_module == SHORTINT_MODULE_NAME)
.cloned()
.collect();
store_ron(
&shortint_testcases,
base_data_dir.join(format!("{SHORTINT_MODULE_NAME}.ron")),
);
let high_level_api_testcases: Vec<Testcase> = sorted
.filter(|test| test.tfhe_module == HL_MODULE_NAME)
.cloned()
.collect();
store_ron(
&high_level_api_testcases,
base_data_dir.join(format!("{HL_MODULE_NAME}.ron")),
);
}
fn store_ron<Meta: Serialize, P: AsRef<Path>>(value: &Meta, path: P) {
let serialized = ron::ser::to_string_pretty(value, ron::ser::PrettyConfig::default()).unwrap();
fs::write(path, serialized).unwrap();
}
fn load_ron<Meta: DeserializeOwned, P: AsRef<Path>>(path: P) -> Option<Meta> {
File::open(path)
.map(|f| ron::de::from_reader(f).unwrap())
.ok()
}
/// Update the metadata with data for a specific version.
///
/// All the metadata in the vec should be for the same TFHE-rs version.
/// Old metadata for this version will be removed and replaced with new data.
/// Old metadata for the other versions will not be modified.
pub fn update_metadata_for_version<P: AsRef<Path>>(testcases: Vec<Testcase>, base_data_dir: P) {
let base_data_dir = base_data_dir.as_ref();
let shortint_testcases: Vec<Testcase> = testcases
.iter()
.filter(|test| test.tfhe_module == SHORTINT_MODULE_NAME)
.cloned()
.collect();
update_metadata_for_version_and_module(
&shortint_testcases,
base_data_dir.join(format!("{SHORTINT_MODULE_NAME}.ron")),
);
let high_level_api_testcases: Vec<Testcase> = testcases
.iter()
.filter(|test| test.tfhe_module == HL_MODULE_NAME)
.cloned()
.collect();
update_metadata_for_version_and_module(
&high_level_api_testcases,
base_data_dir.join(format!("{HL_MODULE_NAME}.ron")),
);
}
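// Hypothetical usage sketch: regenerating the data for a single old release only touches
// that release's entries in the ron files. `V1_3` and `gen_all_data` are reused from this
// patch; `base_data_dir` is a placeholder name.
//
// let regenerated_1_3 = gen_all_data::<V1_3>(&base_data_dir);
// update_metadata_for_version(regenerated_1_3, &base_data_dir);
// // Entries whose `tfhe_version_min` is not "1.3" are reloaded and written back unchanged.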
pub fn display_metadata(testcases: &[Testcase]) {
let serialized =
ron::ser::to_string_pretty(testcases, ron::ser::PrettyConfig::default()).unwrap();
println!("{serialized}")
}
pub fn load_metadata_from_str(data: &str) -> Vec<Testcase> {
ron::from_str(data).unwrap()
}
/// Parses a version number where only the major/minor components are provided
fn major_minor_parse(vers: &str) -> Version {
Version::parse(&format!("{}.0", vers)).unwrap()
}
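// Example (assumption, not part of this diff): "0.8" parses to semver 0.8.0, so testcases
// can be ordered by their `tfhe_version_min` field.
// assert_eq!(major_minor_parse("0.8"), Version::new(0, 8, 0));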
fn update_metadata_for_version_and_module<P: AsRef<Path>>(new_data: &[Testcase], path: P) {
let loaded: Vec<Testcase> = load_ron(&path).unwrap_or(Vec::new());
let Some(updated_vers) = new_data
.first()
.map(|data| major_minor_parse(&data.tfhe_version_min))
else {
return;
};
let parsed = loaded.iter().map(|data| {
let vers = major_minor_parse(&data.tfhe_version_min);
(vers, data)
});
let filtered = parsed.filter(|(vers, _)| vers != &updated_vers);
let mut complete: Vec<_> = filtered
.chain(new_data.iter().map(|data| {
let vers = major_minor_parse(&data.tfhe_version_min);
assert_eq!(
updated_vers, vers,
"update_metadata_for_version should be called with data from a single version.\n\
Expected {updated_vers}, got {vers}"
);
(vers, data)
}))
.collect();
complete.sort_by_key(|(vers, _)| vers.clone());
let sorted: Vec<_> = complete.into_iter().map(|(_, data)| data).collect();
store_ron(&sorted, path);
}
/// Generates all the data for the provided version and returns the vec of metadata
pub fn gen_all_data<Vers: TfhersVersion>(base_data_dir: &Path) -> Vec<Testcase> {
Vers::seed_prng(PRNG_SEED);
let shortint_tests = Vers::gen_shortint_data(base_data_dir);
let mut tests: Vec<Testcase> = shortint_tests
.iter()
.map(|metadata| Testcase {
tfhe_version_min: Vers::VERSION_NUMBER.to_string(),
tfhe_module: SHORTINT_MODULE_NAME.to_string(),
metadata: metadata.clone(),
})
.collect();
let hl_tests = Vers::gen_hl_data(base_data_dir);
tests.extend(hl_tests.iter().map(|metadata| Testcase {
tfhe_version_min: Vers::VERSION_NUMBER.to_string(),
tfhe_module: HL_MODULE_NAME.to_string(),
metadata: metadata.clone(),
}));
tests
}
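// Hypothetical sketch of a full regeneration across every supported release, similar in
// spirit to the removed `main.rs` shown further down; `all` and `base_data_dir` are
// placeholder names, the version types are reused from this patch.
//
// let mut all = Vec::new();
// all.extend(gen_all_data::<V0_8>(&base_data_dir));
// // ...one call per supported release...
// all.extend(gen_all_data::<V1_4>(&base_data_dir));
// store_metadata(all, &base_data_dir);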
pub trait TfhersVersion {
const VERSION_NUMBER: &'static str;
fn data_dir() -> PathBuf {
let base_data_dir = data_dir(env!("CARGO_MANIFEST_DIR"));
fn data_dir<P: AsRef<Path>>(base_data_dir: P) -> PathBuf {
dir_for_version(base_data_dir, Self::VERSION_NUMBER)
}
@@ -416,11 +534,11 @@ pub trait TfhersVersion {
/// This should create tfhe-rs shortint types, versionize them and store them into the version
/// specific directory. The metadata for the generated tests should be returned in the same
/// order that the tests will be run.
fn gen_shortint_data() -> Vec<TestMetadata>;
fn gen_shortint_data<P: AsRef<Path>>(base_data_dir: P) -> Vec<TestMetadata>;
/// Generates data for the "high_level_api" module for this version.
/// This should create tfhe-rs HL types, versionize them and store them into the version
/// specific directory. The metadata for the generated tests should be returned in the same
/// order that the tests will be run.
fn gen_hl_data() -> Vec<TestMetadata>;
fn gen_hl_data<P: AsRef<Path>>(base_data_dir: P) -> Vec<TestMetadata>;
}
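// Hypothetical skeleton for a future data-generation crate (the `V1_5` name and the empty
// bodies are assumptions; `seed_prng` belongs to the trait but its hunk is not shown in
// this diff):
//
// pub struct V1_5;
//
// impl TfhersVersion for V1_5 {
//     const VERSION_NUMBER: &'static str = "1.5";
//
//     fn seed_prng(seed: u128) { /* seed this version's deterministic RNG */ }
//
//     fn gen_shortint_data<P: AsRef<Path>>(base_data_dir: P) -> Vec<TestMetadata> {
//         let dir = Self::data_dir(base_data_dir);
//         // versionize shortint types into `dir`, return their metadata in run order
//         let _ = dir;
//         Vec::new()
//     }
//
//     fn gen_hl_data<P: AsRef<Path>>(base_data_dir: P) -> Vec<TestMetadata> {
//         let dir = Self::data_dir(base_data_dir);
//         // versionize high_level_api types into `dir`, return their metadata in run order
//         let _ = dir;
//         Vec::new()
//     }
// }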

View File

@@ -1,36 +1,16 @@
pub mod generate;
pub mod load;
use core::f64;
use std::borrow::Cow;
use std::path::{Path, PathBuf};
#[cfg(feature = "load")]
use semver::{Prerelease, Version, VersionReq};
#[cfg(feature = "load")]
use std::fmt::Display;
use strum::Display;
use serde::{Deserialize, Serialize};
#[cfg(feature = "generate")]
pub mod data_0_10;
#[cfg(feature = "generate")]
pub mod data_0_11;
#[cfg(feature = "generate")]
pub mod data_0_8;
#[cfg(feature = "generate")]
pub mod data_1_0;
#[cfg(feature = "generate")]
pub mod data_1_1;
#[cfg(feature = "generate")]
pub mod data_1_3;
#[cfg(feature = "generate")]
pub mod data_1_4;
#[cfg(feature = "generate")]
pub mod generate;
#[cfg(feature = "load")]
pub mod load;
const DATA_DIR: &str = "data";
pub const SHORTINT_MODULE_NAME: &str = "shortint";
pub const HL_MODULE_NAME: &str = "high_level_api";
pub const ZK_MODULE_NAME: &str = "zk";
@@ -206,13 +186,6 @@ pub fn dir_for_version<P: AsRef<Path>>(data_dir: P, version: &str) -> PathBuf {
path
}
pub fn data_dir<P: AsRef<Path>>(root: P) -> PathBuf {
let mut path = PathBuf::from(root.as_ref());
path.push(DATA_DIR);
path
}
pub trait TestType {
/// The tfhe-rs module where this type resides
fn module(&self) -> String;
@@ -224,7 +197,6 @@ pub trait TestType {
/// (they will be inferred)
fn test_filename(&self) -> String;
#[cfg(feature = "load")]
fn success(&self, format: load::DataFormat) -> load::TestSuccess {
load::TestSuccess {
module: self.module(),
@@ -234,7 +206,6 @@ pub trait TestType {
}
}
#[cfg(feature = "load")]
fn failure<E: Display>(&self, error: E, format: load::DataFormat) -> load::TestFailure {
load::TestFailure {
module: self.module(),
@@ -309,7 +280,7 @@ impl TestParameterSet {
Self::TestMultiBitParameterSet(value)
}
const fn polynomial_size(&self) -> usize {
pub const fn polynomial_size(&self) -> usize {
match self {
TestParameterSet::TestClassicParameterSet(test_classic_parameter_set) => {
test_classic_parameter_set.polynomial_size
@@ -323,7 +294,7 @@ impl TestParameterSet {
}
}
const fn glwe_dimension(&self) -> usize {
pub const fn glwe_dimension(&self) -> usize {
match self {
TestParameterSet::TestClassicParameterSet(test_classic_parameter_set) => {
test_classic_parameter_set.glwe_dimension
@@ -336,7 +307,8 @@ impl TestParameterSet {
}
}
}
const fn lwe_noise_distribution(&self) -> TestDistribution {
pub const fn lwe_noise_distribution(&self) -> TestDistribution {
match self {
TestParameterSet::TestClassicParameterSet(test_classic_parameter_set) => {
test_classic_parameter_set.lwe_noise_distribution
@@ -349,7 +321,8 @@ impl TestParameterSet {
}
}
}
const fn ciphertext_modulus(&self) -> u128 {
pub const fn ciphertext_modulus(&self) -> u128 {
match self {
TestParameterSet::TestClassicParameterSet(test_classic_parameter_set) => {
test_classic_parameter_set.ciphertext_modulus
@@ -362,7 +335,8 @@ impl TestParameterSet {
}
}
}
const fn message_modulus(&self) -> usize {
pub const fn message_modulus(&self) -> usize {
match self {
TestParameterSet::TestClassicParameterSet(test_classic_parameter_set) => {
test_classic_parameter_set.message_modulus
@@ -375,7 +349,8 @@ impl TestParameterSet {
}
}
}
const fn carry_modulus(&self) -> usize {
pub const fn carry_modulus(&self) -> usize {
match self {
TestParameterSet::TestClassicParameterSet(test_classic_parameter_set) => {
test_classic_parameter_set.carry_modulus
@@ -715,7 +690,6 @@ pub struct Testcase {
pub metadata: TestMetadata,
}
#[cfg(feature = "load")]
impl Testcase {
pub fn is_valid_for_version(&self, version: &str) -> bool {
let mut tfhe_version = Version::parse(version).unwrap();

View File

@@ -1,79 +0,0 @@
use std::fs::remove_dir_all;
use std::thread;
use tfhe_backward_compat_data::data_0_10::V0_10;
use tfhe_backward_compat_data::data_0_11::V0_11;
use tfhe_backward_compat_data::data_0_8::V0_8;
use tfhe_backward_compat_data::data_1_0::V1_0;
use tfhe_backward_compat_data::data_1_1::V1_1;
use tfhe_backward_compat_data::data_1_3::V1_3;
use tfhe_backward_compat_data::data_1_4::V1_4;
use tfhe_backward_compat_data::generate::{store_metadata, TfhersVersion, PRNG_SEED};
use tfhe_backward_compat_data::{data_dir, Testcase, HL_MODULE_NAME, SHORTINT_MODULE_NAME};
fn gen_all_data<Vers: TfhersVersion>() -> Vec<Testcase> {
Vers::seed_prng(PRNG_SEED);
let shortint_tests = Vers::gen_shortint_data();
let mut tests: Vec<Testcase> = shortint_tests
.iter()
.map(|metadata| Testcase {
tfhe_version_min: Vers::VERSION_NUMBER.to_string(),
tfhe_module: SHORTINT_MODULE_NAME.to_string(),
metadata: metadata.clone(),
})
.collect();
let hl_tests = Vers::gen_hl_data();
tests.extend(hl_tests.iter().map(|metadata| Testcase {
tfhe_version_min: Vers::VERSION_NUMBER.to_string(),
tfhe_module: HL_MODULE_NAME.to_string(),
metadata: metadata.clone(),
}));
tests
}
fn main() {
let root_dir = env!("CARGO_MANIFEST_DIR");
let data_dir_path = data_dir(root_dir);
remove_dir_all(&data_dir_path).unwrap();
let handler_v0_8 = thread::spawn(gen_all_data::<V0_8>);
let handler_v0_10 = thread::spawn(gen_all_data::<V0_10>);
let handler_v0_11 = thread::spawn(gen_all_data::<V0_11>);
let handler_v1_0 = thread::spawn(gen_all_data::<V1_0>);
let handler_v1_1 = thread::spawn(gen_all_data::<V1_1>);
let handler_v1_3 = thread::spawn(gen_all_data::<V1_3>);
let handler_v1_4 = thread::spawn(gen_all_data::<V1_4>);
let mut testcases = vec![];
testcases.extend_from_slice(&handler_v0_8.join().unwrap());
testcases.extend_from_slice(&handler_v0_10.join().unwrap());
testcases.extend_from_slice(&handler_v0_11.join().unwrap());
testcases.extend_from_slice(&handler_v1_0.join().unwrap());
testcases.extend_from_slice(&handler_v1_1.join().unwrap());
testcases.extend_from_slice(&handler_v1_3.join().unwrap());
testcases.extend_from_slice(&handler_v1_4.join().unwrap());
let shortint_testcases: Vec<Testcase> = testcases
.iter()
.filter(|test| test.tfhe_module == SHORTINT_MODULE_NAME)
.cloned()
.collect();
store_metadata(&shortint_testcases, data_dir_path.join("shortint.ron"));
let high_level_api_testcases: Vec<Testcase> = testcases
.iter()
.filter(|test| test.tfhe_module == HL_MODULE_NAME)
.cloned()
.collect();
store_metadata(
&high_level_api_testcases,
data_dir_path.join("high_level_api.ron"),
);
}