feat(complexity_estimator): complexity of atomic pattern

Resolve #8
rudy
2022-02-03 11:49:49 +01:00
committed by rudy-6-4
parent 7d387baf3e
commit ccd96e8dec
15 changed files with 447 additions and 23 deletions


@@ -39,14 +39,22 @@ jobs:
command: fmt
args: -- --check
- name: cargo build
uses: actions-rs/cargo@v1
env:
RUSTFLAGS: -D warnings
with:
command: build
args: ${{ env.CARGO_ARGS }}
- name: cargo clippy
uses: actions-rs/cargo@v1
with:
command: clippy
args: ${{ env.CARGO_ARGS }}
args: ${{ env.CARGO_ARGS }} -- -D warnings
- name: cargo test
uses: actions-rs/cargo@v1
with:
command: test
args: ${{ env.CARGO_ARGS }}
args: ${{ env.CARGO_ARGS }} --no-fail-fast


@@ -8,3 +8,7 @@ edition = "2021"
[dependencies]
concrete-commons = { git = "ssh://git@github.com/zama-ai/concrete_internal.git", branch = "fix/optimizer_compat" }
concrete-npe = { git = "ssh://git@github.com/zama-ai/concrete_internal.git", branch = "fix/optimizer_compat" }
[dev-dependencies]
approx = "0.5.1"
pretty_assertions = "1"


@@ -0,0 +1,5 @@
/** Global count of operations. To keep things simple, every operation (addition and multiplication) counts as 1.
* Note that, depending on the complexity model used, this count is more or less accurately proportional to the real execution time.
* Complexities are therefore only comparable within the same complexity model.
*/
pub type Complexity = f64;

src/computing_cost/fft.rs Normal file

@@ -0,0 +1,58 @@
use super::complexity::Complexity;
pub trait FftComplexity {
fn fft_complexity(&self, size: u64) -> Complexity;
fn ifft_complexity(&self, size: u64) -> Complexity;
}
/** Standard fft complexity model */
pub struct AsymptoticWithFactors {
factor_fft: f64, // factor applied on asymptotic complexity
factor_ifft: f64, // factor applied on asymptotic complexity
}
impl FftComplexity for AsymptoticWithFactors {
// https://github.com/zama-ai/concrete-optimizer/blob/prototype/python/optimizer/noise_formulas/bootstrap.py#L109
#[inline] // inlined so the log2 computation can be shared across calls at the call site
fn fft_complexity(&self, size: u64) -> Complexity {
let size = size as Complexity;
size * size.log2() * self.factor_fft
}
#[inline] // inlined so the log2 computation can be shared across calls at the call site
fn ifft_complexity(&self, size: u64) -> Complexity {
let size = size as Complexity;
size * size.log2() * self.factor_ifft
}
}
/** Standard fft complexity with 1.0 factors */
pub const DEFAULT: AsymptoticWithFactors = AsymptoticWithFactors {
factor_fft: 1.0,
factor_ifft: 1.0,
};
#[cfg(test)]
pub mod tests {
use crate::computing_cost::fft;
use crate::computing_cost::fft::{AsymptoticWithFactors, FftComplexity};
/** Standard fft complexity with AWS-measured factors */
pub const COST_AWS: AsymptoticWithFactors = AsymptoticWithFactors {
// https://github.com/zama-ai/concrete-optimizer/blob/prototype/python/optimizer/noise_formulas/bootstrap.py#L150
factor_fft: 0.20292695115308917,
factor_ifft: 0.407795078512891,
};
#[test]
fn golden_python_prototype() {
let golden_fft = 664.3856189774724;
let actual_fft = fft::DEFAULT.fft_complexity(100);
approx::assert_relative_eq!(golden_fft, actual_fft, epsilon = f64::EPSILON);
let golden_ifft = 664.3856189774724;
let actual_ifft = fft::DEFAULT.ifft_complexity(100);
approx::assert_relative_eq!(golden_ifft, actual_ifft, epsilon = f64::EPSILON);
}
}
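
For intuition, the asymptotic model above is just N * log2(N) scaled by a per-direction factor; with the DEFAULT factors of 1.0, an FFT of size 100 costs 100 * log2(100) = 664.3856189774724, which is the golden value checked in the test. A minimal standalone sketch (illustration only, not part of the commit; asymptotic_fft_cost is an illustrative helper name) reproducing that number:

// Illustration only: re-derives the AsymptoticWithFactors formula outside the crate.
fn asymptotic_fft_cost(size: u64, factor: f64) -> f64 {
    let size = size as f64;
    size * size.log2() * factor
}

fn main() {
    let cost = asymptotic_fft_cost(100, 1.0); // DEFAULT uses factor 1.0
    assert!((cost - 664.3856189774724).abs() < 1e-9);
    println!("fft_complexity(100) = {}", cost);
}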


@@ -0,0 +1,3 @@
pub mod complexity;
pub mod fft;
pub mod operators;


@@ -0,0 +1,72 @@
use super::super::complexity::Complexity;
use super::keyswitch_lwe::KeySwitchLWEComplexity;
use super::pbs::PbsComplexity;
use super::{keyswitch_lwe, pbs};
#[allow(clippy::too_many_arguments)]
pub trait AtomicPatternComplexity {
fn complexity(
&self,
sum_size: u64,
input_lwe_dimension: u64, //n_big
internal_ks_output_lwe_dimension: u64, //n_small
ks_decomposition_level_count: u64, //l(KS)
ks_decomposition_base_log: u64, //b(KS)
glwe_polynomial_size: u64, //N
glwe_dimension: u64, //k
br_decomposition_level_count: u64, //l(BR)
br_decomposition_base_log: u64, //b(BR)
ciphertext_modulus_log: u64,
) -> Complexity;
}
pub struct KsPbs<KS: KeySwitchLWEComplexity, PBS: PbsComplexity> {
pub ks_lwe: KS,
pub pbs: PBS,
}
impl<KS, PBS> AtomicPatternComplexity for KsPbs<KS, PBS>
where
KS: KeySwitchLWEComplexity,
PBS: PbsComplexity,
{
fn complexity(
&self,
sum_size: u64,
input_lwe_dimension: u64, //n_big
internal_ks_output_lwe_dimension: u64, //n_small
ks_decomposition_level_count: u64, //l(KS)
ks_decomposition_base_log: u64, //b(KS) // not used
glwe_polynomial_size: u64, //N
glwe_dimension: u64, //k
br_decomposition_level_count: u64, //l(BR)
br_decomposition_base_log: u64, //b(BR) // not used by the default cmux model
ciphertext_modulus_log: u64,
) -> Complexity {
let multisum_complexity = (sum_size * input_lwe_dimension) as f64;
let ks_complexity = {
self.ks_lwe.complexity(
input_lwe_dimension,
internal_ks_output_lwe_dimension,
ks_decomposition_level_count,
ks_decomposition_base_log,
ciphertext_modulus_log,
)
};
let pbs_complexity = self.pbs.complexity(
internal_ks_output_lwe_dimension,
glwe_polynomial_size,
glwe_dimension,
br_decomposition_level_count,
br_decomposition_base_log,
ciphertext_modulus_log,
);
multisum_complexity + ks_complexity + pbs_complexity
}
}
pub type Default = KsPbs<keyswitch_lwe::Default, pbs::Default>;
pub const DEFAULT: Default = KsPbs {
ks_lwe: keyswitch_lwe::DEFAULT,
pbs: pbs::DEFAULT,
};
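
As the impl above shows, the atomic-pattern cost is purely additive: a multisum counted as one operation per coefficient of each summed ciphertext, plus whatever the key-switch and PBS models report. A minimal sketch of that decomposition (illustration only; atomic_pattern_cost is an illustrative name, and the two stage costs are assumed to come from KeySwitchLWEComplexity and PbsComplexity implementations):

// Illustration only: the KsPbs atomic pattern adds its three stages together.
fn atomic_pattern_cost(sum_size: u64, input_lwe_dimension: u64, ks_cost: f64, pbs_cost: f64) -> f64 {
    // one operation per coefficient of each summed ciphertext
    let multisum_cost = (sum_size * input_lwe_dimension) as f64;
    multisum_cost + ks_cost + pbs_cost
}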


@@ -0,0 +1,116 @@
use super::super::complexity::Complexity;
use super::super::fft;
use fft::FftComplexity;
pub trait CmuxComplexity {
#[allow(non_snake_case)]
fn complexity(
&self,
glwe_polynomial_size: u64, //N
glwe_dimension: u64, //k
br_decomposition_level_count: u64, //l(BR)
br_decomposition_base_log: u64, //b(BR)
ciphertext_modulus_log: u64, //log2_q
) -> Complexity;
}
#[allow(non_snake_case)]
pub struct SimpleWithFactors<FFT: FftComplexity> {
fft: FFT,
linear_fft_factor: Option<f64>, // additional linear fft cost: None => N, Some(w) => N * w * log2_q
linear_ifft_factor: Option<f64>, // additional linear ifft cost, same convention
blind_rotate_factor: f64,
constant_cost: f64, // constant additive cost
}
fn final_additional_linear_fft_factor(factor: Option<f64>, integer_size: u64) -> f64 {
match factor {
Some(w) => w * (integer_size as f64),
None => 1.0,
}
}
impl<FFT: FftComplexity> CmuxComplexity for SimpleWithFactors<FFT> {
// https://github.com/zama-ai/concrete-optimizer/blob/prototype/python/optimizer/noise_formulas/bootstrap.py#L145
#[allow(non_snake_case)]
fn complexity(
&self,
glwe_polynomial_size: u64, //N
glwe_dimension: u64, //k
br_decomposition_level_count: u64, //l(BR)
_br_decomposition_base_log: u64, //b(BR)
ciphertext_modulus_log: u64, //log2_q
) -> Complexity {
let f_glwe_polynomial_size = glwe_polynomial_size as f64;
let f_glwe_size = (glwe_dimension + 1) as f64;
let br_decomposition_level_count = br_decomposition_level_count as f64;
let f_square_glwe_size = f_glwe_size * f_glwe_size;
let additional_linear_fft_factor =
final_additional_linear_fft_factor(self.linear_fft_factor, ciphertext_modulus_log);
let additional_linear_ifft_factor =
final_additional_linear_fft_factor(self.linear_ifft_factor, ciphertext_modulus_log);
let fft_cost = f_glwe_size
* br_decomposition_level_count
* (self.fft.fft_complexity(glwe_polynomial_size)
+ additional_linear_fft_factor * f_glwe_polynomial_size);
let ifft_cost = f_glwe_size
* (self.fft.ifft_complexity(glwe_polynomial_size)
+ additional_linear_ifft_factor * f_glwe_polynomial_size);
let br_cost = self.blind_rotate_factor
* f_glwe_polynomial_size
* br_decomposition_level_count
* f_square_glwe_size;
fft_cost + ifft_cost + br_cost + self.constant_cost
}
}
pub type Default = SimpleWithFactors<fft::AsymptoticWithFactors>;
pub const DEFAULT: Default = SimpleWithFactors {
fft: fft::DEFAULT,
linear_fft_factor: None,
linear_ifft_factor: None,
blind_rotate_factor: 1.0,
constant_cost: 0.0,
};
#[cfg(test)]
pub mod tests {
use super::*;
pub const COST_AWS: Default = SimpleWithFactors {
/* https://github.com/zama-ai/concrete-optimizer/blob/prototype/python/optimizer/noise_formulas/bootstrap.py#L145 */
fft: fft::tests::COST_AWS,
linear_fft_factor: Some(0.011647955063264166),
linear_ifft_factor: Some(0.018836852582634938),
blind_rotate_factor: 0.8418306429189878,
constant_cost: 923.7542202718637,
};
#[test]
fn golden_python_prototype() {
let ignored = 0;
let golden = 8.0;
let actual = DEFAULT.complexity(1, 1, 1, ignored, 0);
approx::assert_relative_eq!(golden, actual, epsilon = f64::EPSILON);
let golden = 138621.0;
let actual = DEFAULT.complexity(1, 20, 300, ignored, 64);
approx::assert_relative_eq!(golden, actual, epsilon = f64::EPSILON);
let golden = 927.1215428435396;
let actual = COST_AWS.complexity(1, 1, 1, ignored, 0);
approx::assert_relative_eq!(golden, actual, epsilon = f64::EPSILON);
let golden = 117019.72048983313;
let actual = COST_AWS.complexity(1, 20, 300, ignored, 64);
approx::assert_relative_eq!(golden, actual, epsilon = f64::EPSILON);
let golden = 7651844.24194206;
let actual = COST_AWS.complexity(1024, 10, 56, ignored, 64);
approx::assert_relative_eq!(golden, actual, epsilon = f64::EPSILON);
}
}
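
To see how these terms combine, the first golden value above (8.0) can be worked out by hand for the degenerate parameters N = 1, k = 1, l(BR) = 1, log2_q = 0 under the DEFAULT model (unit factors, no linear fft factors, zero constant cost). A standalone arithmetic sketch, illustration only:

// Illustration only: reproduces DEFAULT.complexity(1, 1, 1, _, 0) == 8.0 step by step.
fn main() {
    let n = 1f64;                // glwe_polynomial_size N
    let glwe_size = 1f64 + 1f64; // glwe_dimension k + 1
    let levels = 1f64;           // br_decomposition_level_count
    let fft = n * n.log2();      // asymptotic fft/ifft cost with factor 1.0 => 0.0 for N = 1
    // linear_fft_factor and linear_ifft_factor are None, so the additional factor is 1.0
    let fft_cost = glwe_size * levels * (fft + 1.0 * n);    // 2.0
    let ifft_cost = glwe_size * (fft + 1.0 * n);            // 2.0
    let br_cost = 1.0 * n * levels * glwe_size * glwe_size; // 4.0
    assert_eq!(fft_cost + ifft_cost + br_cost + 0.0, 8.0);
}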


@@ -0,0 +1,76 @@
use super::super::complexity::Complexity;
pub trait KeySwitchLWEComplexity {
fn complexity(
&self,
input_lwe_dimension: u64, //n_big
output_lwe_dimension: u64, //n_small
decomposition_level_count: u64, //l(KS)
decomposition_base_log: u64, //b(KS)
ciphertext_modulus_log: u64, //log2_q
) -> Complexity;
}
pub struct Default;
impl KeySwitchLWEComplexity for Default {
// https://github.com/zama-ai/concrete-optimizer/blob/prototype/python/optimizer/noise_formulas/keyswitch.py#L91
fn complexity(
&self,
input_lwe_dimension: u64, //n_big
output_lwe_dimension: u64, //n_small
decomposition_level_count: u64, //l(KS)
_decomposition_base_log: u64, //b(KS)
_ciphertext_modulus_log: u64, //log2_q
) -> Complexity {
let output_lwe_size = output_lwe_dimension + 1;
let count_decomposition = input_lwe_dimension * decomposition_level_count;
let count_mul = input_lwe_dimension * decomposition_level_count * output_lwe_size;
let count_add = (input_lwe_dimension * decomposition_level_count - 1) * output_lwe_size + 1;
(count_decomposition + count_mul + count_add) as Complexity
}
}
pub struct SimpleProductWithFactor {
factor: f64,
}
impl KeySwitchLWEComplexity for SimpleProductWithFactor {
// https://github.com/zama-ai/concrete-optimizer/blob/prototype/python/optimizer/noise_formulas/keyswitch.py#L100
fn complexity(
&self,
input_lwe_dimension: u64, //n_big
output_lwe_dimension: u64, //n_small
decomposition_level_count: u64, //l(KS)
_decomposition_base_log: u64, //b(KS)
ciphertext_modulus_log: u64, //log2_q
) -> Complexity {
let product = input_lwe_dimension
* output_lwe_dimension
* decomposition_level_count
* ciphertext_modulus_log;
self.factor * (product as f64)
}
}
pub const DEFAULT: Default = Default;
#[cfg(test)]
mod tests {
use super::*;
pub const COST_AWS: SimpleProductWithFactor = SimpleProductWithFactor {
factor: 0.12547239853890443,
};
#[test]
fn golden_python_prototype() {
let ignored = 0;
let golden = 134313984.0;
let actual = DEFAULT.complexity(1024, 2048, 32, ignored, 64);
approx::assert_relative_eq!(golden, actual, epsilon = f64::EPSILON);
let golden = 538899848.2752727;
let actual = COST_AWS.complexity(1024, 2048, 32, ignored, 64);
approx::assert_relative_eq!(golden, actual, epsilon = f64::EPSILON);
}
}
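
The Default model above is a pure operation count: one decomposition per (input coefficient, level), then the multiplications and additions of the resulting multisum over the output LWE size. The first golden value (134313984.0) can be reproduced by hand; a standalone arithmetic sketch, illustration only:

// Illustration only: reproduces DEFAULT.complexity(1024, 2048, 32, _, _) == 134_313_984.
fn main() {
    let (input_lwe_dimension, output_lwe_dimension, levels) = (1024u64, 2048u64, 32u64);
    let output_lwe_size = output_lwe_dimension + 1;                  // 2_049
    let count_decomposition = input_lwe_dimension * levels;          // 32_768
    let count_mul = count_decomposition * output_lwe_size;           // 67_141_632
    let count_add = (count_decomposition - 1) * output_lwe_size + 1; // 67_139_584
    assert_eq!(count_decomposition + count_mul + count_add, 134_313_984);
}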


@@ -0,0 +1,4 @@
pub mod atomic_pattern;
pub mod cmux;
pub mod keyswitch_lwe;
pub mod pbs;


@@ -0,0 +1,72 @@
use super::super::complexity::Complexity;
use super::cmux;
pub trait PbsComplexity {
fn complexity(
&self,
lwe_dimension: u64, //n
glwe_polynomial_size: u64, //N
glwe_dimension: u64, //k
br_decomposition_level_count: u64, //l(BR)
br_decomposition_base_log: u64, //b(BR)
ciphertext_modulus_log: u64, // log2_q
) -> Complexity;
}
pub struct CmuxProportional<CMUX: cmux::CmuxComplexity> {
cmux: CMUX,
}
impl<CMUX: cmux::CmuxComplexity> PbsComplexity for CmuxProportional<CMUX> {
fn complexity(
&self,
lwe_dimension: u64, //n
glwe_polynomial_size: u64, //N
glwe_dimension: u64, //k
br_decomposition_level_count: u64, //l(BR)
br_decomposition_base_log: u64, //b(BR)
ciphertext_modulus_log: u64, //log2_q
) -> Complexity {
// https://github.com/zama-ai/concrete-optimizer/blob/prototype/python/optimizer/noise_formulas/bootstrap.py#L163
let cmux_cost = self.cmux.complexity(
glwe_polynomial_size,
glwe_dimension,
br_decomposition_level_count,
br_decomposition_base_log,
ciphertext_modulus_log,
);
(lwe_dimension as f64) * cmux_cost
}
}
pub type Default = CmuxProportional<cmux::Default>;
pub const DEFAULT: Default = CmuxProportional {
cmux: cmux::DEFAULT,
};
#[cfg(test)]
pub mod tests {
use super::super::cmux;
use super::{CmuxProportional, Default, PbsComplexity, DEFAULT};
pub const COST_AWS: Default = CmuxProportional {
cmux: cmux::tests::COST_AWS,
};
#[test]
fn golden_python_prototype() {
let ignored = 0;
let golden = 8.0;
let actual = DEFAULT.complexity(1, 1, 1, 1, ignored, 32);
approx::assert_relative_eq!(golden, actual, epsilon = f64::EPSILON);
let golden = 249957554585600.0;
let actual = DEFAULT.complexity(1024, 4096, 1024, 56, ignored, 64);
approx::assert_relative_eq!(golden, actual, epsilon = f64::EPSILON);
let golden = 208532086206064.16;
let actual = COST_AWS.complexity(1024, 4096, 1024, 56, ignored, 64);
approx::assert_relative_eq!(golden, actual, epsilon = f64::EPSILON);
}
}
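
The CmuxProportional model treats a programmable bootstrap as lwe_dimension successive CMUXes, one per coefficient of the input LWE mask, so the PBS cost is simply n times the CMUX cost. With the first golden test's parameters (n = 1, N = 1, k = 1, l(BR) = 1) the DEFAULT CMUX costs 8.0 (see cmux.rs), hence the golden 8.0 here. A trivial standalone sketch, illustration only (pbs_cost is an illustrative helper name):

// Illustration only: PBS cost = lwe_dimension * cmux_cost under CmuxProportional.
fn pbs_cost(lwe_dimension: u64, cmux_cost: f64) -> f64 {
    (lwe_dimension as f64) * cmux_cost
}

fn main() {
    assert_eq!(pbs_cost(1, 8.0), 8.0);
}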


@@ -74,6 +74,7 @@ pub struct Range {
}
#[must_use]
#[allow(clippy::needless_pass_by_value)]
pub fn minimal_unify(_g: unparametrized::AtomicPatternDag) -> parameter_indexed::AtomicPatternDag {
todo!()
}
@@ -161,7 +162,7 @@ pub fn domains_to_ranges(
let mut constrained_glwe_parameter_indexes = HashSet::new();
for op in &operators {
if let Operator::AtomicPattern { extra_data, .. } = op {
constrained_glwe_parameter_indexes.insert(extra_data.output_glwe_params);
let _ = constrained_glwe_parameter_indexes.insert(extra_data.output_glwe_params);
}
}


@@ -5,10 +5,15 @@ use super::operator::Operator;
type Index = usize;
type InputParameterIndexed = InputParameter<Index>;
type AtomicPatternParametersIndexed = AtomicPatternParameters<Index, Index, Index, Index, Index>;
pub(crate) type OperatorParameterIndexed =
Operator<InputParameterIndexed, AtomicPatternParametersIndexed>;
pub struct AtomicPatternDag {
pub(crate) operators: Vec<
Operator<InputParameter<usize>, AtomicPatternParameters<Index, Index, Index, Index, Index>>,
>,
pub(crate) operators: Vec<OperatorParameterIndexed>,
pub(crate) parameters_count: ParameterCount,
pub(crate) reverse_map: ParameterToOperation,
}


@@ -1,14 +1,9 @@
use crate::global_parameters::{ParameterRanges, ParameterToOperation};
use crate::parameters::{AtomicPatternParameters, InputParameter};
use super::operator::Operator;
type Index = usize;
use crate::graph::parameter_indexed::OperatorParameterIndexed;
#[allow(dead_code)]
pub struct AtomicPatternDag {
pub(crate) operators: Vec<
Operator<InputParameter<usize>, AtomicPatternParameters<Index, Index, Index, Index, Index>>,
>,
pub(crate) operators: Vec<OperatorParameterIndexed>,
pub(crate) parameter_ranges: ParameterRanges,
pub(crate) reverse_map: ParameterToOperation,
}


@@ -1,14 +1,9 @@
use crate::global_parameters::{ParameterToOperation, ParameterValues};
use crate::parameters::{AtomicPatternParameters, InputParameter};
use super::operator::Operator;
type Index = usize;
use crate::graph::parameter_indexed::OperatorParameterIndexed;
#[allow(dead_code)]
pub struct AtomicPatternDag {
pub(crate) operators: Vec<
Operator<InputParameter<usize>, AtomicPatternParameters<Index, Index, Index, Index, Index>>,
>,
pub(crate) operators: Vec<OperatorParameterIndexed>,
pub(crate) parameter_ranges: ParameterValues,
pub(crate) reverse_map: ParameterToOperation,
}


@@ -1,9 +1,19 @@
#![warn(clippy::nursery)]
#![warn(clippy::pedantic)]
#![warn(clippy::style)]
#![allow(clippy::cast_precision_loss)] // u64 to f64
#![allow(clippy::cast_possible_truncation)] // u64 to usize
#![allow(clippy::missing_panics_doc)]
#![allow(clippy::module_name_repetitions)]
#![allow(clippy::missing_const_for_fn)]
#![allow(clippy::module_name_repetitions)]
#![allow(clippy::must_use_candidate)]
#![allow(clippy::similar_names)]
#![allow(clippy::suboptimal_flops)]
#![allow(clippy::too_many_arguments)]
#![allow(clippy::unreadable_literal)]
#![warn(unused_results)]
pub mod computing_cost;
pub mod global_parameters;
pub mod graph;