Mirror of https://github.com/zama-ai/concrete.git, synced 2026-02-09 03:55:04 -05:00
chore(cuts): remove lasts pre-cuts + simplify cuts for atomic_pattern
@@ -7,13 +7,11 @@ use concrete_commons::dispersion::{DispersionParameter, Variance};
 
-use super::decomposition;
 use super::decomposition::{
-    blind_rotate, circuit_bootstrap, cut_complexity_noise, keyswitch, pp_switch, PersistDecompCache,
+    blind_rotate, circuit_bootstrap, keyswitch, pp_switch, PersistDecompCache,
 };
 
 // Ref time for v0 table 1 thread: 950ms
 const CUTS: bool = true; // 80ms
 const PARETO_CUTS: bool = true; // 75ms
 const CROSS_PARETO_CUTS: bool = PARETO_CUTS && true; // 70ms
 
 #[derive(Debug, Clone, Copy, PartialEq)]
 pub struct Solution {
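
The three constants form a hierarchy of increasingly aggressive pruning, and the timing comments record the cumulative effect on the v0 reference table (950ms with no cuts, down to 70ms with all of them); CROSS_PARETO_CUTS is only meaningful when PARETO_CUTS is on, hence the `PARETO_CUTS && true` definition. A minimal sketch of the difference between the two weaker levels, using a hypothetical `Candidate` list sorted by increasing cost (names not taken from the optimizer):

struct Candidate {
    cost: f64,
}

const CUTS: bool = true;
const PARETO_CUTS: bool = true;

fn scan(candidates: &[Candidate], budget: f64) {
    for c in candidates {
        // `candidates` is sorted by increasing cost
        if c.cost > budget {
            if PARETO_CUTS {
                break; // everything later is even more expensive
            } else if CUTS {
                continue; // skip this candidate, but keep scanning
            }
        }
        // ... evaluate the candidate here ...
    }
}
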
@@ -94,30 +92,14 @@ fn update_state_with_best_decompositions(
     let mut best_variance = state.best_solution.map_or(f64::INFINITY, |s| s.noise_max);
 
     let complexity_multisum = (consts.sum_size * input_lwe_dimension) as f64;
-    let mut cut_complexity = best_complexity - complexity_multisum;
-    let mut cut_noise = safe_variance - noise_modulus_switching;
     let br_quantities = caches
         .blind_rotate
         .pareto_quantities(glwe_params, internal_dim);
-    let br_quantities = cut_complexity_noise(cut_complexity, cut_noise, br_quantities);
-    if br_quantities.is_empty() {
-        return;
-    }
-    if PARETO_CUTS {
-        cut_noise -= br_quantities[br_quantities.len() - 1].noise;
-        cut_complexity -= br_quantities[0].complexity;
-    }
 
     let ks_quantities = caches
         .keyswitch
         .pareto_quantities(glwe_params, internal_dim);
-    let ks_quantities = cut_complexity_noise(cut_complexity, cut_noise, ks_quantities);
-    if ks_quantities.is_empty() {
-        return;
-    }
 
-    let i_max_ks = ks_quantities.len() - 1;
-    let mut i_current_max_ks = i_max_ks;
     let square_noise_factor = square(consts.noise_factor);
     for br_quantity in br_quantities {
         // increasing complexity, decreasing variance
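
For context on what was removed: the pre-cut derived a complexity budget and a noise budget from the current best solution, then shrank both pareto fronts with cut_complexity_noise before scanning them (the helper itself is deleted later in this commit). A standalone sketch of that budget arithmetic, with plain f64 arguments standing in for the optimizer state:

// Hypothetical standalone version of the removed budgets; the real code
// read these values from state.best_solution, consts, and the caches.
fn budgets(
    best_complexity: f64,
    complexity_multisum: f64,
    safe_variance: f64,
    noise_modulus_switching: f64,
) -> (f64, f64) {
    // The multisum cost and the modulus-switching noise are paid by every
    // candidate, so only what remains can be spent on the (br, ks) pair.
    let cut_complexity = best_complexity - complexity_multisum;
    let cut_noise = safe_variance - noise_modulus_switching;
    (cut_complexity, cut_noise)
}
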
@@ -130,36 +112,21 @@ fn update_state_with_best_decompositions(
         let complexity = complexity_multisum + complexity_pbs;
         if complexity > best_complexity {
-            // As best can evolves it is complementary to blind_rotate_quantities cuts.
-            if PARETO_CUTS {
-                break;
-            } else if CUTS {
-                continue;
-            }
+            break;
         }
-        for i_ks_pareto in (0..=i_current_max_ks).rev() {
-            // increasing variance, decreasing complexity
-            let ks_quantity = ks_quantities[i_ks_pareto];
-            let noise_keyswitch = ks_quantity.noise;
-            let noise_max = noise_in + noise_keyswitch + noise_modulus_switching;
+        for &ks_quantity in ks_quantities.iter().rev() {
             let complexity_keyswitch = ks_quantity.complexity;
             let complexity = complexity_multisum + complexity_keyswitch + complexity_pbs;
-
-            if noise_max > safe_variance {
-                if CROSS_PARETO_CUTS {
-                    // the pareto of 2 added pareto is scanned linearly
-                    // but with all cuts, pre-computing => no gain
-                    i_current_max_ks = usize::min(i_ks_pareto + 1, i_max_ks);
-                    break;
-                    // it's compatible with next i_br but with the worst complexity
-                } else if PARETO_CUTS {
-                    // increasing variance => we can skip all remaining
-                    break;
-                }
-                continue;
-            } else if complexity > best_complexity {
+            if complexity > best_complexity {
                 continue;
             }
 
+            // increasing variance, decreasing complexity
+            let noise_keyswitch = ks_quantity.noise;
+            let noise_max = noise_in + noise_keyswitch + noise_modulus_switching;
+            if noise_max > safe_variance {
+                // increasing variance => we can skip all remaining
+                break;
+            }
             // feasible and at least as good complexity
             if complexity < best_complexity || noise_max < best_variance {
                 let sigma = Variance(safe_variance).get_standard_dev() * consts.kappa;
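
After this simplification, pruning relies only on the ordering of the two fronts: br_quantities is scanned by increasing complexity, so a too-expensive blind rotate ends the outer loop, while ks_quantities is scanned in reverse, i.e. by decreasing complexity and increasing noise, so a too-expensive keyswitch is skipped and a too-noisy one ends the inner loop. A runnable toy version of that control flow, with a hypothetical `Quantity` type and scalar budgets in place of the cached decompositions and the real cost model:

#[derive(Clone, Copy)]
struct Quantity {
    complexity: f64,
    noise: f64,
}

// Toy version of the simplified scan: `budget` plays the role of
// best_complexity and `safe_variance` bounds the accumulated noise.
fn scan(br: &[Quantity], ks: &[Quantity], budget: f64, safe_variance: f64) {
    for b in br {
        // increasing complexity, decreasing variance
        if b.complexity > budget {
            break; // all remaining blind rotates are more expensive
        }
        for k in ks.iter().rev() {
            // decreasing complexity, increasing noise
            if b.complexity + k.complexity > budget {
                continue; // the next keyswitch is cheaper
            }
            if b.noise + k.noise > safe_variance {
                break; // the next keyswitch is noisier
            }
            // feasible pair: compare it against the current best solution here
        }
    }
}
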
@@ -62,12 +62,13 @@ fn update_best_solution_with_best_decompositions(
         if not_feasible {
             continue;
         }
-        for &ks_quantity in ks_pareto.iter().rev() {
+        for &ks_quantity in ks_pareto {
             let one_lut_cost = ks_quantity.complexity + br_quantity.complexity;
             let complexity = dag.complexity(input_lwe_dimension, one_lut_cost);
             let worse_complexity = complexity > best_complexity;
             if worse_complexity {
-                continue;
+                // Since ks_pareto is scanned by increasing complexity, we can stop
+                break;
             }
             let not_feasible = !dag.feasible(
                 input_noise_out,
@@ -76,8 +77,7 @@ fn update_best_solution_with_best_decompositions(
             noise_modulus_switching,
         );
         if not_feasible {
-            // Since ks_pareto is scanned by increasing noise, we can stop
-            break;
+            continue;
         }
 
         let (peek_p_error, variance) = dag.peek_p_error(
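
In this dag-based variant the inner scan flips from `.iter().rev()` to a forward scan, so the two early exits swap roles: scanning by increasing complexity makes a too-expensive keyswitch terminal, while an infeasible one can still be followed by lower-noise entries. A small sketch of the flipped loop, assuming ks_pareto is sorted by increasing complexity and decreasing noise (tuple placeholders, not the real types):

// Sketch of the forward inner loop; `budget` and `max_noise` are
// placeholders for the dag-based cost and feasibility queries.
fn scan_forward(ks_pareto: &[(f64, f64)], budget: f64, max_noise: f64) {
    for &(complexity, noise) in ks_pareto {
        // increasing complexity, decreasing noise
        if complexity > budget {
            break; // everything after is more expensive
        }
        if noise > max_noise {
            continue; // later entries have less noise
        }
        // feasible candidate: evaluate p-error and update the best solution
    }
}
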
@@ -12,7 +12,6 @@ use crate::utils::cache::persistent::PersistentCacheHashMap;
 use crate::{config, security};
 
 use super::common::MacroParam;
-use super::cut::ComplexityNoise;
 
 #[derive(Clone, Copy, Debug, Serialize, Deserialize)]
 pub struct BrComplexityNoise {
@@ -97,15 +96,6 @@ pub fn pareto_quantities(
     quantities
 }
 
-impl ComplexityNoise for BrComplexityNoise {
-    fn noise(&self) -> f64 {
-        self.noise
-    }
-    fn complexity(&self) -> f64 {
-        self.complexity
-    }
-}
-
 pub type Cache = CacheHashMap<MacroParam, Vec<BrComplexityNoise>>;
 
 impl Cache {
@@ -1,33 +0,0 @@
-pub trait ComplexityNoise {
-    fn noise(&self) -> f64;
-    fn complexity(&self) -> f64;
-}
-
-pub fn cut_complexity_noise<E>(cut_complexity: f64, cut_noise: f64, decomps: &[E]) -> &[E]
-where
-    E: ComplexityNoise,
-{
-    let mut min_index = None;
-    // Search first valid noise
-    for (i, decomp) in decomps.iter().enumerate() {
-        if decomp.noise() <= cut_noise {
-            min_index = Some(i);
-            break; // noise is decreasing
-        }
-    }
-    let min_index = min_index.unwrap_or(decomps.len());
-    // Search first invalid complexity
-    let mut max_index = None;
-    for (i, decomp) in decomps.iter().enumerate().skip(min_index) {
-        if cut_complexity < decomp.complexity() {
-            max_index = Some(i);
-            break; // complexity is increasing
-        }
-    }
-    let max_index = max_index.unwrap_or(decomps.len());
-    if min_index == max_index {
-        return &[];
-    }
-    assert!(min_index < max_index);
-    &decomps[min_index..max_index]
-}
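
For reference, a toy use of the deleted helper on a front sorted by decreasing noise and increasing complexity, showing the window it kept (made-up values, with a hypothetical impl `D` of the trait above):

// Assumes the ComplexityNoise trait and cut_complexity_noise from the
// deleted file above; `D` is a hypothetical implementor.
struct D {
    noise: f64,
    complexity: f64,
}

impl ComplexityNoise for D {
    fn noise(&self) -> f64 {
        self.noise
    }
    fn complexity(&self) -> f64 {
        self.complexity
    }
}

fn main() {
    // decreasing noise, increasing complexity
    let front = [
        D { noise: 8.0, complexity: 1.0 },
        D { noise: 4.0, complexity: 2.0 },
        D { noise: 2.0, complexity: 3.0 },
        D { noise: 1.0, complexity: 4.0 },
    ];
    // Keep entries with noise <= 4.0 and complexity <= 3.0:
    let kept = cut_complexity_noise(3.0, 4.0, &front);
    assert_eq!(kept.len(), 2); // the middle two entries survive
}
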
@@ -14,7 +14,6 @@ use crate::utils::cache::ephemeral::{CacheHashMap, EphemeralCache};
 use crate::utils::cache::persistent::PersistentCacheHashMap;
 
 use super::common::MacroParam;
-use super::cut::ComplexityNoise;
 
 #[derive(Clone, Copy, Debug, Serialize, Deserialize)]
 pub struct KsComplexityNoise {
@@ -23,15 +22,6 @@ pub struct KsComplexityNoise {
     pub noise: f64,
 }
 
-impl ComplexityNoise for KsComplexityNoise {
-    fn noise(&self) -> f64 {
-        self.noise
-    }
-    fn complexity(&self) -> f64 {
-        self.complexity
-    }
-}
-
 /* This is stricly variance decreasing and strictly complexity increasing */
 pub fn pareto_quantities(
     complexity_model: &dyn ComplexityModel,
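
The comment above states the invariant all of these cuts rely on. A hypothetical debug helper (not part of the codebase) that checks it over a front:

// Debug check of the stated invariant: strictly decreasing variance and
// strictly increasing complexity along the pareto front.
fn assert_pareto(front: &[KsComplexityNoise]) {
    for pair in front.windows(2) {
        assert!(pair[0].noise > pair[1].noise);
        assert!(pair[0].complexity < pair[1].complexity);
    }
}
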
@@ -1,12 +1,10 @@
 pub mod blind_rotate;
 pub mod circuit_bootstrap;
 pub mod common;
-pub mod cut;
 pub mod keyswitch;
 pub mod pp_switch;
 
 pub use common::MacroParam;
-pub use cut::cut_complexity_noise;
 
 use crate::computing_cost::complexity_model::ComplexityModel;
 use crate::config;