fix: bug converting manp to variance factor

Author: rudy
Date: 2022-08-19 17:30:54 +02:00
Committed by: rudy-6-4
Parent: fc6120ed74
Commit: bf6bdcfec2
3 changed files with 5 additions and 4 deletions


@@ -769,7 +769,8 @@ mod tests {
         let input1 = graph.add_input(3, Shape::number());
         let cpx_dot = LevelledComplexity::ADDITION;
         let weights = Weights::vector([1, 2]);
-        let manp = 1.0 * 1.0 + 2.0 * 2_f64;
+        #[allow(clippy::imprecise_flops)]
+        let manp = (1.0 * 1.0 + 2.0 * 2_f64).sqrt();
         let dot = graph.add_levelled_op([input1, input1], cpx_dot, manp, Shape::number(), "dot");
         let analysis = analyze(&graph);
         let one_lut_cost = 100.0;
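
For reference, the new test value can be reproduced outside the harness: manp is now the square root of the sum of squared weights (the 2-norm of the test's weight vector [1, 2]) rather than the sum of squares itself. A minimal standalone sketch of that arithmetic (not part of the changed test):

    fn main() {
        // weights from the test: Weights::vector([1, 2])
        let weights = [1.0_f64, 2.0];
        // manp after this commit: sqrt(1*1 + 2*2) = sqrt(5) ≈ 2.236
        let manp: f64 = weights.iter().map(|w| w * w).sum::<f64>().sqrt();
        assert!((manp - 5.0_f64.sqrt()).abs() <= f64::EPSILON);
        println!("manp = {manp}");
    }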


@@ -15,7 +15,6 @@ use crate::optimization::config::NoiseBoundConfig;
 use crate::parameters::{BrDecompositionParameters, GlweParameters, KsDecompositionParameters};
 use crate::pareto;
 use crate::security::glwe::minimal_variance;
-use crate::utils::square;
 use super::analyze;
@@ -308,7 +307,7 @@ pub fn optimize_v0<W: UnsignedInteger>(
 ) -> OptimizationState {
     use crate::dag::operator::{FunctionTable, Shape};
     let same_scale_manp = 0.0;
-    let manp = square(noise_factor);
+    let manp = noise_factor;
     let out_shape = &Shape::number();
     let complexity = LevelledComplexity::ADDITION * sum_size;
     let comment = "dot";
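
Taken together with the manp_to_variance_factor change below, the end-to-end variance factor is unchanged: before, manp held square(noise_factor) and the conversion was the identity; after, manp holds noise_factor and the conversion squares it. A hedged sketch of that equivalence, with names local to this example:

    // Sketch only: both paths yield the same variance factor.
    fn variance_factor_before(noise_factor: f64) -> f64 {
        // old code: manp = square(noise_factor); conversion returned manp unchanged
        noise_factor * noise_factor
    }

    fn variance_factor_after(noise_factor: f64) -> f64 {
        // new code: manp = noise_factor; conversion returns manp * manp
        let manp = noise_factor;
        manp * manp
    }

    fn main() {
        let noise_factor = 3.0_f64;
        assert_eq!(
            variance_factor_before(noise_factor),
            variance_factor_after(noise_factor)
        );
    }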
@@ -344,6 +343,7 @@ mod tests {
     use super::*;
     use crate::optimization::atomic_pattern;
+    use crate::utils::square;
     fn small_relative_diff(v1: f64, v2: f64) -> bool {
         f64::abs(v1 - v2) / f64::max(v1, v2) <= f64::EPSILON


@@ -73,7 +73,7 @@ impl SymbolicVariance {
     }
     pub fn manp_to_variance_factor(manp: f64) -> f64 {
-        manp
+        manp * manp
     }
     pub fn dominate_or_equal(&self, other: &Self) -> bool {
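
With the fix, the variance factor is the square of manp, so for the test values above (manp = sqrt(5)) it recovers the sum of squared weights, 5. A small usage sketch of the corrected conversion, reusing the relative-difference check from the tests (free functions here, not the actual SymbolicVariance method):

    fn manp_to_variance_factor(manp: f64) -> f64 {
        manp * manp
    }

    fn small_relative_diff(v1: f64, v2: f64) -> bool {
        f64::abs(v1 - v2) / f64::max(v1, v2) <= f64::EPSILON
    }

    fn main() {
        let manp = (1.0_f64 * 1.0 + 2.0 * 2.0).sqrt(); // sqrt(5), as in the test
        let variance_factor = manp_to_variance_factor(manp);
        // squaring the 2-norm of the weights recovers the sum of squared weights
        assert!(small_relative_diff(variance_factor, 5.0));
    }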