From 76b72b7feb0670fcd7f3b1c80e5aba68b072a875 Mon Sep 17 00:00:00 2001
From: youben11
Date: Mon, 2 Oct 2023 12:14:25 +0100
Subject: [PATCH] fix(optimizer): avoid asserting on manp >= 1

zero tensor ops now use a MANP of 0
---
 .../dag/multi_parameters/symbolic_variance.rs | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/compilers/concrete-optimizer/concrete-optimizer/src/optimization/dag/multi_parameters/symbolic_variance.rs b/compilers/concrete-optimizer/concrete-optimizer/src/optimization/dag/multi_parameters/symbolic_variance.rs
index 67fc59639..66ba6f2b4 100644
--- a/compilers/concrete-optimizer/concrete-optimizer/src/optimization/dag/multi_parameters/symbolic_variance.rs
+++ b/compilers/concrete-optimizer/concrete-optimizer/src/optimization/dag/multi_parameters/symbolic_variance.rs
@@ -152,11 +152,11 @@ impl SymbolicVariance {
             let pbs_noise_coeff = self.coeffs[partition_offset + VALUE_INDEX_PBS];
             current_max = current_max.max(fresh_coeff).max(pbs_noise_coeff);
         }
-        assert!(1.0 <= current_max);
-        assert!(
-            current_max <= new_coeff,
-            "Non monotonious levelled op: {current_max} <= {new_coeff}"
-        );
+        // assert!(1.0 <= current_max);
+        // assert!(
+        //     current_max <= new_coeff,
+        //     "Non monotonious levelled op: {current_max} <= {new_coeff}"
+        // );
         // replace all current_max by new_coeff
         // multiply everything else by new_coeff / current_max
         let mut new = self.clone();