use DuplicateCheck trait

themighty1
2023-05-11 14:24:07 +03:00
parent b5abe4b84f
commit 29908503cd
6 changed files with 12 additions and 29 deletions
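
The `utils::iter::DuplicateCheck` trait that this commit switches to is defined outside the files shown here. For reference, here is a minimal sketch of such an iterator extension trait, mirroring the logic of the removed `has_unique_elements` helper; the trait definition and bounds below are assumptions, not the actual `utils` crate source:

```rust
use std::collections::HashSet;
use std::hash::Hash;

/// Hypothetical sketch of an iterator extension trait exposing `contains_dups`.
/// The real `utils::iter::DuplicateCheck` lives outside this diff.
pub trait DuplicateCheck: Iterator {
    /// Returns true if the iterator yields any element more than once.
    fn contains_dups(mut self) -> bool
    where
        Self: Sized,
        Self::Item: Eq + Hash,
    {
        let mut seen = HashSet::new();
        // `insert` returns false when the element was already present,
        // so `any` short-circuits on the first duplicate.
        self.any(|x| !seen.insert(x))
    }
}

// Blanket impl: every iterator gets `contains_dups` for free.
impl<I: Iterator> DuplicateCheck for I {}

fn main() {
    assert!([1, 2, 2].iter().contains_dups());
    assert!(![1, 2, 3].iter().contains_dups());
}
```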

View File

@@ -287,7 +287,7 @@ impl Label {
     /// Returns inner block
     #[inline]
-    pub(crate) fn to_inner(self) -> Block {
+    pub fn to_inner(self) -> Block {
         self.0
     }

View File

@@ -1,10 +1,11 @@
-use crate::{error::Error, utils::has_unique_elements};
+use crate::error::Error;
 use mpc_core::hash::Hash;
 use rs_merkle::{
     algorithms::Sha256, proof_serializers, MerkleProof as MerkleProof_rs_merkle,
     MerkleTree as MerkleTree_rs_merkle,
 };
 use serde::{ser::Serializer, Deserialize, Deserializer, Serialize};
+use utils::iter::DuplicateCheck;

 pub type MerkleRoot = [u8; 32];
@@ -30,7 +31,7 @@ impl MerkleProof {
         if leaf_indices.len() != leaf_hashes.len() {
             return Err(Error::MerkleProofVerificationFailed);
         }
-        if !has_unique_elements(leaf_indices) {
+        if leaf_indices.iter().contains_dups() {
             return Err(Error::MerkleProofVerificationFailed);
         }

View File

@@ -1,4 +1,5 @@
 use serde::{Deserialize, Serialize};
+use utils::iter::DuplicateCheck;

 use crate::{
     error::Error,
@@ -7,7 +8,6 @@ use crate::{
         substrings_opening::{Blake3Opening, SubstringsOpening, SubstringsOpeningSet},
         substrings_proof::SubstringsProof,
     },
-    utils::has_unique_elements,
     Commitment, Direction, InclusionProof, SessionData, SessionHeader, SessionHeaderMsg,
     SessionProof, SubstringsCommitment, SubstringsCommitmentSet,
 };
@@ -31,7 +31,7 @@ impl NotarizedSession {
     /// Generates a `SubstringsProof` for commitments with the provided merkle tree indices
     pub fn generate_substring_proof(&self, indices: Vec<usize>) -> Result<SubstringsProof, Error> {
         // check that merkle tree indices are unique
-        if !has_unique_elements(&indices) {
+        if indices.iter().contains_dups() {
             return Err(Error::WrongMerkleTreeIndices);
         }

View File

@@ -1,8 +1,7 @@
-use crate::{
-    commitment::Commitment, error::Error, transcript::Direction, utils::has_unique_elements,
-};
+use crate::{commitment::Commitment, error::Error, transcript::Direction};
 use serde::{Deserialize, Serialize};
 use std::ops::Range;
+use utils::iter::DuplicateCheck;

 /// A set of commitments
 #[derive(Default, Serialize, Deserialize)]
@@ -27,7 +26,7 @@ impl SubstringsCommitmentSet {
         // merkle_tree_index of each commitment must be unique
         let ids: Vec<u32> = self.0.iter().map(|c| c.merkle_tree_index()).collect();
-        if !has_unique_elements(ids) {
+        if ids.iter().contains_dups() {
             return Err(Error::ValidationError);
         }
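
Note a small semantic difference at these call sites: the old `has_unique_elements(ids)` consumed the `Vec`, whereas `ids.iter().contains_dups()` only borrows it. A standalone illustration, assuming the `DuplicateCheck` sketch above is in scope:

```rust
fn main() {
    let ids: Vec<u32> = vec![7, 7, 9];
    // `iter()` yields `&u32`; references to `Eq + Hash` types satisfy the bounds.
    assert!(ids.iter().contains_dups());
    // `ids` is still usable afterwards because the check only borrowed it.
    assert_eq!(ids.len(), 3);
}
```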

View File

@@ -1,7 +1,7 @@
 use crate::{
     commitment::{Blake3, Commitment},
     error::Error,
-    utils::{encode_bytes_in_ranges, has_unique_elements, merge_slices},
+    utils::merge_slices,
     Direction, SessionHeader, Transcript, TranscriptSlice,
 };
 use blake3::Hasher;
@@ -10,6 +10,7 @@ use mpc_core::hash::Hash;
 use mpc_garble_core::{EncodedValue, Encoder};
 use serde::{Deserialize, Serialize};
 use std::ops::Range;
+use utils::iter::DuplicateCheck;

 /// A set of openings
 #[derive(Serialize, Deserialize)]
@@ -34,7 +35,7 @@ impl SubstringsOpeningSet {
         // --- merkle_tree_index of each opening must be unique
         let ids: Vec<u32> = self.0.iter().map(|o| o.merkle_tree_index()).collect();
-        if !has_unique_elements(ids) {
+        if ids.iter().contains_dups() {
             return Err(Error::ValidationError);
         }

View File

@@ -16,16 +16,6 @@ pub fn blake3(data: &[u8]) -> [u8; 32] {
     hasher.finalize().into()
 }

-/// Returns true if all elements of the iterator are unique
-pub fn has_unique_elements<T>(iter: T) -> bool
-where
-    T: IntoIterator,
-    T::Item: Eq + Hash,
-{
-    let mut uniq = HashSet::new();
-    iter.into_iter().all(move |x| uniq.insert(x))
-}
-
 /// Tries to merge `slices` and returns the resulting slices sorted ascendingly (note that even if no
 /// merging was necessary, the `slices` will be returned sorted ascendingly).
 /// Merging happens if slices overlap or are adjacent.
@@ -160,14 +150,6 @@ pub(crate) fn encode_bytes_in_ranges(
 mod tests {
     use super::*;

-    #[test]
-    fn test_has_unique_elements() {
-        let unique: Vec<u32> = vec![1, 34, 3432, 5643];
-        let not_unique: Vec<u32> = vec![1, 34, 3432, 5643, 34];
-        assert!(has_unique_elements(unique));
-        assert!(!has_unique_elements(not_unique));
-    }
-
     #[test]
     // Expect merge_slices() to return a new vec of slices since some were merged
     fn test_merge_slices_new() {
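
The deleted `test_has_unique_elements` vectors double as a sanity check that the trait-based form is simply the negation of the old helper. A sketch, reproducing the removed helper verbatim and assuming the hypothetical `DuplicateCheck` sketch above is in scope:

```rust
use std::collections::HashSet;
use std::hash::Hash;

/// The helper deleted in this commit, reproduced from the diff above.
fn has_unique_elements<T>(iter: T) -> bool
where
    T: IntoIterator,
    T::Item: Eq + Hash,
{
    let mut uniq = HashSet::new();
    iter.into_iter().all(move |x| uniq.insert(x))
}

fn main() {
    let unique: Vec<u32> = vec![1, 34, 3432, 5643];
    let not_unique: Vec<u32> = vec![1, 34, 3432, 5643, 34];
    // `contains_dups` is the negation of `has_unique_elements` on the same data.
    assert!(has_unique_elements(unique.iter()));
    assert!(!unique.iter().contains_dups());
    assert!(!has_unique_elements(not_unique.iter()));
    assert!(not_unique.iter().contains_dups());
}
```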