Mirror of https://github.com/tlsnotary/rs-merkle.git
Merge remote-tracking branch 'origin/master'
.github/workflows/publish.yml
@@ -6,7 +6,7 @@ on:
- v*
jobs:
publish:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2

@@ -18,6 +18,45 @@ jobs:
override: true
- name: Run tests
run: cargo test
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Setup Rust toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
profile: minimal
override: true
- name: Install Clippy
run: rustup component add clippy
- name: Run linter
run: cargo clippy -- -D warnings
format:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Setup Rust toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
profile: minimal
override: true
- name: Install rustfmt
run: rustup component add rustfmt
- name: Check format
run: cargo fmt --all -- --check
publish:
needs: [test, lint, format]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Setup Rust toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
profile: minimal
override: true
- shell: bash
env:
CARGO_TOKEN: ${{ secrets.CARGO_TOKEN }}
.github/workflows/test.yml
@@ -7,7 +7,7 @@ on:
branches: [ master ]
jobs:
build:
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2

@@ -19,3 +19,31 @@ jobs:
override: true
- name: Run tests
run: cargo test
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Setup Rust toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
profile: minimal
override: true
- name: Install Clippy
run: rustup component add clippy
- name: Run linter
run: cargo clippy -- -D warnings
format:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Setup Rust toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: stable
profile: minimal
override: true
- name: Install rustfmt
run: rustup component add rustfmt
- name: Check format
run: cargo fmt --all -- --check
@@ -1,4 +1,4 @@
//! This module contains built-in implementations of `rs_merkle::Hasher`
mod sha256;
pub use sha256::Sha256Algorithm as Sha256;
pub use sha256::Sha256Algorithm as Sha256;
@@ -1,5 +1,5 @@
use sha2::{Sha256, Digest, digest::FixedOutput};
use crate::Hasher;
use sha2::{digest::FixedOutput, Digest, Sha256};
/// Sha256 implementation of the `rs_merkle::Hasher` trait
#[derive(Clone)]

@@ -14,4 +14,4 @@ impl Hasher for Sha256Algorithm {
hasher.update(data);
<[u8; 32]>::from(hasher.finalize_fixed())
}
}
}
src/error.rs
@@ -1,15 +1,15 @@
use std::fmt::{Debug, Formatter, Display};
use std::fmt::{Debug, Display, Formatter};
#[derive(Copy, Clone, Debug)]
pub enum ErrorKind {
SerializedProofSizeIsIncorrect,
NotEnoughHelperNodes
NotEnoughHelperNodes,
}
#[derive(Clone, Debug)]
pub struct Error {
kind: ErrorKind,
message: String
message: String,
}
impl Error {

@@ -20,7 +20,7 @@ impl Error {
pub fn not_enough_helper_nodes() -> Self {
Self::new(
ErrorKind::NotEnoughHelperNodes,
String::from("Not enough hashes to reconstruct the root")
String::from("Not enough hashes to reconstruct the root"),
)
}

@@ -28,7 +28,9 @@ impl Error {
self.kind
}
pub fn message(&self) -> &str { &self.message }
pub fn message(&self) -> &str {
&self.message
}
}
impl std::error::Error for Error {}

@@ -37,4 +39,4 @@ impl Display for Error {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.message)
}
}
}
@@ -53,15 +53,15 @@ pub trait Hasher: Clone {
/// if the left node doesn't have a sibling it is concatenated to itself and
/// then hashed instead of just being propagated to the next level.
fn concat_and_hash(left: &Self::Hash, right: Option<&Self::Hash>) -> Self::Hash {
let mut concatenated: Vec<u8> = left.clone().into();
let mut concatenated: Vec<u8> = (*left).into();
match right {
Some(right_node) => {
let mut right_node_clone: Vec<u8> = right_node.clone().into();
let mut right_node_clone: Vec<u8> = (*right_node).into();
concatenated.append(&mut right_node_clone);
Self::hash(&concatenated)
},
None => left.clone()
}
None => *left,
}
}

@@ -71,4 +71,4 @@ pub trait Hasher: Clone {
fn hash_size() -> usize {
mem::size_of::<Self::Hash>()
}
}
}
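For reference, a hedged, standalone sketch of what the `concat_and_hash` default above computes for 32-byte hashes, written directly against the `sha2` crate rather than through the trait; the function name and fixed `[u8; 32]` types here are illustrative only.

use sha2::{Digest, Sha256};

/// Standalone illustration of the `concat_and_hash` logic above for 32-byte hashes.
fn concat_and_hash(left: &[u8; 32], right: Option<&[u8; 32]>) -> [u8; 32] {
    match right {
        // Hash the concatenation left || right, as in the trait's default implementation.
        Some(right) => {
            let mut hasher = Sha256::new();
            hasher.update(left);
            hasher.update(right);
            <[u8; 32]>::from(hasher.finalize())
        }
        // No right sibling in the match arms shown: the left hash is propagated unchanged.
        None => *left,
    }
}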
@@ -11,12 +11,11 @@ pub use merkle_proof::MerkleProof;
pub use merkle_tree::MerkleTree;
pub use partial_tree::PartialTree;
mod merkle_tree;
mod merkle_proof;
mod partial_tree;
mod hasher;
mod merkle_proof;
mod merkle_tree;
mod partial_tree;
pub mod algorithms;
pub mod error;
mod utils;
@@ -1,9 +1,9 @@
use std::convert::TryInto;
use crate::{Hasher, utils};
use crate::error::Error;
use crate::error::ErrorKind;
use crate::partial_tree::PartialTree;
use crate::{utils, Hasher};
/// `MerkleProof` is used to parse, verify, calculate a root for merkle proofs.
///

@@ -29,9 +29,7 @@ pub struct MerkleProof<T: Hasher> {
impl<T: Hasher> MerkleProof<T> {
pub fn new(proof_hashes: Vec<T::Hash>) -> Self {
MerkleProof {
proof_hashes,
}
MerkleProof { proof_hashes }
}
/// Parses proof serialized as bytes

@@ -42,20 +40,29 @@ impl<T: Hasher> MerkleProof<T> {
if bytes.len() % hash_size != 0 {
return Err(Error::new(
ErrorKind::SerializedProofSizeIsIncorrect,
format!("Proof of size {} bytes can not be divided into chunks of {} bytes", bytes.len(), hash_size)));
format!(
"Proof of size {} bytes can not be divided into chunks of {} bytes",
bytes.len(),
hash_size
),
));
}
let hashes_count = bytes.len() / hash_size;
let proof_hashes_slices: Vec<T::Hash> = (0..hashes_count)
.map(|i| {
let x: Vec<u8> = bytes.get(i * hash_size..(i + 1) * hash_size).unwrap().try_into().unwrap();
let x: Vec<u8> = bytes
.get(i * hash_size..(i + 1) * hash_size)
.unwrap()
.try_into()
.unwrap();
match x.try_into() {
Ok(val) => val,
// Because of the check above the initial bytes are always slices perfectly
// into appropriately sized hashes.
// Unwrap is not used here due to more complex trait bounds on T::Hash
// that would be require to satisfy .unwrap usage
Err(_) => panic!("Unexpected error during proof parsing")
Err(_) => panic!("Unexpected error during proof parsing"),
}
})
.collect();

@@ -76,15 +83,25 @@ impl<T: Hasher> MerkleProof<T> {
}
/// Calculates merkle root based on provided leaves and proof hashes
pub fn root(&self, leaf_indices: &[usize], leaf_hashes: &[T::Hash], total_leaves_count: usize) -> T::Hash {
pub fn root(
&self,
leaf_indices: &[usize],
leaf_hashes: &[T::Hash],
total_leaves_count: usize,
) -> T::Hash {
let tree_depth = utils::indices::tree_depth(total_leaves_count);
// Zipping indices and hashes into a vector of (original_index_in_tree, leaf_hash)
let mut leaf_tuples: Vec<(usize, T::Hash)> = leaf_indices.iter().cloned().zip(leaf_hashes.iter().cloned()).collect();
let mut leaf_tuples: Vec<(usize, T::Hash)> = leaf_indices
.iter()
.cloned()
.zip(leaf_hashes.iter().cloned())
.collect();
// Sorting leaves by indexes in case they weren't sorted already
leaf_tuples.sort_by(|(a, _), (b, _)| a.cmp(b));
// Getting back _sorted_ indices
let proof_indices_by_layers = utils::indices::proof_indices_by_layers(leaf_indices, total_leaves_count);
let proof_indices_by_layers =
utils::indices::proof_indices_by_layers(leaf_indices, total_leaves_count);
// The next lines copy hashes from proof hashes and group them by layer index
let mut proof_layers: Vec<Vec<(usize, T::Hash)>> = Vec::with_capacity(tree_depth + 1);

@@ -98,31 +115,47 @@ impl<T: Hasher> MerkleProof<T> {
Some(first_layer) => {
first_layer.append(&mut leaf_tuples);
first_layer.sort_by(|(a, _), (b, _)| a.cmp(b));
},
None => proof_layers.push(leaf_tuples)
}
None => proof_layers.push(leaf_tuples),
}
// TODO: remove the unwrap!
let partial_tree = PartialTree::<T>::build(proof_layers, tree_depth).unwrap();
return partial_tree.root().unwrap().clone();
*partial_tree.root().unwrap()
}
/// Calculates the root and serializes it into a hex string
pub fn hex_root(&self, leaf_indices: &[usize], leaf_hashes: &[T::Hash], total_leaves_count: usize) -> String {
pub fn hex_root(
&self,
leaf_indices: &[usize],
leaf_hashes: &[T::Hash],
total_leaves_count: usize,
) -> String {
let root = self.root(leaf_indices, leaf_hashes, total_leaves_count);
utils::collections::to_hex_string(&root)
}
/// Verifies
pub fn verify(&self, root: T::Hash, leaf_indices: &[usize], leaf_hashes: &[T::Hash], total_leaves_count: usize) -> bool {
pub fn verify(
&self,
root: T::Hash,
leaf_indices: &[usize],
leaf_hashes: &[T::Hash],
total_leaves_count: usize,
) -> bool {
let extracted_root = self.root(leaf_indices, leaf_hashes, total_leaves_count);
root == extracted_root
}
/// Serializes proof hashes to a flat vector of bytes
pub fn to_bytes(&self) -> Vec<u8> {
let vectors: Vec<Vec<u8>> = self.proof_hashes().iter().cloned().map(|hash| hash.into()).collect();
let vectors: Vec<Vec<u8>> = self
.proof_hashes()
.iter()
.cloned()
.map(|hash| hash.into())
.collect();
vectors.iter().cloned().flatten().collect()
}
}
}
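Taken together, the methods touched in this file support the following round trip; this is a hedged usage sketch based only on calls that appear in this diff and in the tests further down, with made-up leaf values and indices for illustration.

use rs_merkle::{algorithms::Sha256, Hasher, MerkleProof, MerkleTree};

fn main() {
    // Hypothetical leaf values, hashed the same way the tests below do.
    let leaf_values = ["a", "b", "c", "d", "e", "f"];
    let leaf_hashes: Vec<[u8; 32]> = leaf_values
        .iter()
        .map(|v| Sha256::hash(v.as_bytes().to_vec().as_ref()))
        .collect();

    let merkle_tree = MerkleTree::<Sha256>::from_leaves(&leaf_hashes);

    // Prove two of the leaves and round-trip the proof through its byte encoding.
    let indices_to_prove = vec![3, 4];
    let leaves_to_prove: Vec<[u8; 32]> = indices_to_prove
        .iter()
        .map(|i| leaf_hashes[*i])
        .collect();
    let proof = merkle_tree.proof(&indices_to_prove);
    let proof = MerkleProof::<Sha256>::from_bytes(proof.to_bytes()).unwrap();

    // `hex_root` recomputes the root from the partial information; it should match the tree's root.
    let recovered_root = proof.hex_root(&indices_to_prove, &leaves_to_prove, leaf_hashes.len());
    assert_eq!(recovered_root, merkle_tree.root_hex().unwrap());
}

The same comparison can be done without hex strings through `verify`, which recomputes the root internally and checks it against the one passed in.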
@@ -1,6 +1,6 @@
use crate::{utils, MerkleProof, Hasher};
use crate::utils::indices::{parent_indices, proof_indices_by_layers};
use crate::partial_tree::PartialTree;
use crate::utils::indices;
use crate::{utils, Hasher, MerkleProof};
/// [`MerkleTree`] is a Merkle Tree that is well suited for both basic and advanced usage.
///

@@ -98,6 +98,12 @@ pub struct MerkleTree<T: Hasher> {
uncommitted_leaves: Vec<T::Hash>,
}
impl<T: Hasher> Default for MerkleTree<T> {
fn default() -> Self {
Self::new()
}
}
impl<T: Hasher> MerkleTree<T> {
/// Creates a new instance of Merkle Tree. Requires specifying the hash algorithm.
///

@@ -174,8 +180,8 @@ impl<T: Hasher> MerkleTree<T> {
for index in helper_indices {
match tree_layer.get(index) {
Some(hash) => {
helpers_layer.push((index, hash.clone()));
},
helpers_layer.push((index, *hash));
}
// This means that there's no right sibling to the current index, thus
// we don't need to include anything in the proof for that index
None => continue,

@@ -183,7 +189,7 @@ impl<T: Hasher> MerkleTree<T> {
}
helper_nodes.push(helpers_layer);
current_layer_indices = parent_indices(&current_layer_indices);
current_layer_indices = indices::parent_indices(&current_layer_indices);
}
helper_nodes

@@ -209,7 +215,12 @@ impl<T: Hasher> MerkleTree<T> {
pub fn layers_hex(&self) -> Vec<Vec<String>> {
self.layers()
.iter()
.map(|layer| layer.iter().map(utils::collections::to_hex_string).collect())
.map(|layer| {
layer
.iter()
.map(utils::collections::to_hex_string)
.collect()
})
.collect()
}

@@ -276,10 +287,19 @@ impl<T: Hasher> MerkleTree<T> {
/// Creates a diff from a changes that weren't committed to the main tree yet. Can be used
/// to get uncommitted root or can be merged with the main tree
fn uncommitted_diff(&self) -> Option<PartialTree<T>> {
let shadow_indices: Vec<usize> = self.uncommitted_leaves.iter().enumerate().map(|(index, _)| index).collect();
let shadow_indices: Vec<usize> = self
.uncommitted_leaves
.iter()
.enumerate()
.map(|(index, _)| index)
.collect();
// Tuples (index, hash) needed to construct a partial tree, since partial tree can't
// maintain indices otherwise
let mut shadow_node_tuples: Vec<(usize, T::Hash)> = shadow_indices.iter().cloned().zip(self.uncommitted_leaves.iter().cloned()).collect();
let mut shadow_node_tuples: Vec<(usize, T::Hash)> = shadow_indices
.iter()
.cloned()
.zip(self.uncommitted_leaves.iter().cloned())
.collect();
let mut partial_tree_tuples = self.helper_node_tuples(&shadow_indices);
// Figuring what tree height would be if we've committed the changes

@@ -293,8 +313,8 @@ impl<T: Hasher> MerkleTree<T> {
Some(first_layer) => {
first_layer.append(&mut shadow_node_tuples);
first_layer.sort_by(|(a, _), (b, _)| a.cmp(b));
},
None => partial_tree_tuples.push(shadow_node_tuples)
}
None => partial_tree_tuples.push(shadow_node_tuples),
}
// Building a partial tree with the changes that would be needed to the working tree
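A hedged sketch of the staged-commit workflow these changes touch (the new `Default` impl and the `uncommitted_diff` helper), using only calls that appear elsewhere in this diff; the leaf values are illustrative.

use rs_merkle::{algorithms::Sha256, Hasher, MerkleTree};

fn main() {
    // `Default` (added above) delegates to `MerkleTree::new()` and yields an empty tree.
    let mut merkle_tree: MerkleTree<Sha256> = MerkleTree::default();

    // Appended leaves are staged: at first they only show up in the uncommitted root.
    let mut leaves: Vec<[u8; 32]> = ["a", "b", "c"]
        .iter()
        .map(|v| Sha256::hash(v.as_bytes().to_vec().as_ref()))
        .collect();
    merkle_tree.append(&mut leaves);

    assert_eq!(merkle_tree.root(), None); // nothing committed yet
    let staged_root = merkle_tree.uncommitted_root_hex();

    // `commit` folds the uncommitted diff into the working tree; `rollback` undoes the last commit.
    merkle_tree.commit();
    assert_eq!(merkle_tree.root_hex(), staged_root);
}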
@@ -1,12 +1,20 @@
use crate::{Hasher, utils};
use crate::error::{Error, ErrorKind};
use crate::error::Error;
use crate::{utils, Hasher};
type PartialTreeLayer<H> = Vec<(usize, H)>;
/// Partial tree represents a part of the original tree that is enough to calculate the root.
/// Used in to extract the root in a merkle proof, to apply diff to a tree or to merge
/// multiple trees into one
#[derive(Clone)]
pub struct PartialTree<T: Hasher> {
layers: Vec<Vec<(usize, T::Hash)>>
layers: Vec<Vec<(usize, T::Hash)>>,
}
impl<T: Hasher> Default for PartialTree<T> {
fn default() -> Self {
Self::new()
}
}
impl<T: Hasher> PartialTree<T> {

@@ -21,10 +29,7 @@ impl<T: Hasher> PartialTree<T> {
pub fn from_leaves(leaves: &[T::Hash]) -> Result<Self, Error> {
let leaf_tuples: Vec<(usize, T::Hash)> = leaves.iter().cloned().enumerate().collect();
Self::build(
vec![leaf_tuples],
utils::indices::tree_depth(leaves.len())
)
Self::build(vec![leaf_tuples], utils::indices::tree_depth(leaves.len()))
}
pub fn build(partial_layers: Vec<Vec<(usize, T::Hash)>>, depth: usize) -> Result<Self, Error> {

@@ -35,12 +40,16 @@ impl<T: Hasher> PartialTree<T> {
/// This is a general algorithm for building a partial tree. It can be used to extract root
/// from merkle proof, or if a complete set of leaves provided as a first argument and no
/// helper indices given, will construct the whole tree.
fn build_tree(mut partial_layers: Vec<Vec<(usize, T::Hash)>>, full_tree_depth: usize) -> Result<Vec<Vec<(usize, T::Hash)>>, Error> {
fn build_tree(
mut partial_layers: Vec<Vec<(usize, T::Hash)>>,
full_tree_depth: usize,
) -> Result<Vec<PartialTreeLayer<T::Hash>>, Error> {
let mut partial_tree: Vec<Vec<(usize, T::Hash)>> = Vec::new();
let mut current_layer = Vec::new();
// Reversing helper nodes, so we can remove one layer starting from 0 each iteration
let mut reversed_layers: Vec<Vec<(usize, T::Hash)>> = partial_layers.drain(..).rev().collect();
let mut reversed_layers: Vec<Vec<(usize, T::Hash)>> =
partial_layers.drain(..).rev().collect();
// This iterates to full_tree_depth and not to the partial_layers_len because
// when constructing

@@ -66,11 +75,10 @@ impl<T: Hasher> PartialTree<T> {
match nodes.get(i * 2) {
// Populate `current_layer` back for the next iteration
Some(left_node) => current_layer.push((
parent_node_index.clone(),
T::concat_and_hash(left_node, nodes.get(i * 2 + 1)
)
*parent_node_index,
T::concat_and_hash(left_node, nodes.get(i * 2 + 1)),
)),
None => return Err(Error::not_enough_helper_nodes())
None => return Err(Error::not_enough_helper_nodes()),
}
}
}

@@ -92,13 +100,8 @@ impl<T: Hasher> PartialTree<T> {
pub fn contains(&self, layer_index: usize, node_index: usize) -> bool {
match self.layers().get(layer_index) {
Some(layer) => {
match layer.iter().position(|(index, _)| index.clone() == node_index) {
Some(_) => true,
None => false
}
},
None => false
Some(layer) => layer.iter().any(|(index, _)| *index == node_index),
None => false,
}
}

@@ -111,7 +114,11 @@ impl<T: Hasher> PartialTree<T> {
pub fn merge_unverified(&mut self, other: Self) {
// Figure out new tree depth after merge
let depth_difference = other.layers().len() - self.layers().len();
let combined_tree_size = if depth_difference > 0 { other.layers().len() } else { self.layers().len() };
let combined_tree_size = if depth_difference > 0 {
other.layers().len()
} else {
self.layers().len()
};
for layer_index in 0..combined_tree_size {
let mut combined_layer: Vec<(usize, T::Hash)> = Vec::new();

@@ -119,7 +126,7 @@ impl<T: Hasher> PartialTree<T> {
if let Some(self_layer) = self.layers().get(layer_index) {
let mut filtered_layer: Vec<(usize, T::Hash)> = self_layer
.iter()
.filter(|(node_index, _)| !other.contains(layer_index, node_index.clone()) )
.filter(|(node_index, _)| !other.contains(layer_index, *node_index))
.cloned()
.collect();

@@ -142,10 +149,8 @@ impl<T: Hasher> PartialTree<T> {
Some(layer) => {
layer.clear();
layer.append(new_layer.as_mut())
},
None => {
self.layers.push(new_layer)
}
None => self.layers.push(new_layer),
}
}

@@ -153,9 +158,7 @@ impl<T: Hasher> PartialTree<T> {
let hashes: Vec<Vec<T::Hash>> = self
.layers()
.iter()
.map(|layer|
layer.iter().cloned().map(|(_, hash)| hash).collect()
)
.map(|layer| layer.iter().cloned().map(|(_, hash)| hash).collect())
.collect();
hashes

@@ -170,4 +173,4 @@ impl<T: Hasher> PartialTree<T> {
pub fn clear(&mut self) {
self.layers.clear();
}
}
}
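A small hedged sketch of `PartialTree` as re-exported from the crate root (see the lib.rs hunk above), assuming `from_leaves` and `root` are exposed on the public type as the hunks suggest: given a complete leaf layer and no helper nodes, `build_tree` fills in every layer, which is how `MerkleProof::root` recovers a root internally.

use rs_merkle::{algorithms::Sha256, Hasher, PartialTree};

fn main() {
    let leaf_hashes: Vec<[u8; 32]> = ["a", "b", "c", "d"]
        .iter()
        .map(|v| Sha256::hash(v.as_bytes().to_vec().as_ref()))
        .collect();

    // With the full leaf set the partial tree degenerates into a complete tree,
    // so a root is always available.
    let partial_tree = PartialTree::<Sha256>::from_leaves(&leaf_hashes).unwrap();
    assert!(partial_tree.root().is_some());
}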
@@ -4,10 +4,7 @@ fn byte_to_hex(byte: &u8) -> String {
/// Serializes bytes into a hex string
pub fn to_hex_string<T: Clone + Into<Vec<u8>>>(bytes: &T) -> String {
let hex_vec: Vec<String> = bytes.clone().into()
.iter()
.map(byte_to_hex)
.collect();
let hex_vec: Vec<String> = bytes.clone().into().iter().map(byte_to_hex).collect();
hex_vec.join("")
}
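The crate's `byte_to_hex` helper is private and only partially visible here; as a hedged, standalone sketch of the same serialization (assuming lowercase, zero-padded output, which matches the root strings in the tests below):

/// Hypothetical standalone equivalent of `to_hex_string`: two lowercase hex chars per byte.
fn to_hex_string(bytes: &[u8]) -> String {
    bytes.iter().map(|byte| format!("{:02x}", byte)).collect()
}

fn main() {
    assert_eq!(to_hex_string(&[0x00, 0xff, 0x10]), "00ff10");
}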
@@ -1,10 +1,5 @@
use crate::utils;
use std::collections::{HashMap};
pub struct LayerInfo {
index: usize,
leaves_count: usize,
}
use std::collections::HashMap;
pub fn is_left_index(index: usize) -> bool {
index % 2 == 0

@@ -27,7 +22,7 @@ pub fn parent_index(index: usize) -> usize {
if is_left_index(index) {
return index / 2;
}
return get_sibling_index(index) / 2;
get_sibling_index(index) / 2
}
pub fn parent_indices(indices: &[usize]) -> Vec<usize> {

@@ -44,10 +39,6 @@ pub fn tree_depth(leaves_count: usize) -> usize {
}
}
pub fn max_leaves_count_at_depth(depth: usize) -> usize {
return (2 as u32).pow(depth as u32) as usize;
}
pub fn uneven_layers(tree_leaves_count: usize) -> HashMap<usize, usize> {
let mut leaves_count = tree_leaves_count;
let depth = tree_depth(tree_leaves_count);

@@ -57,16 +48,19 @@ pub fn uneven_layers(tree_leaves_count: usize) -> HashMap<usize, usize> {
for index in 0..depth {
let uneven_layer = leaves_count % 2 != 0;
if uneven_layer {
uneven_layers.insert(index, leaves_count.clone());
uneven_layers.insert(index, leaves_count);
}
leaves_count = div_ceil(leaves_count, 2);
}
return uneven_layers;
uneven_layers
}
/// Returns layered proof indices
pub fn proof_indices_by_layers(sorted_leaf_indices: &[usize], leaves_count: usize) -> Vec<Vec<usize>> {
pub fn proof_indices_by_layers(
sorted_leaf_indices: &[usize],
leaves_count: usize,
) -> Vec<Vec<usize>> {
let depth = tree_depth(leaves_count);
let uneven_layers = uneven_layers(leaves_count);
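For orientation, a hypothetical, self-contained copy of the index arithmetic used in this module (the crate's own functions live in a private `utils` module): nodes are numbered left-to-right within a layer, even positions are left children, and a parent's index is the left child's index halved.

// Standalone illustration only; `get_sibling_index` is not shown in this hunk and is assumed here.
fn is_left_index(index: usize) -> bool {
    index % 2 == 0
}

fn get_sibling_index(index: usize) -> usize {
    if is_left_index(index) {
        index + 1
    } else {
        index - 1
    }
}

fn parent_index(index: usize) -> usize {
    if is_left_index(index) {
        return index / 2;
    }
    get_sibling_index(index) / 2
}

fn main() {
    // Nodes 4 and 5 are siblings and both map to parent 2 on the next layer.
    assert_eq!(get_sibling_index(4), 5);
    assert_eq!(parent_index(4), 2);
    assert_eq!(parent_index(5), 2);
}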
@@ -1,3 +1,3 @@
//! Utilities used internally to manipulate tree indices
pub mod collections;
pub mod indices;
pub mod collections;
@@ -1,5 +1,5 @@
use rayon::prelude::*;
use rs_merkle::{Hasher, MerkleTree, algorithms::Sha256};
use rs_merkle::{algorithms::Sha256, Hasher, MerkleTree};
pub struct TestData {
pub leaf_values: Vec<String>,

@@ -22,7 +22,7 @@ fn combine<T: Clone>(active: Vec<T>, rest: Vec<T>, mut combinations: Vec<Vec<T>>
combinations = combine(next, rest.clone().drain(1..).collect(), combinations);
combinations = combine(active, rest.clone().drain(1..).collect(), combinations);
combinations
}
};
}
/// Create all possible combinations of elements inside a vector without duplicates

@@ -41,14 +41,14 @@ pub fn setup() -> TestData {
TestData {
leaf_values: leaf_values.iter().cloned().map(String::from).collect(),
leaf_hashes,
expected_root_hex: String::from(expected_root_hex)
expected_root_hex: String::from(expected_root_hex),
}
}
#[derive(Clone)]
pub struct ProofTestCases {
pub merkle_tree: MerkleTree<Sha256>,
pub cases: Vec<MerkleProofTestCase>
pub cases: Vec<MerkleProofTestCase>,
}
#[derive(Clone)]

@@ -68,7 +68,9 @@ impl MerkleProofTestCase {
}
pub fn setup_proof_test_cases() -> Vec<ProofTestCases> {
let max_case = ["a", "b", "c", "d", "e", "f", "g", "h", "k", "l", "m", "o", "p", "r", "s"];
let max_case = [
"a", "b", "c", "d", "e", "f", "g", "h", "k", "l", "m", "o", "p", "r", "s",
];
max_case
.par_iter()

@@ -96,14 +98,16 @@ pub fn setup_proof_test_cases() -> Vec<ProofTestCases> {
.cloned()
.map(|index_combination| {
MerkleProofTestCase::new(
index_combination.iter().map(|index| leaves.get(*index).unwrap().clone()).collect(),
index_combination
.iter()
.map(|index| leaves.get(*index).unwrap().clone())
.collect(),
index_combination,
)
}
)
})
.collect();
let case = ProofTestCases { merkle_tree, cases };
case
})
.collect()
}
}
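The recursive `combine` helper is only partially visible in this hunk; as a hedged illustration of what its doc comment describes ("all possible combinations of elements inside a vector without duplicates"), here is a hypothetical bitmask version that yields every non-empty subset of the indices to prove:

/// Hypothetical stand-in for the recursive `combine` helper: every non-empty subset of `items`.
fn all_combinations<T: Clone>(items: &[T]) -> Vec<Vec<T>> {
    let mut combinations = Vec::new();
    for mask in 1usize..(1 << items.len()) {
        let mut subset = Vec::new();
        for (i, item) in items.iter().enumerate() {
            if mask & (1 << i) != 0 {
                subset.push(item.clone());
            }
        }
        combinations.push(subset);
    }
    combinations
}

fn main() {
    // 3 elements -> 2^3 - 1 = 7 non-empty index combinations to prove.
    assert_eq!(all_combinations(&[0, 1, 2]).len(), 7);
}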
@@ -2,9 +2,9 @@ mod common;
pub mod root {
use crate::common;
use rs_merkle::{MerkleTree, algorithms::Sha256, Hasher};
use std::time::Instant;
use rayon::prelude::*;
use rs_merkle::{algorithms::Sha256, Hasher, MerkleTree};
use std::time::Instant;
#[test]
pub fn should_return_a_correct_root() {

@@ -12,18 +12,31 @@ pub mod root {
let expected_root = test_data.expected_root_hex.clone();
let leaf_hashes = &test_data.leaf_hashes;
let indices_to_prove = vec![3, 4];
let leaves_to_prove: Vec<[u8; 32]> = indices_to_prove.iter().cloned().map(|i| leaf_hashes.get(i).unwrap().clone()).collect();
let leaves_to_prove: Vec<[u8; 32]> = indices_to_prove
.iter()
.cloned()
.map(|i| leaf_hashes.get(i).unwrap().clone())
.collect();
let merkle_tree = MerkleTree::<Sha256>::from_leaves(&test_data.leaf_hashes);
let proof = merkle_tree.proof(&indices_to_prove);
let extracted_root = proof.hex_root(&indices_to_prove, &leaves_to_prove, test_data.leaf_values.len());
let extracted_root = proof.hex_root(
&indices_to_prove,
&leaves_to_prove,
test_data.leaf_values.len(),
);
assert_eq!(extracted_root, expected_root);
let test_preparation_started = Instant::now();
let test_cases = common::setup_proof_test_cases();
println!("Preparing test cases took {:.2}s", test_preparation_started.elapsed().as_secs_f32());
let test_cases_count = test_cases.iter().fold(0, |acc, case| acc + case.cases.len());
println!(
"Preparing test cases took {:.2}s",
test_preparation_started.elapsed().as_secs_f32()
);
let test_cases_count = test_cases
.iter()
.fold(0, |acc, case| acc + case.cases.len());
// Roots:
// 1: ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb

@@ -42,7 +55,6 @@ pub mod root {
// 14: 4e4afdcec057392d1a735b39f41d4f3ef1cab5637c91f5443996079b3c763538
// 15: a2e073232cb6285fa5f04957dfe6a3238a9dce003908932231174884e5861767
let test_run_started = Instant::now();
test_cases.par_iter().for_each(|test_case| {
let merkle_tree = &test_case.merkle_tree;

@@ -50,30 +62,36 @@ pub mod root {
test_case.cases.par_iter().for_each(|case| {
let proof = merkle_tree.proof(&case.leaf_indices_to_prove);
let extracted_root = proof.root(&case.leaf_indices_to_prove, &case.leaf_hashes_to_prove, merkle_tree.leaves().unwrap().len());
let extracted_root = proof.root(
&case.leaf_indices_to_prove,
&case.leaf_hashes_to_prove,
merkle_tree.leaves().unwrap().len(),
);
assert_eq!(extracted_root, root)
});
});
println!("{} test cases executed in {:.2}s", test_cases_count, test_run_started.elapsed().as_secs_f32());
println!(
"{} test cases executed in {:.2}s",
test_cases_count,
test_run_started.elapsed().as_secs_f32()
);
}
}
pub mod to_bytes {
use crate::common;
use rs_merkle::{MerkleTree, algorithms::Sha256};
use rs_merkle::{algorithms::Sha256, MerkleTree};
#[test]
pub fn should_correctly_serialize_to_bytes() {
let expected_bytes: Vec<u8> = vec![
46, 125, 44, 3, 169, 80, 122, 226, 101, 236, 245, 181,
53, 104, 133, 165, 51, 147, 162, 2, 157, 36, 19, 148,
153, 114, 101, 161, 162, 90, 239, 198, 37, 47, 16, 200,
54, 16, 235, 202, 26, 5, 156, 11, 174, 130, 85, 235,
162, 249, 91, 228, 209, 215, 188, 250, 137, 215, 36, 138,
130, 217, 241, 17, 229, 160, 31, 238, 20, 224, 237, 92,
72, 113, 79, 34, 24, 15, 37, 173, 131, 101, 181, 63,
151, 121, 247, 157, 196, 163, 215, 233, 57, 99, 249, 74
46, 125, 44, 3, 169, 80, 122, 226, 101, 236, 245, 181, 53, 104, 133, 165, 51, 147, 162,
2, 157, 36, 19, 148, 153, 114, 101, 161, 162, 90, 239, 198, 37, 47, 16, 200, 54, 16,
235, 202, 26, 5, 156, 11, 174, 130, 85, 235, 162, 249, 91, 228, 209, 215, 188, 250,
137, 215, 36, 138, 130, 217, 241, 17, 229, 160, 31, 238, 20, 224, 237, 92, 72, 113, 79,
34, 24, 15, 37, 173, 131, 101, 181, 63, 151, 121, 247, 157, 196, 163, 215, 233, 57, 99,
249, 74,
];
let test_data = common::setup();

@@ -88,8 +106,8 @@ pub mod to_bytes {
}
pub mod from_bytes {
use rs_merkle::{MerkleProof, algorithms::Sha256};
use crate::common;
use rs_merkle::{algorithms::Sha256, MerkleProof};
#[test]
pub fn should_return_result_with_proof() {

@@ -100,14 +118,12 @@ pub mod from_bytes {
];
let bytes: Vec<u8> = vec![
46, 125, 44, 3, 169, 80, 122, 226, 101, 236, 245, 181,
53, 104, 133, 165, 51, 147, 162, 2, 157, 36, 19, 148,
153, 114, 101, 161, 162, 90, 239, 198, 37, 47, 16, 200,
54, 16, 235, 202, 26, 5, 156, 11, 174, 130, 85, 235,
162, 249, 91, 228, 209, 215, 188, 250, 137, 215, 36, 138,
130, 217, 241, 17, 229, 160, 31, 238, 20, 224, 237, 92,
72, 113, 79, 34, 24, 15, 37, 173, 131, 101, 181, 63,
151, 121, 247, 157, 196, 163, 215, 233, 57, 99, 249, 74
46, 125, 44, 3, 169, 80, 122, 226, 101, 236, 245, 181, 53, 104, 133, 165, 51, 147, 162,
2, 157, 36, 19, 148, 153, 114, 101, 161, 162, 90, 239, 198, 37, 47, 16, 200, 54, 16,
235, 202, 26, 5, 156, 11, 174, 130, 85, 235, 162, 249, 91, 228, 209, 215, 188, 250,
137, 215, 36, 138, 130, 217, 241, 17, 229, 160, 31, 238, 20, 224, 237, 92, 72, 113, 79,
34, 24, 15, 37, 173, 131, 101, 181, 63, 151, 121, 247, 157, 196, 163, 215, 233, 57, 99,
249, 74,
];
let proof = MerkleProof::<Sha256>::from_bytes(bytes).unwrap();

@@ -119,17 +135,18 @@ pub mod from_bytes {
#[test]
pub fn should_return_error_when_proof_can_not_be_parsed() {
let bytes: Vec<u8> = vec![
46, 125, 44, 3, 169, 80, 122, 226, 101, 236, 245, 181,
53, 104, 133, 165, 51, 147, 162, 2, 157, 36, 19, 148,
153, 114, 101, 161, 162, 90, 239, 198, 37, 47, 16, 200,
54, 16, 235, 202, 26, 5, 156, 11, 174, 130, 85, 235,
162, 249, 91, 228, 209, 215, 188, 250, 137, 215, 36, 138,
130, 217, 241, 17, 229, 160, 31, 238, 20, 224, 237, 92,
72, 113, 79, 34, 24, 15, 37, 173, 131, 101, 181, 63,
46, 125, 44, 3, 169, 80, 122, 226, 101, 236, 245, 181, 53, 104, 133, 165, 51, 147, 162,
2, 157, 36, 19, 148, 153, 114, 101, 161, 162, 90, 239, 198, 37, 47, 16, 200, 54, 16,
235, 202, 26, 5, 156, 11, 174, 130, 85, 235, 162, 249, 91, 228, 209, 215, 188, 250,
137, 215, 36, 138, 130, 217, 241, 17, 229, 160, 31, 238, 20, 224, 237, 92, 72, 113, 79,
34, 24, 15, 37, 173, 131, 101, 181, 63,
];
let err = MerkleProof::<Sha256>::from_bytes(bytes).err().unwrap();
assert_eq!(err.message(), "Proof of size 84 bytes can not be divided into chunks of 32 bytes");
assert_eq!(
err.message(),
"Proof of size 84 bytes can not be divided into chunks of 32 bytes"
);
}
}
}
@@ -2,7 +2,7 @@ mod common;
pub mod root {
use crate::common;
use rs_merkle::{MerkleTree, algorithms::Sha256};
use rs_merkle::{algorithms::Sha256, MerkleTree};
#[test]
pub fn should_return_a_correct_root() {

@@ -17,7 +17,7 @@ pub mod root {
pub mod tree_depth {
use crate::common;
use rs_merkle::{MerkleTree, algorithms::Sha256};
use rs_merkle::{algorithms::Sha256, MerkleTree};
#[test]
pub fn should_return_a_correct_tree_depth() {

@@ -32,7 +32,7 @@ pub mod tree_depth {
pub mod proof {
use crate::common;
use rs_merkle::{MerkleTree, algorithms::Sha256};
use rs_merkle::{algorithms::Sha256, MerkleTree};
#[test]
pub fn should_return_a_correct_proof() {

@@ -54,7 +54,7 @@ pub mod proof {
pub mod commit {
use crate::common;
use rs_merkle::{MerkleTree, algorithms::Sha256, Hasher};
use rs_merkle::{algorithms::Sha256, Hasher, MerkleTree};
#[test]
pub fn should_give_correct_root_after_commit() {

@@ -63,7 +63,7 @@ pub mod commit {
let leaf_hashes = &test_data.leaf_hashes;
// let indices_to_prove = vec![3, 4];
// let leaves_to_prove = indices_to_prove.iter().cloned().map(|i| leaf_hashes.get(i).unwrap().clone()).collect();
let vec = Vec::<[u8;32]>::new();
let vec = Vec::<[u8; 32]>::new();
// Passing empty vec to create an empty tree
let mut merkle_tree = MerkleTree::<Sha256>::from_leaves(&vec);

@@ -79,7 +79,10 @@ pub mod commit {
let leaf = Sha256::hash("g".as_bytes().to_vec().as_ref());
merkle_tree.insert(leaf);
assert_eq!(merkle_tree.uncommitted_root_hex().unwrap(), String::from(expected_root));
assert_eq!(
merkle_tree.uncommitted_root_hex().unwrap(),
String::from(expected_root)
);
// No changes were committed just yet, tree is empty
assert_eq!(merkle_tree.root(), None);

@@ -92,22 +95,31 @@ pub mod commit {
];
merkle_tree.append(&mut new_leaves);
assert_eq!(merkle_tree.root_hex().unwrap(), String::from("e2a80e0e872a6c6eaed37b4c1f220e1935004805585b5f99617e48e9c8fe4034"));
assert_eq!(merkle_tree.uncommitted_root_hex().unwrap(), String::from("09b6890b23e32e607f0e5f670ab224e36af8f6599cbe88b468f4b0f761802dd6"));
assert_eq!(
merkle_tree.root_hex().unwrap(),
String::from("e2a80e0e872a6c6eaed37b4c1f220e1935004805585b5f99617e48e9c8fe4034")
);
assert_eq!(
merkle_tree.uncommitted_root_hex().unwrap(),
String::from("09b6890b23e32e607f0e5f670ab224e36af8f6599cbe88b468f4b0f761802dd6")
);
merkle_tree.commit();
let leaves = merkle_tree.leaves().unwrap();
let reconstructed_tree = MerkleTree::<Sha256>::from_leaves(&leaves);
// Check that the commit is applied correctly
assert_eq!(reconstructed_tree.root_hex().unwrap(), String::from("09b6890b23e32e607f0e5f670ab224e36af8f6599cbe88b468f4b0f761802dd6"));
assert_eq!(
reconstructed_tree.root_hex().unwrap(),
String::from("09b6890b23e32e607f0e5f670ab224e36af8f6599cbe88b468f4b0f761802dd6")
);
assert_eq!(reconstructed_tree.layers(), merkle_tree.layers());
}
}
pub mod rollback {
use crate::common;
use rs_merkle::{MerkleTree, algorithms::Sha256, Hasher};
use rs_merkle::{algorithms::Sha256, Hasher, MerkleTree};
pub fn should_rollback_previous_commit() {
let leaf_values = ["a", "b", "c", "d", "e", "f"];

@@ -124,42 +136,66 @@ pub mod rollback {
merkle_tree.commit();
assert_eq!(merkle_tree.uncommitted_root_hex().unwrap(), String::from("1f7379539707bcaea00564168d1d4d626b09b73f8a2a365234c62d763f854da2"));
assert_eq!(
merkle_tree.uncommitted_root_hex().unwrap(),
String::from("1f7379539707bcaea00564168d1d4d626b09b73f8a2a365234c62d763f854da2")
);
// Adding a new leaf
merkle_tree.insert(Sha256::hash("g".as_bytes().to_vec().as_ref()));
// Uncommitted root must reflect the insert
assert_eq!(merkle_tree.uncommitted_root_hex().unwrap(), String::from("e2a80e0e872a6c6eaed37b4c1f220e1935004805585b5f99617e48e9c8fe4034"));
assert_eq!(
merkle_tree.uncommitted_root_hex().unwrap(),
String::from("e2a80e0e872a6c6eaed37b4c1f220e1935004805585b5f99617e48e9c8fe4034")
);
merkle_tree.commit();
// After calling commit, uncommitted root will become committed
assert_eq!(merkle_tree.root_hex().unwrap(), String::from("e2a80e0e872a6c6eaed37b4c1f220e1935004805585b5f99617e48e9c8fe4034"));
assert_eq!(
merkle_tree.root_hex().unwrap(),
String::from("e2a80e0e872a6c6eaed37b4c1f220e1935004805585b5f99617e48e9c8fe4034")
);
// Adding some more leaves
merkle_tree.append(vec![
Sha256::hash("h".as_bytes().to_vec().as_ref()),
Sha256::hash("k".as_bytes().to_vec().as_ref()),
].as_mut());
merkle_tree.append(
vec![
Sha256::hash("h".as_bytes().to_vec().as_ref()),
Sha256::hash("k".as_bytes().to_vec().as_ref()),
]
.as_mut(),
);
// Checking that the uncommitted root has changed, but the committed one hasn't
assert_eq!(merkle_tree.uncommitted_root_hex().unwrap(), String::from("09b6890b23e32e607f0e5f670ab224e36af8f6599cbe88b468f4b0f761802dd6"));
assert_eq!(merkle_tree.root_hex().unwrap(), String::from("e2a80e0e872a6c6eaed37b4c1f220e1935004805585b5f99617e48e9c8fe4034"));
assert_eq!(
merkle_tree.uncommitted_root_hex().unwrap(),
String::from("09b6890b23e32e607f0e5f670ab224e36af8f6599cbe88b468f4b0f761802dd6")
);
assert_eq!(
merkle_tree.root_hex().unwrap(),
String::from("e2a80e0e872a6c6eaed37b4c1f220e1935004805585b5f99617e48e9c8fe4034")
);
merkle_tree.commit();
// Checking committed changes again
assert_eq!(merkle_tree.root_hex().unwrap(), String::from("09b6890b23e32e607f0e5f670ab224e36af8f6599cbe88b468f4b0f761802dd6"));
assert_eq!(
merkle_tree.root_hex().unwrap(),
String::from("09b6890b23e32e607f0e5f670ab224e36af8f6599cbe88b468f4b0f761802dd6")
);
merkle_tree.rollback();
// Check that we rolled one commit back
assert_eq!(merkle_tree.root_hex().unwrap(), String::from("e2a80e0e872a6c6eaed37b4c1f220e1935004805585b5f99617e48e9c8fe4034"));
assert_eq!(
merkle_tree.root_hex().unwrap(),
String::from("e2a80e0e872a6c6eaed37b4c1f220e1935004805585b5f99617e48e9c8fe4034")
);
merkle_tree.rollback();
// Rolling back to the state after the very first commit
assert_eq!(merkle_tree.root_hex().unwrap(), expected_root_hex);
}
}
}