Mirror of https://github.com/paradigmxyz/reth.git, synced 2026-02-19 03:04:27 -05:00
feat(trie): remove SerialSparseTrie (#21808)
Co-authored-by: Georgios Konstantopoulos <me@gakonst.com>
Co-authored-by: Amp <amp@ampcode.com>
Co-authored-by: Brian Picciano <me@mediocregopher.com>
5
.changelog/swift-owls-fly.md
Normal file
@@ -0,0 +1,5 @@
---
reth-trie-sparse: minor
---

Removed `SerialSparseTrie` from the workspace, consolidating on `ParallelSparseTrie` as the single sparse trie implementation in `reth-trie-sparse`.
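For downstream code, the visible effect of this consolidation is that `SparseStateTrie`'s account and storage type parameters now default to `ParallelSparseTrie`, and the parallel types are exported from `reth_trie_sparse` itself. A minimal, hedged sketch of caller code after the change, using only items that appear in this diff:

use reth_trie_sparse::{ParallelSparseTrie, SparseStateTrie};

// Explicit form, as used in the updated tests in this commit:
let _explicit = SparseStateTrie::<ParallelSparseTrie>::default();
// Relying on the new default type parameters, as the updated witness code does:
let _implicit = SparseStateTrie::new();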
32
Cargo.lock
generated
@@ -8387,7 +8387,6 @@ dependencies = [
"reth-trie-db",
"reth-trie-parallel",
"reth-trie-sparse",
"reth-trie-sparse-parallel",
"revm",
"revm-primitives",
"revm-state",
@@ -10691,7 +10690,6 @@ dependencies = [
"arbitrary",
"assert_matches",
"auto_impl",
"codspeed-criterion-compat",
"itertools 0.14.0",
"metrics",
"pretty_assertions",
@@ -10705,40 +10703,10 @@ dependencies = [
"reth-primitives-traits",
"reth-provider",
"reth-storage-api",
"reth-testing-utils",
"reth-tracing",
"reth-trie",
"reth-trie-common",
"reth-trie-db",
"tracing",
]

[[package]]
name = "reth-trie-sparse-parallel"
version = "1.10.2"
dependencies = [
"alloy-primitives",
"alloy-rlp",
"alloy-trie",
"arbitrary",
"assert_matches",
"itertools 0.14.0",
"metrics",
"pretty_assertions",
"proptest",
"proptest-arbitrary-interop",
"rand 0.8.5",
"rand 0.9.2",
"rayon",
"reth-execution-errors",
"reth-metrics",
"reth-primitives-traits",
"reth-provider",
"reth-tracing",
"reth-trie",
"reth-trie-common",
"reth-trie-db",
"reth-trie-sparse",
"smallvec",
"tracing",
]

@@ -125,7 +125,6 @@ members = [
"crates/trie/db",
"crates/trie/parallel/",
"crates/trie/sparse",
"crates/trie/sparse-parallel/",
"crates/trie/trie",
"examples/beacon-api-sidecar-fetcher/",
"examples/beacon-api-sse/",
@@ -434,7 +433,6 @@ reth-trie-common = { path = "crates/trie/common", default-features = false }
reth-trie-db = { path = "crates/trie/db" }
reth-trie-parallel = { path = "crates/trie/parallel" }
reth-trie-sparse = { path = "crates/trie/sparse", default-features = false }
reth-trie-sparse-parallel = { path = "crates/trie/sparse-parallel" }
reth-zstd-compressors = { path = "crates/storage/zstd-compressors", default-features = false }
reth-ress-protocol = { path = "crates/ress/protocol" }
reth-ress-provider = { path = "crates/ress/provider" }

@@ -32,7 +32,6 @@ reth-stages-api.workspace = true
reth-tasks.workspace = true
reth-trie-parallel.workspace = true
reth-trie-sparse = { workspace = true, features = ["std", "metrics"] }
reth-trie-sparse-parallel = { workspace = true, features = ["std"] }
reth-trie.workspace = true
reth-trie-common.workspace = true
reth-trie-db.workspace = true

@@ -39,8 +39,9 @@ use reth_trie_parallel::{
proof_task::{ProofTaskCtx, ProofWorkerHandle},
root::ParallelStateRootError,
};
use reth_trie_sparse::{RevealableSparseTrie, SparseStateTrie};
use reth_trie_sparse_parallel::{ParallelSparseTrie, ParallelismThresholds};
use reth_trie_sparse::{
ParallelSparseTrie, ParallelismThresholds, RevealableSparseTrie, SparseStateTrie,
};
use std::{
collections::BTreeMap,
ops::Not,

@@ -3,12 +3,11 @@
use alloy_primitives::B256;
use parking_lot::Mutex;
use reth_trie_sparse::SparseStateTrie;
use reth_trie_sparse_parallel::ParallelSparseTrie;
use std::sync::Arc;
use tracing::debug;

/// Type alias for the sparse trie type used in preservation.
pub(super) type SparseTrie = SparseStateTrie<ParallelSparseTrie, ParallelSparseTrie>;
pub(super) type SparseTrie = SparseStateTrie;

/// Shared handle to a preserved sparse trie that can be reused across payload validations.
///

@@ -28,7 +28,7 @@ use reth_trie_parallel::{
use reth_trie_sparse::{
errors::{SparseStateTrieResult, SparseTrieErrorKind, SparseTrieResult},
provider::{TrieNodeProvider, TrieNodeProviderFactory},
DeferredDrops, LeafUpdate, SerialSparseTrie, SparseStateTrie, SparseTrie, SparseTrieExt,
DeferredDrops, LeafUpdate, ParallelSparseTrie, SparseStateTrie, SparseTrie, SparseTrieExt,
};
use revm_primitives::{hash_map::Entry, B256Map};
use smallvec::SmallVec;
@@ -97,7 +97,7 @@ where
}

/// A task responsible for populating the sparse trie.
pub(super) struct SparseTrieTask<BPF, A = SerialSparseTrie, S = SerialSparseTrie>
pub(super) struct SparseTrieTask<BPF, A = ParallelSparseTrie, S = ParallelSparseTrie>
where
BPF: TrieNodeProviderFactory + Send + Sync,
BPF::AccountNodeProvider: TrieNodeProvider + Send + Sync,
@@ -212,7 +212,7 @@ where
const MAX_PENDING_UPDATES: usize = 100;

/// Sparse trie task implementation that uses in-memory sparse trie data to schedule proof fetching.
pub(super) struct SparseTrieCacheTask<A = SerialSparseTrie, S = SerialSparseTrie> {
pub(super) struct SparseTrieCacheTask<A = ParallelSparseTrie, S = ParallelSparseTrie> {
/// Sender for proof results.
proof_result_tx: CrossbeamSender<ProofResultMessage>,
/// Receiver for proof results directly from workers.

@@ -117,8 +117,8 @@ fn correctly_decodes_branch_node_values() {

let address = Address::random();
let hashed_address = keccak256(address);
let hashed_slot1 = B256::with_last_byte(1);
let hashed_slot2 = B256::with_last_byte(2);
let hashed_slot1 = B256::repeat_byte(1);
let hashed_slot2 = B256::repeat_byte(2);

// Insert account and slots into database
provider.insert_account_for_hashing([(address, Some(Account::default()))]).unwrap();

@@ -1,71 +0,0 @@
[package]
name = "reth-trie-sparse-parallel"
version.workspace = true
edition.workspace = true
rust-version.workspace = true
license.workspace = true
homepage.workspace = true
repository.workspace = true
description = "Parallel Sparse MPT implementation"

[lints]
workspace = true

[dependencies]
# reth
reth-execution-errors.workspace = true
reth-trie-common.workspace = true
reth-trie-sparse.workspace = true
tracing = { workspace = true, features = ["attributes"] }
alloy-trie.workspace = true

# alloy
alloy-primitives.workspace = true
alloy-rlp.workspace = true

# metrics
reth-metrics = { workspace = true, optional = true }
metrics = { workspace = true, optional = true }

# misc
rayon = { workspace = true, optional = true }
smallvec.workspace = true

[dev-dependencies]
# reth
reth-primitives-traits.workspace = true
reth-provider = { workspace = true, features = ["test-utils"] }
reth-trie-common = { workspace = true, features = ["test-utils", "arbitrary"] }
reth-trie-db.workspace = true
reth-trie-sparse = { workspace = true, features = ["test-utils"] }
reth-trie.workspace = true
reth-tracing.workspace = true

# misc
arbitrary.workspace = true
assert_matches.workspace = true
itertools.workspace = true
pretty_assertions.workspace = true
proptest-arbitrary-interop.workspace = true
proptest.workspace = true
rand.workspace = true
rand_08.workspace = true

[features]
default = ["std", "metrics"]
std = [
"dep:rayon",
"alloy-primitives/std",
"alloy-rlp/std",
"alloy-trie/std",
"reth-execution-errors/std",
"reth-primitives-traits/std",
"reth-trie-common/std",
"reth-trie-sparse/std",
"tracing/std",
]
metrics = [
"dep:reth-metrics",
"dep:metrics",
"std",
]
@@ -1,14 +0,0 @@
//! The implementation of parallel sparse MPT.

#![cfg_attr(not(test), warn(unused_crate_dependencies))]

extern crate alloc;

mod trie;
pub use trie::*;

mod lower;
use lower::*;

#[cfg(feature = "metrics")]
mod metrics;
@@ -1,23 +0,0 @@
//! Metrics for the parallel sparse trie
use reth_metrics::{metrics::Histogram, Metrics};

/// Metrics for the parallel sparse trie
#[derive(Metrics, Clone)]
#[metrics(scope = "parallel_sparse_trie")]
pub(crate) struct ParallelSparseTrieMetrics {
/// A histogram for the number of subtries updated when calculating hashes.
pub(crate) subtries_updated: Histogram,
/// A histogram for the time it took to update lower subtrie hashes.
pub(crate) subtrie_hash_update_latency: Histogram,
/// A histogram for the time it took to update the upper subtrie hashes.
pub(crate) subtrie_upper_hash_latency: Histogram,
}

impl PartialEq for ParallelSparseTrieMetrics {
fn eq(&self, _other: &Self) -> bool {
// It does not make sense to compare metrics, so return true, all are equal
true
}
}

impl Eq for ParallelSparseTrieMetrics {}
@@ -26,6 +26,7 @@ alloy-rlp.workspace = true
# misc
auto_impl.workspace = true
rayon = { workspace = true, optional = true }
smallvec.workspace = true

# metrics
reth-metrics = { workspace = true, optional = true }
@@ -35,7 +36,6 @@ metrics = { workspace = true, optional = true }
reth-primitives-traits = { workspace = true, features = ["arbitrary"] }
reth-provider = { workspace = true, features = ["test-utils"] }
reth-storage-api.workspace = true
reth-testing-utils.workspace = true
reth-trie = { workspace = true, features = ["test-utils"] }
reth-trie-common = { workspace = true, features = ["test-utils", "arbitrary"] }
reth-trie-db = { workspace = true, features = ["test-utils"] }
@@ -43,7 +43,6 @@ reth-tracing.workspace = true

arbitrary.workspace = true
assert_matches.workspace = true
criterion.workspace = true
itertools.workspace = true
pretty_assertions.workspace = true
proptest-arbitrary-interop.workspace = true
@@ -80,12 +79,5 @@ arbitrary = [
"alloy-trie/arbitrary",
"reth-primitives-traits/arbitrary",
"reth-trie-common/arbitrary",
"smallvec/arbitrary",
]

[[bench]]
name = "root"
harness = false

[[bench]]
name = "rlp_node"
harness = false

@@ -1,72 +0,0 @@
#![allow(missing_docs)]

use alloy_primitives::{B256, U256};
use criterion::{criterion_group, criterion_main, Criterion};
use prop::strategy::ValueTree;
use proptest::{prelude::*, test_runner::TestRunner};
use rand::{seq::IteratorRandom, Rng};
use reth_testing_utils::generators;
use reth_trie::Nibbles;
use reth_trie_sparse::{provider::DefaultTrieNodeProvider, SerialSparseTrie, SparseTrie};

fn update_rlp_node_level(c: &mut Criterion) {
let mut rng = generators::rng();
let mut group = c.benchmark_group("update rlp node level");
group.sample_size(20);

for size in [100_000] {
let mut runner = TestRunner::deterministic();
let state = proptest::collection::hash_map(any::<B256>(), any::<U256>(), size)
.new_tree(&mut runner)
.unwrap()
.current();

// Create a sparse trie with `size` leaves
let provider = DefaultTrieNodeProvider;
let mut sparse = SerialSparseTrie::default();
for (key, value) in &state {
sparse
.update_leaf(
Nibbles::unpack(key),
alloy_rlp::encode_fixed_size(value).to_vec(),
&provider,
)
.unwrap();
}
sparse.root();

for updated_leaves in [0.1, 1.0] {
for key in state
.keys()
.choose_multiple(&mut rng, (size as f64 * (updated_leaves / 100.0)) as usize)
{
sparse
.update_leaf(
Nibbles::unpack(key),
alloy_rlp::encode_fixed_size(&rng.random::<U256>()).to_vec(),
&provider,
)
.unwrap();
}

// Calculate the maximum depth of the trie for the given number of leaves
let max_depth = (size as f64).log(16.0).ceil() as usize;

for depth in 0..=max_depth {
group.bench_function(
format!("size {size} | updated {updated_leaves}% | depth {depth}"),
|b| {
b.iter_batched_ref(
|| sparse.clone(),
|cloned| cloned.update_rlp_node_level(depth),
criterion::BatchSize::PerIteration,
)
},
);
}
}
}
}

criterion_group!(rlp_node, update_rlp_node_level);
criterion_main!(rlp_node);
@@ -1,259 +0,0 @@
#![allow(missing_docs)]

use alloy_primitives::{map::B256Map, B256, U256};
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use itertools::Itertools;
use proptest::{prelude::*, strategy::ValueTree, test_runner::TestRunner};
use reth_trie::{
hashed_cursor::{noop::NoopHashedCursor, HashedPostStateCursor},
node_iter::{TrieElement, TrieNodeIter},
trie_cursor::{noop::NoopStorageTrieCursor, InMemoryTrieCursor},
updates::StorageTrieUpdates,
walker::TrieWalker,
HashedStorage,
};
use reth_trie_common::{updates::TrieUpdatesSorted, HashBuilder, Nibbles};
use reth_trie_sparse::{provider::DefaultTrieNodeProvider, RevealableSparseTrie, SerialSparseTrie};

fn calculate_root_from_leaves(c: &mut Criterion) {
let mut group = c.benchmark_group("calculate root from leaves");
group.sample_size(20);

for size in [1_000, 5_000, 10_000, 100_000] {
// Too slow.
#[expect(unexpected_cfgs)]
if cfg!(codspeed) && size > 5_000 {
continue;
}

let state = generate_test_data(size);

// hash builder
group.bench_function(BenchmarkId::new("hash builder", size), |b| {
b.iter_with_setup(HashBuilder::default, |mut hb| {
for (key, value) in state.iter().sorted_by_key(|(key, _)| *key) {
hb.add_leaf(Nibbles::unpack(key), &alloy_rlp::encode_fixed_size(value));
}
hb.root();
hb
})
});

// sparse trie
let provider = DefaultTrieNodeProvider;
group.bench_function(BenchmarkId::new("sparse trie", size), |b| {
b.iter_with_setup(
RevealableSparseTrie::<SerialSparseTrie>::revealed_empty,
|mut sparse| {
for (key, value) in &state {
sparse
.update_leaf(
Nibbles::unpack(key),
alloy_rlp::encode_fixed_size(value).to_vec(),
&provider,
)
.unwrap();
}
sparse.root().unwrap();
sparse
},
)
});
}
}

fn calculate_root_from_leaves_repeated(c: &mut Criterion) {
let mut group = c.benchmark_group("calculate root from leaves repeated");
group.sample_size(20);

for init_size in [1_000, 10_000, 100_000] {
// Too slow.
#[expect(unexpected_cfgs)]
if cfg!(codspeed) && init_size > 10_000 {
continue;
}

let init_state = generate_test_data(init_size);

for update_size in [100, 1_000, 5_000, 10_000] {
// Too slow.
#[expect(unexpected_cfgs)]
if cfg!(codspeed) && update_size > 1_000 {
continue;
}

for num_updates in [1, 3, 5, 10] {
let updates =
(0..num_updates).map(|_| generate_test_data(update_size)).collect::<Vec<_>>();

// hash builder
let benchmark_id = BenchmarkId::new(
"hash builder",
format!(
"init size {init_size} | update size {update_size} | num updates {num_updates}"
),
);
group.bench_function(benchmark_id, |b| {
b.iter_with_setup(
|| {
let init_storage = HashedStorage::from_iter(false, init_state.clone());
let storage_updates = updates
.clone()
.into_iter()
.map(|update| HashedStorage::from_iter(false, update))
.collect::<Vec<_>>();

let mut hb = HashBuilder::default().with_updates(true);
for (key, value) in init_state.iter().sorted_by_key(|(key, _)| *key) {
hb.add_leaf(
Nibbles::unpack(key),
&alloy_rlp::encode_fixed_size(value),
);
}
hb.root();

let (_, updates) = hb.split();
let trie_updates = StorageTrieUpdates::new(updates);
(init_storage, storage_updates, trie_updates)
},
|(init_storage, storage_updates, mut trie_updates)| {
let mut storage = init_storage;
let mut storage_updates = storage_updates.into_iter().peekable();
while let Some(update) = storage_updates.next() {
storage.extend(&update);

let prefix_set = update.construct_prefix_set().freeze();
let (storage_sorted, trie_updates_sorted) =
if storage_updates.peek().is_some() {
(
storage.clone().into_sorted(),
trie_updates.clone().into_sorted(),
)
} else {
(
std::mem::take(&mut storage).into_sorted(),
std::mem::take(&mut trie_updates).into_sorted(),
)
};

// Create a TrieUpdatesSorted with just this storage trie
let mut storage_tries = Default::default();
alloy_primitives::map::B256Map::insert(
&mut storage_tries,
B256::ZERO,
trie_updates_sorted.clone(),
);
let full_trie_updates =
TrieUpdatesSorted::new(Vec::new(), storage_tries);

let walker = TrieWalker::<_>::storage_trie(
InMemoryTrieCursor::new_storage(
NoopStorageTrieCursor::default(),
&full_trie_updates,
B256::ZERO,
),
prefix_set,
);
let hashed_address = B256::ZERO;
let mut storages = alloy_primitives::map::B256Map::default();
storages.insert(hashed_address, storage_sorted.clone());
let hashed_post_state =
reth_trie::HashedPostStateSorted::new(Vec::new(), storages);

let mut node_iter = TrieNodeIter::storage_trie(
walker,
HashedPostStateCursor::new_storage(
NoopHashedCursor::<U256>::default(),
&hashed_post_state,
hashed_address,
),
);

let mut hb = HashBuilder::default().with_updates(true);
while let Some(node) = node_iter.try_next().unwrap() {
match node {
TrieElement::Branch(node) => {
hb.add_branch(
node.key,
node.value,
node.children_are_in_trie,
);
}
TrieElement::Leaf(hashed_slot, value) => {
hb.add_leaf(
Nibbles::unpack(hashed_slot),
alloy_rlp::encode_fixed_size(&value).as_ref(),
);
}
}
}
hb.root();

if storage_updates.peek().is_some() {
trie_updates.finalize(hb, node_iter.walker.take_removed_keys());
}
}
(storage, storage_updates, trie_updates)
},
)
});

// sparse trie
let provider = DefaultTrieNodeProvider;
let benchmark_id = BenchmarkId::new(
"sparse trie",
format!(
"init size {init_size} | update size {update_size} | num updates {num_updates}"
),
);
group.bench_function(benchmark_id, |b| {
b.iter_with_setup(
|| {
let mut sparse =
RevealableSparseTrie::<SerialSparseTrie>::revealed_empty();
for (key, value) in &init_state {
sparse
.update_leaf(
Nibbles::unpack(key),
alloy_rlp::encode_fixed_size(value).to_vec(),
&provider,
)
.unwrap();
}
sparse.root().unwrap();
sparse
},
|mut sparse| {
for update in &updates {
for (key, value) in update {
sparse
.update_leaf(
Nibbles::unpack(key),
alloy_rlp::encode_fixed_size(value).to_vec(),
&provider,
)
.unwrap();
}
sparse.root().unwrap();
}
sparse
},
)
});
}
}
}
}

fn generate_test_data(size: usize) -> B256Map<U256> {
let mut runner = TestRunner::deterministic();
proptest::collection::hash_map(any::<B256>(), any::<U256>(), size)
.new_tree(&mut runner)
.unwrap()
.current()
.into_iter()
.collect()
}

criterion_group!(root, calculate_root_from_leaves, calculate_root_from_leaves_repeated);
criterion_main!(root);
@@ -14,6 +14,11 @@ pub use trie::*;
mod traits;
pub use traits::*;

mod parallel;
pub use parallel::*;

mod lower;

pub mod provider;

#[cfg(feature = "metrics")]

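Since the parallel implementation now lives inside `reth-trie-sparse` itself (the `mod parallel` added above), callers that previously depended on the removed `reth-trie-sparse-parallel` crate switch to the consolidated import path. A small sketch of the migration, assuming only the paths visible elsewhere in this diff:

// Before this commit (crate removed here):
// use reth_trie_sparse_parallel::{ParallelSparseTrie, ParallelismThresholds};
// After this commit:
use reth_trie_sparse::{ParallelSparseTrie, ParallelismThresholds};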
@@ -1,4 +1,5 @@
use crate::SparseSubtrie;
use crate::parallel::SparseSubtrie;
use alloc::boxed::Box;
use reth_trie_common::Nibbles;

/// Tracks the state of the lower subtries.
@@ -73,3 +73,24 @@ pub(crate) struct SparseStateTrieInnerMetrics {
/// Histogram of total storage nodes, including those that were skipped.
pub(crate) multiproof_total_storage_nodes: Histogram,
}

/// Metrics for the parallel sparse trie
#[derive(Metrics, Clone)]
#[metrics(scope = "parallel_sparse_trie")]
pub(crate) struct ParallelSparseTrieMetrics {
/// A histogram for the number of subtries updated when calculating hashes.
pub(crate) subtries_updated: Histogram,
/// A histogram for the time it took to update lower subtrie hashes.
pub(crate) subtrie_hash_update_latency: Histogram,
/// A histogram for the time it took to update the upper subtrie hashes.
pub(crate) subtrie_upper_hash_latency: Histogram,
}

impl PartialEq for ParallelSparseTrieMetrics {
fn eq(&self, _other: &Self) -> bool {
// It does not make sense to compare metrics, so return true, all are equal
true
}
}

impl Eq for ParallelSparseTrieMetrics {}

@@ -1,24 +1,24 @@
use crate::LowerSparseSubtrie;
use alloc::borrow::Cow;
use crate::{
lower::LowerSparseSubtrie,
provider::{RevealedNode, TrieNodeProvider},
LeafLookup, LeafLookupError, RlpNodeStackItem, SparseNode, SparseNodeType, SparseTrie,
SparseTrieExt, SparseTrieUpdates,
};
use alloc::{borrow::Cow, boxed::Box, vec, vec::Vec};
use alloy_primitives::{
map::{Entry, HashMap},
B256, U256,
};
use alloy_rlp::Decodable;
use alloy_trie::{BranchNodeCompact, TrieMask, EMPTY_ROOT_HASH};
use core::cmp::{Ord, Ordering, PartialOrd};
use reth_execution_errors::{SparseTrieError, SparseTrieErrorKind, SparseTrieResult};
use reth_trie_common::{
prefix_set::{PrefixSet, PrefixSetMut},
BranchNodeMasks, BranchNodeMasksMap, BranchNodeRef, ExtensionNodeRef, LeafNodeRef, Nibbles,
ProofTrieNode, RlpNode, TrieNode,
};
use reth_trie_sparse::{
provider::{RevealedNode, TrieNodeProvider},
LeafLookup, LeafLookupError, RlpNodeStackItem, SparseNode, SparseNodeType, SparseTrie,
SparseTrieExt, SparseTrieUpdates,
};
use smallvec::SmallVec;
use std::cmp::{Ord, Ordering, PartialOrd};
use tracing::{debug, instrument, trace};

/// The maximum length of a path, in nibbles, which belongs to the upper subtrie of a
@@ -1059,7 +1059,7 @@ impl SparseTrie for ParallelSparseTrie {
// First, do a quick check if the value exists in either the upper or lower subtrie's values
// map. We assume that if there exists a leaf node, then its value will be in the `values`
// map.
if let Some(actual_value) = std::iter::once(self.upper_subtrie.as_ref())
if let Some(actual_value) = core::iter::once(self.upper_subtrie.as_ref())
.chain(self.lower_subtrie_for_path(full_path))
.filter_map(|subtrie| subtrie.inner.values.get(full_path))
.next()
@@ -1317,10 +1317,10 @@ impl SparseTrieExt for ParallelSparseTrie {

fn update_leaves(
&mut self,
updates: &mut alloy_primitives::map::B256Map<reth_trie_sparse::LeafUpdate>,
updates: &mut alloy_primitives::map::B256Map<crate::LeafUpdate>,
mut proof_required_fn: impl FnMut(B256, u8),
) -> SparseTrieResult<()> {
use reth_trie_sparse::{provider::NoRevealProvider, LeafUpdate};
use crate::{provider::NoRevealProvider, LeafUpdate};

// Collect keys upfront since we mutate `updates` during iteration.
// On success, entries are removed; on blinded node failure, they're re-inserted.
@@ -3690,7 +3690,11 @@ mod tests {
path_subtrie_index_unchecked, LowerSparseSubtrie, ParallelSparseTrie, SparseSubtrie,
SparseSubtrieType,
};
use crate::trie::ChangedSubtrie;
use crate::{
parallel::ChangedSubtrie,
provider::{DefaultTrieNodeProvider, RevealedNode, TrieNodeProvider},
LeafLookup, LeafLookupError, SparseNode, SparseTrie, SparseTrieExt, SparseTrieUpdates,
};
use alloy_primitives::{
b256, hex,
map::{B256Set, DefaultHashBuilder, HashMap},
@@ -3720,11 +3724,6 @@ mod tests {
ProofTrieNode, RlpNode, TrieMask, TrieNode, EMPTY_ROOT_HASH,
};
use reth_trie_db::DatabaseTrieCursorFactory;
use reth_trie_sparse::{
provider::{DefaultTrieNodeProvider, RevealedNode, TrieNodeProvider},
LeafLookup, LeafLookupError, SerialSparseTrie, SparseNode, SparseTrie, SparseTrieExt,
SparseTrieUpdates,
};
use std::collections::{BTreeMap, BTreeSet};

/// Pad nibbles to the length of a B256 hash with zeros on the right.
@@ -5135,7 +5134,7 @@ mod tests {
//
// After removing 0x123, the trie becomes empty
//
let mut trie = new_test_trie(std::iter::once((
let mut trie = new_test_trie(core::iter::once((
Nibbles::default(),
SparseNode::new_leaf(Nibbles::from_nibbles([0x1, 0x2, 0x3])),
)));
@@ -5599,7 +5598,7 @@ mod tests {

let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
run_hash_builder(
paths.iter().copied().zip(std::iter::repeat_with(value)),
paths.iter().copied().zip(core::iter::repeat_with(value)),
NoopAccountTrieCursor::default(),
Default::default(),
paths.clone(),
@@ -5608,7 +5607,7 @@ mod tests {
let mut sparse = ParallelSparseTrie::default().with_updates(true);
ctx.update_leaves(
&mut sparse,
paths.into_iter().zip(std::iter::repeat_with(value_encoded)),
paths.into_iter().zip(core::iter::repeat_with(value_encoded)),
);

ctx.assert_with_hash_builder(
@@ -5631,7 +5630,7 @@ mod tests {

let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
run_hash_builder(
paths.iter().copied().zip(std::iter::repeat_with(value)),
paths.iter().copied().zip(core::iter::repeat_with(value)),
NoopAccountTrieCursor::default(),
Default::default(),
paths.clone(),
@@ -5672,7 +5671,7 @@ mod tests {

let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
run_hash_builder(
paths.iter().sorted_unstable().copied().zip(std::iter::repeat_with(value)),
paths.iter().sorted_unstable().copied().zip(core::iter::repeat_with(value)),
NoopAccountTrieCursor::default(),
Default::default(),
paths.clone(),
@@ -5681,7 +5680,7 @@ mod tests {
let mut sparse = ParallelSparseTrie::default().with_updates(true);
ctx.update_leaves(
&mut sparse,
paths.iter().copied().zip(std::iter::repeat_with(value_encoded)),
paths.iter().copied().zip(core::iter::repeat_with(value_encoded)),
);
ctx.assert_with_hash_builder(
&mut sparse,
@@ -5711,7 +5710,7 @@ mod tests {

let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
run_hash_builder(
paths.iter().copied().zip(std::iter::repeat_with(|| old_value)),
paths.iter().copied().zip(core::iter::repeat_with(|| old_value)),
NoopAccountTrieCursor::default(),
Default::default(),
paths.clone(),
@@ -5720,7 +5719,7 @@ mod tests {
let mut sparse = ParallelSparseTrie::default().with_updates(true);
ctx.update_leaves(
&mut sparse,
paths.iter().copied().zip(std::iter::repeat(old_value_encoded)),
paths.iter().copied().zip(core::iter::repeat(old_value_encoded)),
);
ctx.assert_with_hash_builder(
&mut sparse,
@@ -5731,7 +5730,7 @@ mod tests {

let (hash_builder_root, hash_builder_updates, hash_builder_proof_nodes, _, _) =
run_hash_builder(
paths.iter().copied().zip(std::iter::repeat(new_value)),
paths.iter().copied().zip(core::iter::repeat(new_value)),
NoopAccountTrieCursor::default(),
Default::default(),
paths.clone(),
@@ -5739,7 +5738,7 @@ mod tests {

ctx.update_leaves(
&mut sparse,
paths.iter().copied().zip(std::iter::repeat(new_value_encoded)),
paths.iter().copied().zip(core::iter::repeat(new_value_encoded)),
);
ctx.assert_with_hash_builder(
&mut sparse,
@@ -6266,108 +6265,6 @@ mod tests {
});
}

#[test]
fn sparse_trie_fuzz_vs_serial() {
// Having only the first 3 nibbles set, we narrow down the range of keys
// to 4096 different hashes. It allows us to generate collisions more likely
// to test the sparse trie updates.
const KEY_NIBBLES_LEN: usize = 3;

fn test(updates: Vec<(BTreeMap<Nibbles, Account>, BTreeSet<Nibbles>)>) {
let default_provider = DefaultTrieNodeProvider;
let mut serial = SerialSparseTrie::default().with_updates(true);
let mut parallel = ParallelSparseTrie::default().with_updates(true);

for (update, keys_to_delete) in updates {
// Perform leaf updates on both tries
for (key, account) in update.clone() {
let account = account.into_trie_account(EMPTY_ROOT_HASH);
let mut account_rlp = Vec::new();
account.encode(&mut account_rlp);
serial.update_leaf(key, account_rlp.clone(), &default_provider).unwrap();
parallel.update_leaf(key, account_rlp, &default_provider).unwrap();
}

// Calculate roots and assert their equality
let serial_root = serial.root();
let parallel_root = parallel.root();
assert_eq!(parallel_root, serial_root);

// Assert that both tries produce the same updates
let serial_updates = serial.take_updates();
let parallel_updates = parallel.take_updates();
pretty_assertions::assert_eq!(
BTreeMap::from_iter(parallel_updates.updated_nodes),
BTreeMap::from_iter(serial_updates.updated_nodes),
);
pretty_assertions::assert_eq!(
BTreeSet::from_iter(parallel_updates.removed_nodes),
BTreeSet::from_iter(serial_updates.removed_nodes),
);

// Perform leaf removals on both tries
for key in &keys_to_delete {
parallel.remove_leaf(key, &default_provider).unwrap();
serial.remove_leaf(key, &default_provider).unwrap();
}

// Calculate roots and assert their equality
let serial_root = serial.root();
let parallel_root = parallel.root();
assert_eq!(parallel_root, serial_root);

// Assert that both tries produce the same updates
let serial_updates = serial.take_updates();
let parallel_updates = parallel.take_updates();
pretty_assertions::assert_eq!(
BTreeMap::from_iter(parallel_updates.updated_nodes),
BTreeMap::from_iter(serial_updates.updated_nodes),
);
pretty_assertions::assert_eq!(
BTreeSet::from_iter(parallel_updates.removed_nodes),
BTreeSet::from_iter(serial_updates.removed_nodes),
);
}
}

fn transform_updates(
updates: Vec<BTreeMap<Nibbles, Account>>,
mut rng: impl rand::Rng,
) -> Vec<(BTreeMap<Nibbles, Account>, BTreeSet<Nibbles>)> {
let mut keys = BTreeSet::new();
updates
.into_iter()
.map(|update| {
keys.extend(update.keys().copied());

let keys_to_delete_len = update.len() / 2;
let keys_to_delete = (0..keys_to_delete_len)
.map(|_| {
let key =
*rand::seq::IteratorRandom::choose(keys.iter(), &mut rng).unwrap();
keys.take(&key).unwrap()
})
.collect();

(update, keys_to_delete)
})
.collect::<Vec<_>>()
}

proptest!(ProptestConfig::with_cases(10), |(
updates in proptest::collection::vec(
proptest::collection::btree_map(
any_with::<Nibbles>(SizeRange::new(KEY_NIBBLES_LEN..=KEY_NIBBLES_LEN)).prop_map(pad_nibbles_right),
arb::<Account>(),
1..50,
),
1..50,
).prop_perturb(transform_updates)
)| {
test(updates)
});
}

#[test]
fn sparse_trie_two_leaves_at_lower_roots() {
let provider = DefaultTrieNodeProvider;
@@ -8192,7 +8089,7 @@ mod tests {
// the value must be removed when that path becomes a pruned root.
// This catches the bug where is_strict_descendant fails to remove p == pruned_root.

use reth_trie_sparse::provider::DefaultTrieNodeProvider;
use crate::provider::DefaultTrieNodeProvider;

let provider = DefaultTrieNodeProvider;
let mut parallel = ParallelSparseTrie::default();
@@ -8532,8 +8429,8 @@ mod tests {

#[test]
fn test_update_leaves_successful_update() {
use crate::LeafUpdate;
use alloy_primitives::map::B256Map;
use reth_trie_sparse::LeafUpdate;
use std::cell::RefCell;

let provider = DefaultTrieNodeProvider;
@@ -8567,8 +8464,8 @@ mod tests {

#[test]
fn test_update_leaves_insert_new_leaf() {
use crate::LeafUpdate;
use alloy_primitives::map::B256Map;
use reth_trie_sparse::LeafUpdate;
use std::cell::RefCell;

let mut trie = ParallelSparseTrie::default();
@@ -8604,8 +8501,8 @@ mod tests {

#[test]
fn test_update_leaves_blinded_node() {
use crate::LeafUpdate;
use alloy_primitives::map::B256Map;
use reth_trie_sparse::LeafUpdate;
use std::cell::RefCell;

// Create a trie with a blinded node
@@ -8680,8 +8577,8 @@ mod tests {

#[test]
fn test_update_leaves_removal() {
use crate::LeafUpdate;
use alloy_primitives::map::B256Map;
use reth_trie_sparse::LeafUpdate;
use std::cell::RefCell;

let provider = DefaultTrieNodeProvider;
@@ -8713,8 +8610,8 @@ mod tests {

#[test]
fn test_update_leaves_removal_blinded() {
use crate::LeafUpdate;
use alloy_primitives::map::B256Map;
use reth_trie_sparse::LeafUpdate;
use std::cell::RefCell;

// Create a trie with a blinded node
@@ -8797,8 +8694,8 @@ mod tests {

#[test]
fn test_update_leaves_removal_branch_collapse_blinded() {
use crate::LeafUpdate;
use alloy_primitives::map::B256Map;
use reth_trie_sparse::LeafUpdate;
use std::cell::RefCell;

// Create a branch node at root with two children:
@@ -8902,8 +8799,8 @@ mod tests {

#[test]
fn test_update_leaves_touched() {
use crate::LeafUpdate;
use alloy_primitives::map::B256Map;
use reth_trie_sparse::LeafUpdate;
use std::cell::RefCell;

let provider = DefaultTrieNodeProvider;
@@ -8946,8 +8843,8 @@ mod tests {

#[test]
fn test_update_leaves_touched_nonexistent() {
use crate::LeafUpdate;
use alloy_primitives::map::B256Map;
use reth_trie_sparse::LeafUpdate;
use std::cell::RefCell;

let mut trie = ParallelSparseTrie::default();
@@ -8992,8 +8889,8 @@ mod tests {

#[test]
fn test_update_leaves_touched_blinded() {
use crate::LeafUpdate;
use alloy_primitives::map::B256Map;
use reth_trie_sparse::LeafUpdate;
use std::cell::RefCell;

// Create a trie with a blinded node
@@ -9061,8 +8958,8 @@ mod tests {

#[test]
fn test_update_leaves_deduplication() {
use crate::LeafUpdate;
use alloy_primitives::map::B256Map;
use reth_trie_sparse::LeafUpdate;
use std::cell::RefCell;

// Create a trie with a blinded node
@@ -9133,8 +9030,8 @@ mod tests {

#[test]
fn test_update_leaves_node_not_found_in_provider_atomicity() {
use crate::LeafUpdate;
use alloy_primitives::map::B256Map;
use reth_trie_sparse::LeafUpdate;
use std::cell::RefCell;

// Create a trie with retain_updates enabled (this triggers the code path that
@@ -1,7 +1,7 @@
use crate::{
provider::{TrieNodeProvider, TrieNodeProviderFactory},
traits::{SparseTrie as SparseTrieTrait, SparseTrieExt},
RevealableSparseTrie, SerialSparseTrie,
ParallelSparseTrie, RevealableSparseTrie,
};
use alloc::{collections::VecDeque, vec::Vec};
use alloy_primitives::{
@@ -36,8 +36,8 @@ pub struct DeferredDrops {
#[derive(Debug)]
/// Sparse state trie representing lazy-loaded Ethereum state trie.
pub struct SparseStateTrie<
A = SerialSparseTrie, // Account trie implementation
S = SerialSparseTrie, // Storage trie implementation
A = ParallelSparseTrie, // Account trie implementation
S = ParallelSparseTrie, // Storage trie implementation
> {
/// Sparse account trie.
state: RevealableSparseTrie<A>,
@@ -143,16 +143,18 @@ impl<A, S> SparseStateTrie<A, S> {
}
}

impl SparseStateTrie {
/// Create new [`SparseStateTrie`] with the default trie implementation.
pub fn new() -> Self {
Self::default()
}
}

impl<A, S> SparseStateTrie<A, S>
where
A: SparseTrieTrait + Default,
S: SparseTrieTrait + Default + Clone,
{
/// Create new [`SparseStateTrie`]
pub fn new() -> Self {
Self::default()
}

/// Returns mutable reference to account trie.
pub const fn trie_mut(&mut self) -> &mut RevealableSparseTrie<A> {
&mut self.state
@@ -1179,7 +1181,7 @@ where
/// of [`SparseStateTrie`] both to help enforce allocation re-use and to allow us to implement
/// methods like `get_trie_and_revealed_paths` which return multiple mutable borrows.
#[derive(Debug, Default)]
struct StorageTries<S = SerialSparseTrie> {
struct StorageTries<S = ParallelSparseTrie> {
/// Sparse storage tries.
tries: B256Map<RevealableSparseTrie<S>>,
/// Cleared storage tries, kept for re-use.
@@ -1690,7 +1692,7 @@ fn filter_revealed_v2_proof_nodes(
#[cfg(test)]
mod tests {
use super::*;
use crate::provider::DefaultTrieNodeProviderFactory;
use crate::{provider::DefaultTrieNodeProviderFactory, LeafLookup, ParallelSparseTrie};
use alloy_primitives::{
b256,
map::{HashMap, HashSet},
@@ -1708,7 +1710,7 @@ mod tests {
#[test]
fn reveal_account_path_twice() {
let provider_factory = DefaultTrieNodeProviderFactory;
let mut sparse = SparseStateTrie::<SerialSparseTrie>::default();
let mut sparse = SparseStateTrie::<ParallelSparseTrie>::default();

let leaf_value = alloy_rlp::encode(TrieAccount::default());
let leaf_1 = alloy_rlp::encode(TrieNode::Leaf(LeafNode::new(
@@ -1738,11 +1740,10 @@ mod tests {

// Reveal multiproof and check that the state trie contains the leaf node and value
sparse.reveal_decoded_multiproof(multiproof.clone().try_into().unwrap()).unwrap();
assert!(sparse
.state_trie_ref()
.unwrap()
.nodes_ref()
.contains_key(&Nibbles::from_nibbles([0x0])),);
assert!(matches!(
sparse.state_trie_ref().unwrap().find_leaf(&Nibbles::from_nibbles([0x0]), None),
Ok(LeafLookup::Exists)
));
assert_eq!(
sparse.state_trie_ref().unwrap().get_leaf_value(&Nibbles::from_nibbles([0x0])),
Some(&leaf_value)
@@ -1751,11 +1752,10 @@ mod tests {
// Remove the leaf node and check that the state trie does not contain the leaf node and
// value
sparse.remove_account_leaf(&Nibbles::from_nibbles([0x0]), &provider_factory).unwrap();
assert!(!sparse
.state_trie_ref()
.unwrap()
.nodes_ref()
.contains_key(&Nibbles::from_nibbles([0x0])),);
assert!(matches!(
sparse.state_trie_ref().unwrap().find_leaf(&Nibbles::from_nibbles([0x0]), None),
Ok(LeafLookup::NonExistent)
));
assert!(sparse
.state_trie_ref()
.unwrap()
@@ -1765,11 +1765,10 @@ mod tests {
// Reveal multiproof again and check that the state trie still does not contain the leaf
// node and value, because they were already revealed before
sparse.reveal_decoded_multiproof(multiproof.try_into().unwrap()).unwrap();
assert!(!sparse
.state_trie_ref()
.unwrap()
.nodes_ref()
.contains_key(&Nibbles::from_nibbles([0x0])));
assert!(matches!(
sparse.state_trie_ref().unwrap().find_leaf(&Nibbles::from_nibbles([0x0]), None),
Ok(LeafLookup::NonExistent)
));
assert!(sparse
.state_trie_ref()
.unwrap()
@@ -1780,7 +1779,7 @@ mod tests {
#[test]
fn reveal_storage_path_twice() {
let provider_factory = DefaultTrieNodeProviderFactory;
let mut sparse = SparseStateTrie::<SerialSparseTrie>::default();
let mut sparse = SparseStateTrie::<ParallelSparseTrie>::default();

let leaf_value = alloy_rlp::encode(TrieAccount::default());
let leaf_1 = alloy_rlp::encode(TrieNode::Leaf(LeafNode::new(
@@ -1817,11 +1816,13 @@ mod tests {

// Reveal multiproof and check that the storage trie contains the leaf node and value
sparse.reveal_decoded_multiproof(multiproof.clone().try_into().unwrap()).unwrap();
assert!(sparse
.storage_trie_ref(&B256::ZERO)
.unwrap()
.nodes_ref()
.contains_key(&Nibbles::from_nibbles([0x0])),);
assert!(matches!(
sparse
.storage_trie_ref(&B256::ZERO)
.unwrap()
.find_leaf(&Nibbles::from_nibbles([0x0]), None),
Ok(LeafLookup::Exists)
));
assert_eq!(
sparse
.storage_trie_ref(&B256::ZERO)
@@ -1835,11 +1836,13 @@ mod tests {
sparse
.remove_storage_leaf(B256::ZERO, &Nibbles::from_nibbles([0x0]), &provider_factory)
.unwrap();
assert!(!sparse
.storage_trie_ref(&B256::ZERO)
.unwrap()
.nodes_ref()
.contains_key(&Nibbles::from_nibbles([0x0])),);
assert!(matches!(
sparse
.storage_trie_ref(&B256::ZERO)
.unwrap()
.find_leaf(&Nibbles::from_nibbles([0x0]), None),
Ok(LeafLookup::NonExistent)
));
assert!(sparse
.storage_trie_ref(&B256::ZERO)
.unwrap()
@@ -1849,11 +1852,13 @@ mod tests {
// Reveal multiproof again and check that the storage trie still does not contain the leaf
// node and value, because they were already revealed before
sparse.reveal_decoded_multiproof(multiproof.try_into().unwrap()).unwrap();
assert!(!sparse
.storage_trie_ref(&B256::ZERO)
.unwrap()
.nodes_ref()
.contains_key(&Nibbles::from_nibbles([0x0])));
assert!(matches!(
sparse
.storage_trie_ref(&B256::ZERO)
.unwrap()
.find_leaf(&Nibbles::from_nibbles([0x0]), None),
Ok(LeafLookup::NonExistent)
));
assert!(sparse
.storage_trie_ref(&B256::ZERO)
.unwrap()
@@ -1864,7 +1869,7 @@ mod tests {
#[test]
fn reveal_v2_proof_nodes() {
let provider_factory = DefaultTrieNodeProviderFactory;
let mut sparse = SparseStateTrie::<SerialSparseTrie>::default();
let mut sparse = SparseStateTrie::<ParallelSparseTrie>::default();

let leaf_value = alloy_rlp::encode(TrieAccount::default());
let leaf_1_node = TrieNode::Leaf(LeafNode::new(Nibbles::default(), leaf_value.clone()));
@@ -1896,11 +1901,10 @@ mod tests {
sparse.reveal_account_v2_proof_nodes(v2_proof_nodes.clone()).unwrap();

// Check that the state trie contains the leaf node and value
assert!(sparse
.state_trie_ref()
.unwrap()
.nodes_ref()
.contains_key(&Nibbles::from_nibbles([0x0])));
assert!(matches!(
sparse.state_trie_ref().unwrap().find_leaf(&Nibbles::from_nibbles([0x0]), None),
Ok(LeafLookup::Exists)
));
assert_eq!(
sparse.state_trie_ref().unwrap().get_leaf_value(&Nibbles::from_nibbles([0x0])),
Some(&leaf_value)
@@ -1926,7 +1930,7 @@ mod tests {
#[test]
fn reveal_storage_v2_proof_nodes() {
let provider_factory = DefaultTrieNodeProviderFactory;
let mut sparse = SparseStateTrie::<SerialSparseTrie>::default();
let mut sparse = SparseStateTrie::<ParallelSparseTrie>::default();

let storage_value: Vec<u8> = alloy_rlp::encode_fixed_size(&U256::from(42)).to_vec();
let leaf_1_node = TrieNode::Leaf(LeafNode::new(Nibbles::default(), storage_value.clone()));
@@ -1950,11 +1954,13 @@ mod tests {
sparse.reveal_storage_v2_proof_nodes(B256::ZERO, v2_proof_nodes.clone()).unwrap();

// Check that the storage trie contains the leaf node and value
assert!(sparse
.storage_trie_ref(&B256::ZERO)
.unwrap()
.nodes_ref()
.contains_key(&Nibbles::from_nibbles([0x0])));
assert!(matches!(
sparse
.storage_trie_ref(&B256::ZERO)
.unwrap()
.find_leaf(&Nibbles::from_nibbles([0x0]), None),
Ok(LeafLookup::Exists)
));
assert_eq!(
sparse
.storage_trie_ref(&B256::ZERO)
@@ -2038,7 +2044,7 @@ mod tests {
let proof_nodes = hash_builder.take_proof_nodes();

let provider_factory = DefaultTrieNodeProviderFactory;
let mut sparse = SparseStateTrie::<SerialSparseTrie>::default().with_updates(true);
let mut sparse = SparseStateTrie::<ParallelSparseTrie>::default().with_updates(true);
sparse
.reveal_decoded_multiproof(
MultiProof {

File diff suppressed because it is too large
@@ -22,7 +22,7 @@ use reth_execution_errors::{
use reth_trie_common::{MultiProofTargets, Nibbles};
use reth_trie_sparse::{
provider::{RevealedNode, TrieNodeProvider, TrieNodeProviderFactory},
SerialSparseTrie, SparseStateTrie,
SparseStateTrie,
};
use std::sync::mpsc;

@@ -151,7 +151,7 @@ where
ProofTrieNodeProviderFactory::new(self.trie_cursor_factory, self.hashed_cursor_factory),
tx,
);
let mut sparse_trie = SparseStateTrie::<SerialSparseTrie>::new();
let mut sparse_trie = SparseStateTrie::new();
sparse_trie.reveal_multiproof(multiproof)?;

// Attempt to update state trie to gather additional information for the witness.
