From bb55687f985e45cd05cfa954a83bf04879287cad Mon Sep 17 00:00:00 2001 From: Brian Picciano Date: Wed, 11 Mar 2026 12:47:34 +0100 Subject: [PATCH] test(trie): Implement TrieTestHarness (#22923) Co-authored-by: Amp --- .changelog/neat-ducks-whisper.md | 6 + crates/trie/sparse/src/arena/mod.rs | 277 +------ .../trie/sparse/tests/suite/commit_updates.rs | 4 +- crates/trie/sparse/tests/suite/lifecycle.rs | 56 +- crates/trie/sparse/tests/suite/main.rs | 142 +--- crates/trie/sparse/tests/suite/prune.rs | 6 +- .../trie/sparse/tests/suite/reveal_nodes.rs | 44 +- crates/trie/sparse/tests/suite/root.rs | 11 +- crates/trie/sparse/tests/suite/set_root.rs | 6 +- .../trie/sparse/tests/suite/update_leaves.rs | 75 +- crates/trie/sparse/tests/suite/wipe_clear.rs | 12 +- crates/trie/trie/src/proof_v2/mod.rs | 739 +++--------------- crates/trie/trie/src/test_utils.rs | 247 ++++++ 13 files changed, 553 insertions(+), 1072 deletions(-) create mode 100644 .changelog/neat-ducks-whisper.md diff --git a/.changelog/neat-ducks-whisper.md b/.changelog/neat-ducks-whisper.md new file mode 100644 index 0000000000..4ae17663d5 --- /dev/null +++ b/.changelog/neat-ducks-whisper.md @@ -0,0 +1,6 @@ +--- +reth-trie: patch +reth-trie-sparse: patch +--- + +Refactored test harness for sparse trie tests by extracting `TrieTestHarness` into a shared `reth-trie` test utility, replacing duplicated inline harness code across multiple test modules. Updated `proof_v2` return type to include an optional root hash, and converted `original_root` and `storage` from public fields to accessor methods. 
diff --git a/crates/trie/sparse/src/arena/mod.rs b/crates/trie/sparse/src/arena/mod.rs index ddf85893cc..42d66eb5be 100644 --- a/crates/trie/sparse/src/arena/mod.rs +++ b/crates/trie/sparse/src/arena/mod.rs @@ -2818,173 +2818,39 @@ impl SparseTrie for ArenaParallelSparseTrie { mod tests { use super::TRACE_TARGET; use crate::{ArenaParallelSparseTrie, ArenaParallelismThresholds, LeafUpdate, SparseTrie}; - use alloy_primitives::{ - map::{B256Map, HashSet}, - B256, U256, - }; + use alloy_primitives::{map::B256Map, B256, U256}; use rand::{seq::SliceRandom, Rng, SeedableRng}; - use reth_trie::{ - hashed_cursor::{ - mock::MockHashedCursorFactory, HashedCursorFactory, HashedPostStateCursorFactory, - }, - prefix_set::PrefixSet, - proof_v2::StorageProofCalculator, - trie_cursor::{mock::MockTrieCursorFactory, TrieCursorFactory}, - StorageRoot, - }; - use reth_trie_common::{ - prefix_set::PrefixSetMut, updates::StorageTrieUpdates, HashedPostStateSorted, - HashedStorage, Nibbles, ProofTrieNodeV2, ProofV2Target, - }; - use std::{collections::BTreeMap, iter::once}; - use tracing::{info, trace, trace_span}; + use reth_trie::test_utils::TrieTestHarness; + use reth_trie_common::{Nibbles, ProofV2Target}; + use std::collections::BTreeMap; + use tracing::{info, trace}; - /// A fixed hashed address used by the harness for all storage trie operations. - const HASHED_ADDRESS: B256 = B256::ZERO; - - /// Test harness for proptest-based arena sparse trie testing of a single storage trie. + /// Test harness for proptest-based arena sparse trie testing. /// - /// Accepts a `BTreeMap` of hashed storage slots as the starting state, - /// computes the initial `StorageTrieUpdates` via `StorageRoot`, and stores both sorted - /// forms for later use. Exposes a `proof_v2` method that generates storage proofs using - /// mock cursors over the starting state. 
+ /// Wraps [`TrieTestHarness`] and adds `ArenaParallelSparseTrie`-specific helpers for + /// the reveal-update loop and asserting that sparse trie updates match `StorageRoot`. struct ArenaTrieTestHarness { - /// The base storage dataset (hashed slot → value). Zero-valued entries are absent. - storage: BTreeMap, - /// The expected storage root, calculated by `StorageRoot`. - original_root: B256, - /// The starting storage trie updates (unsorted), used for `minimize_trie_updates`. - storage_trie_updates: StorageTrieUpdates, - /// Mock factory for trie cursors. - trie_cursor_factory: MockTrieCursorFactory, - /// Mock factory for hashed cursors. - hashed_cursor_factory: MockHashedCursorFactory, + /// The inner general-purpose harness. + inner: TrieTestHarness, + } + + impl std::ops::Deref for ArenaTrieTestHarness { + type Target = TrieTestHarness; + fn deref(&self) -> &Self::Target { + &self.inner + } + } + + impl std::ops::DerefMut for ArenaTrieTestHarness { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.inner + } } impl ArenaTrieTestHarness { /// Creates a new test harness from a map of hashed storage slots to values. - /// - /// Computes the storage root and `StorageTrieUpdates` using `StorageRoot` with mock - /// cursors, then stores both sorted forms. fn new(storage: BTreeMap) -> Self { - let mut harness = Self { - storage: BTreeMap::new(), - original_root: B256::ZERO, - storage_trie_updates: StorageTrieUpdates::default(), - trie_cursor_factory: MockTrieCursorFactory::new( - BTreeMap::new(), - Default::default(), - ), - hashed_cursor_factory: MockHashedCursorFactory::new( - BTreeMap::new(), - Default::default(), - ), - }; - harness.apply_changeset(storage); - harness - } - - /// Merges `changeset` into the base storage (zero values remove entries) and - /// recomputes the storage root, trie updates, and cursor factories. 
- fn apply_changeset(&mut self, changeset: BTreeMap) { - for (k, v) in changeset { - if v == U256::ZERO { - self.storage.remove(&k); - } else { - self.storage.insert(k, v); - } - } - - self.hashed_cursor_factory = MockHashedCursorFactory::new( - BTreeMap::new(), - once((HASHED_ADDRESS, self.storage.clone())).collect(), - ); - - let empty_trie_cursor_factory = MockTrieCursorFactory::new( - BTreeMap::new(), - once((HASHED_ADDRESS, BTreeMap::new())).collect(), - ); - - // Compute storage root and trie updates. - let (original_root, _, storage_trie_updates) = { - let _span = trace_span!(target: TRACE_TARGET, "base_root_calc").entered(); - trace!(target: TRACE_TARGET, "Calculating root and trie updates of base dataset"); - - StorageRoot::new_hashed( - empty_trie_cursor_factory, - self.hashed_cursor_factory.clone(), - HASHED_ADDRESS, - PrefixSet::default(), - #[cfg(feature = "metrics")] - reth_trie::metrics::TrieRootMetrics::new(reth_trie::TrieType::Storage), - ) - .root_with_updates() - .expect("StorageRoot should succeed") - }; - - self.trie_cursor_factory = MockTrieCursorFactory::new( - BTreeMap::new(), - once(( - HASHED_ADDRESS, - storage_trie_updates - .storage_nodes - .iter() - .map(|(k, v)| (*k, v.clone())) - .collect(), - )) - .collect(), - ); - - self.original_root = original_root; - self.storage_trie_updates = storage_trie_updates; - } - - /// Removes all entries from `updates` that are redundant with the starting storage - /// trie updates. - /// - /// A storage node is redundant if it exists in the starting set with the same value. - /// A removed node is redundant if it was already absent from the starting set. - /// The `is_deleted` flag is cleared if it matches the starting value. - fn minimize_trie_updates(&self, updates: &mut StorageTrieUpdates) { - // Clear is_deleted if it matches the starting set. 
- if updates.is_deleted == self.storage_trie_updates.is_deleted { - updates.is_deleted = false; - } - - // StorageTrieUpdates::finalize can leave the same path in both storage_nodes - // and removed_nodes. Per into_sorted, updated nodes take precedence over - // removed ones. Record which paths had an update before minimization so we - // can drop their corresponding removals. - let paths_with_updates: HashSet = - updates.storage_nodes.keys().copied().collect(); - - // Remove storage nodes identical to the starting set. - updates.storage_nodes.retain(|path, node| { - self.storage_trie_updates.storage_nodes.get(path) != Some(node) - }); - - // Remove removed_nodes for paths absent from the starting set, and also - // for paths that had a storage_nodes entry (update takes precedence over - // removal). - updates.removed_nodes.retain(|path| { - self.storage_trie_updates.storage_nodes.contains_key(path) && - !paths_with_updates.contains(path) - }); - } - - /// Removes all entries from `updates` that are redundant with the starting storage - /// trie updates. Same logic as [`Self::minimize_trie_updates`] but for - /// [`SparseTrieUpdates`]. - fn minimize_sparse_updates(&self, updates: &mut crate::SparseTrieUpdates) { - // Remove updated nodes identical to the starting set. - updates.updated_nodes.retain(|path, node| { - self.storage_trie_updates.storage_nodes.get(path) != Some(node) - }); - - // Remove removed_nodes for paths absent from the starting set. - updates - .removed_nodes - .retain(|path| self.storage_trie_updates.storage_nodes.contains_key(path)); + Self { inner: TrieTestHarness::new(storage) } } /// Computes the new storage root and trie updates after applying the given changes @@ -2995,50 +2861,11 @@ mod tests { apst: &mut ArenaParallelSparseTrie, changes: BTreeMap, ) { - // Build prefix set from changed keys. 
- let mut prefix_set = PrefixSetMut::with_capacity(changes.len()); - for hashed_slot in changes.keys() { - prefix_set.insert(Nibbles::unpack(hashed_slot)); - } - let prefix_set = prefix_set.freeze(); - // Compute expected root and trie updates via StorageRoot. let (expected_root, mut expected_trie_updates) = if changes.is_empty() { - (self.original_root, Default::default()) + (self.original_root(), Default::default()) } else { - // Build sorted overlay from changes. - let hashed_storage = - HashedStorage::from_iter(false, changes.iter().map(|(&k, &v)| (k, v))); - - let overlay = HashedPostStateSorted::new( - Vec::new(), - once((HASHED_ADDRESS, hashed_storage.into_sorted())).collect(), - ); - - // Create overlay cursor factory on top of the existing base. - let overlay_cursor_factory = - HashedPostStateCursorFactory::new(self.hashed_cursor_factory.clone(), &overlay); - - let (root, _, trie_updates) = { - let _span = trace_span!(target: TRACE_TARGET, "changeset_root_calc").entered(); - trace!( - target: TRACE_TARGET, - "Recalculating root and trie updates with changeset applied", - ); - - StorageRoot::new_hashed( - self.trie_cursor_factory.clone(), - overlay_cursor_factory, - HASHED_ADDRESS, - prefix_set, - #[cfg(feature = "metrics")] - reth_trie::metrics::TrieRootMetrics::new(reth_trie::TrieType::Storage), - ) - .root_with_updates() - .expect("StorageRoot should succeed") - }; - - (root, trie_updates) + self.get_root_with_updates(&changes) }; self.minimize_trie_updates(&mut expected_trie_updates); @@ -3070,14 +2897,22 @@ mod tests { break; } - let mut proof_nodes = self.proof_v2(&mut targets); + let (mut proof_nodes, _) = self.proof_v2(&mut targets); apst.reveal_nodes(&mut proof_nodes).expect("reveal_nodes should succeed"); } // Compute root and take updates from the APST. 
let actual_root = apst.root(); let mut actual_updates = apst.take_updates(); - self.minimize_sparse_updates(&mut actual_updates); + + // Minimize sparse updates inline (can't use TrieTestHarness::minimize_sparse_updates + // due to the crate's SparseTrieUpdates being a different type than reth-trie's copy). + actual_updates.updated_nodes.retain(|path, node| { + self.storage_trie_updates().storage_nodes.get(path) != Some(node) + }); + actual_updates + .removed_nodes + .retain(|path| self.storage_trie_updates().storage_nodes.contains_key(path)); pretty_assertions::assert_eq!( expected_trie_updates.storage_nodes.into_iter().collect::>().sort(), @@ -3091,42 +2926,6 @@ mod tests { ); assert_eq!(expected_root, actual_root, "storage root mismatch"); } - - /// Obtains the root node of the storage trie via `StorageProofCalculator`. - fn root_node(&self) -> ProofTrieNodeV2 { - let trie_cursor = self - .trie_cursor_factory - .storage_trie_cursor(HASHED_ADDRESS) - .expect("storage trie cursor should succeed"); - let hashed_cursor = self - .hashed_cursor_factory - .hashed_storage_cursor(HASHED_ADDRESS) - .expect("hashed storage cursor should succeed"); - - let mut proof_calculator = - StorageProofCalculator::new_storage(trie_cursor, hashed_cursor); - proof_calculator - .storage_root_node(HASHED_ADDRESS) - .expect("storage_root_node should succeed") - } - - /// Generates storage proofs for the given targets using `StorageProofCalculator`. 
- fn proof_v2(&self, targets: &mut [ProofV2Target]) -> Vec { - let trie_cursor = self - .trie_cursor_factory - .storage_trie_cursor(HASHED_ADDRESS) - .expect("storage trie cursor should succeed"); - let hashed_cursor = self - .hashed_cursor_factory - .hashed_storage_cursor(HASHED_ADDRESS) - .expect("hashed storage cursor should succeed"); - - let mut proof_calculator = - StorageProofCalculator::new_storage(trie_cursor, hashed_cursor); - proof_calculator - .storage_proof(HASHED_ADDRESS, targets) - .expect("proof_v2 should succeed") - } } use proptest::prelude::*; @@ -3207,7 +3006,7 @@ mod tests { harness.apply_changeset(changeset1); // Pick N random keys from the current storage as retained leaves for pruning. - let mut all_storage_keys: Vec = harness.storage.keys() + let mut all_storage_keys: Vec = harness.storage().keys() .map(|k| Nibbles::unpack(*k)) .collect(); all_storage_keys.shuffle(&mut rng); @@ -3217,7 +3016,7 @@ mod tests { let retained: Vec = all_storage_keys[..num_retain].to_vec(); apst.prune(&retained); - let changeset2 = build_changeset(&harness.storage, changeset2_new_keys, overlap_pct, delete_pct, &mut rng); + let changeset2 = build_changeset(harness.storage(), changeset2_new_keys, overlap_pct, delete_pct, &mut rng); for (i, (k, v)) in changeset2.iter().enumerate() { trace!(target: TRACE_TARGET, ?i, ?k, ?v, "Changeset 2 entry"); } diff --git a/crates/trie/sparse/tests/suite/commit_updates.rs b/crates/trie/sparse/tests/suite/commit_updates.rs index 63fe58e9fe..da1917201d 100644 --- a/crates/trie/sparse/tests/suite/commit_updates.rs +++ b/crates/trie/sparse/tests/suite/commit_updates.rs @@ -63,7 +63,7 @@ pub(super) fn test_commit_updates_syncs_branch_masks() (key_e, U256::from(5)), ]); let expected_harness = SuiteTestHarness::new(expected_storage); - assert_eq!(hash2, expected_harness.original_root, "hash2 should match reference trie"); + assert_eq!(hash2, expected_harness.original_root(), "hash2 should match reference trie"); // updates2 should NOT 
contain the same paths as updates1 — commit_updates // resets the baseline so only the delta from round 2 is reported. @@ -93,7 +93,7 @@ pub(super) fn test_commit_updates_empty_is_noop() { let mut trie: T = harness.init_trie_fully_revealed(true); let hash1 = trie.root(); - assert_eq!(hash1, harness.original_root); + assert_eq!(hash1, harness.original_root()); trie.commit_updates(&HashMap::default(), &HashSet::default()); diff --git a/crates/trie/sparse/tests/suite/lifecycle.rs b/crates/trie/sparse/tests/suite/lifecycle.rs index 13367ebc30..7d33a2e2a8 100644 --- a/crates/trie/sparse/tests/suite/lifecycle.rs +++ b/crates/trie/sparse/tests/suite/lifecycle.rs @@ -47,7 +47,7 @@ pub(super) fn test_full_lifecycle_update_root_take_commit = BTreeMap::new(); @@ -135,7 +135,11 @@ pub(super) fn test_multi_round_update_commit_prune_cycle() expected_storage.remove(&keys[2]); let expected_harness = SuiteTestHarness::new(expected_storage); assert_eq!( - root, expected_harness.original_root, + root, + expected_harness.original_root(), "root should match reference after modifications and removal" ); } @@ -229,7 +234,7 @@ pub(super) fn test_incremental_reveal_and_update_with_retry() { a1_storage.insert(key, U256::from(i as u64 + 1)); } let mut a1_harness = SuiteTestHarness::new(a1_storage.clone()); - let a1_initial_root = a1_harness.original_root; + let a1_initial_root = a1_harness.original_root(); // A2 storage trie: 3 slots let mut a2_storage: BTreeMap = BTreeMap::new(); @@ -286,7 +292,7 @@ pub(super) fn test_full_block_processing_lifecycle() { a2_storage.insert(key, U256::from(i as u64 + 10)); } let mut a2_harness = SuiteTestHarness::new(a2_storage.clone()); - let a2_initial_root = a2_harness.original_root; + let a2_initial_root = a2_harness.original_root(); // Account trie keys. let mut acct_keys = Vec::new(); @@ -351,10 +357,10 @@ pub(super) fn test_full_block_processing_lifecycle() { // Verify storage roots match references. 
a1_harness.apply_changeset(a1_changeset); - assert_eq!(sr1, a1_harness.original_root, "A1 storage root should match reference"); + assert_eq!(sr1, a1_harness.original_root(), "A1 storage root should match reference"); a2_harness.apply_changeset(a2_changeset); - assert_eq!(sr2, a2_harness.original_root, "A2 storage root should match reference"); + assert_eq!(sr2, a2_harness.original_root(), "A2 storage root should match reference"); // --- Account promotion phase --- // Encode A1 with sr1, A2 with sr2, modify A3 (balance change = different value). @@ -370,7 +376,7 @@ pub(super) fn test_full_block_processing_lifecycle() { // Verify state root matches reference. acct_harness.apply_changeset(acct_changeset); - assert_eq!(state_root, acct_harness.original_root, "state root should match reference"); + assert_eq!(state_root, acct_harness.original_root(), "state root should match reference"); // --- Finalize: take_updates --- let acct_updates = acct_trie.take_updates(); @@ -449,7 +455,11 @@ pub(super) fn test_touched_prewarm_then_changed_update( let root = trie.root(); harness.apply_changeset(changeset); - assert_eq!(root, harness.original_root, "root should match reference after Touched + Changed"); + assert_eq!( + root, + harness.original_root(), + "root should match reference after Touched + Changed" + ); } /// Touched on blinded path triggers proof callback, then Changed @@ -499,7 +509,7 @@ pub(super) fn test_touched_on_blinded_triggers_proof_then_changed_succeeds< assert!(!leaf_updates.is_empty(), "Touched key should remain in map when blinded"); // Step 2: Reveal the proof for the requested targets. - let mut proof_nodes = harness.proof_v2(&mut targets); + let (mut proof_nodes, _) = harness.proof_v2(&mut targets); trie.reveal_nodes(&mut proof_nodes).expect("reveal should succeed"); // Step 3: Replace Touched with Changed(new_value) in the map. 
@@ -519,7 +529,8 @@ pub(super) fn test_touched_on_blinded_triggers_proof_then_changed_succeeds< changeset.insert(target_key, new_value); harness.apply_changeset(changeset); assert_eq!( - root, harness.original_root, + root, + harness.original_root(), "root should match reference after Touched-miss → reveal → Changed" ); } @@ -570,7 +581,8 @@ pub(super) fn test_get_leaf_value_for_storage_root_lookup() { let root1 = trie.root(); harness.apply_changeset(changeset1); - assert_eq!(root1, harness.original_root, "block 1 root should match reference"); + assert_eq!(root1, harness.original_root(), "block 1 root should match reference"); let updates1 = trie.take_updates(); trie.commit_updates(&updates1.updated_nodes, &updates1.removed_nodes); @@ -683,7 +695,8 @@ pub(super) fn test_prune_then_reuse_for_next_block() { harness.apply_changeset(changeset_hot); assert_eq!( - root_hot, harness.original_root, + root_hot, + harness.original_root(), "hot path root should match reference (K1 updated)" ); @@ -696,7 +709,8 @@ pub(super) fn test_prune_then_reuse_for_next_block() { harness.apply_changeset(changeset_cold); assert_eq!( - root_cold, harness.original_root, + root_cold, + harness.original_root(), "cold path root should match reference (K1 and K5 updated)" ); } diff --git a/crates/trie/sparse/tests/suite/main.rs b/crates/trie/sparse/tests/suite/main.rs index b27c165cee..a770c197da 100644 --- a/crates/trie/sparse/tests/suite/main.rs +++ b/crates/trie/sparse/tests/suite/main.rs @@ -22,16 +22,8 @@ use alloy_primitives::{map::B256Map, B256, U256}; use alloy_rlp::{encode_fixed_size, Decodable}; use alloy_trie::EMPTY_ROOT_HASH; -use reth_trie::{ - hashed_cursor::{mock::MockHashedCursorFactory, HashedCursorFactory}, - prefix_set::PrefixSet, - proof_v2::StorageProofCalculator, - trie_cursor::{mock::MockTrieCursorFactory, TrieCursorFactory}, - StorageRoot, -}; -use reth_trie_common::{ - updates::StorageTrieUpdates, Nibbles, ProofTrieNodeV2, ProofV2Target, TrieNodeV2, -}; +use 
reth_trie::test_utils::TrieTestHarness; +use reth_trie_common::{Nibbles, ProofV2Target, TrieNodeV2}; use reth_trie_sparse::{LeafLookup, LeafLookupError, LeafUpdate, SparseTrie}; use std::{ collections::{BTreeMap, HashMap, HashSet}, @@ -51,122 +43,36 @@ mod take_updates; mod update_leaves; mod wipe_clear; -/// A fixed hashed address used by the harness for all storage trie operations. -const HASHED_ADDRESS: B256 = B256::ZERO; - // --------------------------------------------------------------------------- // Test harness // --------------------------------------------------------------------------- -/// Generic test harness for `SparseTrie` tests. +/// Test harness for `SparseTrie` tests. /// -/// Manages a base storage dataset, computes expected roots via `StorageRoot`, and generates -/// V2 proofs via `StorageProofCalculator` using mock cursors. +/// Wraps [`TrieTestHarness`] and adds `SparseTrie`-specific helpers for reveal-update loops, +/// trie initialization, and leaf update construction. struct SuiteTestHarness { - /// The base storage dataset (hashed slot → value). Zero-valued entries are absent. - storage: BTreeMap, - /// The expected storage root, calculated by `StorageRoot`. - original_root: B256, - /// The starting storage trie updates, used for minimization. - storage_trie_updates: StorageTrieUpdates, - /// Mock factory for trie cursors. - trie_cursor_factory: MockTrieCursorFactory, - /// Mock factory for hashed cursors. - hashed_cursor_factory: MockHashedCursorFactory, + /// The inner general-purpose harness. + inner: TrieTestHarness, +} + +impl std::ops::Deref for SuiteTestHarness { + type Target = TrieTestHarness; + fn deref(&self) -> &Self::Target { + &self.inner + } +} + +impl std::ops::DerefMut for SuiteTestHarness { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.inner + } } impl SuiteTestHarness { /// Creates a new test harness from a map of hashed storage slots to values. 
fn new(storage: BTreeMap) -> Self { - let mut harness = Self { - storage: BTreeMap::new(), - original_root: B256::ZERO, - storage_trie_updates: StorageTrieUpdates::default(), - trie_cursor_factory: MockTrieCursorFactory::new(BTreeMap::new(), Default::default()), - hashed_cursor_factory: MockHashedCursorFactory::new( - BTreeMap::new(), - Default::default(), - ), - }; - harness.apply_changeset(storage); - harness - } - - /// Merges `changeset` into the base storage (zero values remove entries) and - /// recomputes the storage root, trie updates, and cursor factories. - fn apply_changeset(&mut self, changeset: BTreeMap) { - for (k, v) in changeset { - if v == U256::ZERO { - self.storage.remove(&k); - } else { - self.storage.insert(k, v); - } - } - - self.hashed_cursor_factory = MockHashedCursorFactory::new( - BTreeMap::new(), - once((HASHED_ADDRESS, self.storage.clone())).collect(), - ); - - let empty_trie_cursor_factory = MockTrieCursorFactory::new( - BTreeMap::new(), - once((HASHED_ADDRESS, BTreeMap::new())).collect(), - ); - - let (original_root, _, storage_trie_updates) = StorageRoot::new_hashed( - empty_trie_cursor_factory, - self.hashed_cursor_factory.clone(), - HASHED_ADDRESS, - PrefixSet::default(), - #[cfg(feature = "metrics")] - reth_trie::metrics::TrieRootMetrics::new(reth_trie::TrieType::Storage), - ) - .root_with_updates() - .expect("StorageRoot should succeed"); - - self.trie_cursor_factory = MockTrieCursorFactory::new( - BTreeMap::new(), - once(( - HASHED_ADDRESS, - storage_trie_updates.storage_nodes.iter().map(|(k, v)| (*k, v.clone())).collect(), - )) - .collect(), - ); - - self.original_root = original_root; - self.storage_trie_updates = storage_trie_updates; - } - - /// Obtains the root node of the storage trie via `StorageProofCalculator`. 
- fn root_node(&self) -> ProofTrieNodeV2 { - let trie_cursor = self - .trie_cursor_factory - .storage_trie_cursor(HASHED_ADDRESS) - .expect("storage trie cursor should succeed"); - let hashed_cursor = self - .hashed_cursor_factory - .hashed_storage_cursor(HASHED_ADDRESS) - .expect("hashed storage cursor should succeed"); - - let mut proof_calculator = StorageProofCalculator::new_storage(trie_cursor, hashed_cursor); - proof_calculator - .storage_root_node(HASHED_ADDRESS) - .expect("storage_root_node should succeed") - } - - /// Generates storage proofs for the given targets using `StorageProofCalculator`. - fn proof_v2(&self, targets: &mut [ProofV2Target]) -> Vec { - let trie_cursor = self - .trie_cursor_factory - .storage_trie_cursor(HASHED_ADDRESS) - .expect("storage trie cursor should succeed"); - let hashed_cursor = self - .hashed_cursor_factory - .hashed_storage_cursor(HASHED_ADDRESS) - .expect("hashed storage cursor should succeed"); - - let mut proof_calculator = StorageProofCalculator::new_storage(trie_cursor, hashed_cursor); - proof_calculator.storage_proof(HASHED_ADDRESS, targets).expect("proof_v2 should succeed") + Self { inner: TrieTestHarness::new(storage) } } /// Builds leaf updates from a changeset. Non-zero values become inserts/modifies, @@ -204,7 +110,7 @@ impl SuiteTestHarness { break; } - let mut proof_nodes = self.proof_v2(&mut targets); + let (mut proof_nodes, _) = self.proof_v2(&mut targets); trie.reveal_nodes(&mut proof_nodes).expect("reveal_nodes should succeed"); } } @@ -224,7 +130,7 @@ impl SuiteTestHarness { if !target_keys.is_empty() { let mut targets: Vec = target_keys.iter().map(|k| ProofV2Target::new(*k)).collect(); - let mut proof_nodes = self.proof_v2(&mut targets); + let (mut proof_nodes, _) = self.proof_v2(&mut targets); trie.reveal_nodes(&mut proof_nodes).expect("reveal_nodes should succeed"); } @@ -233,7 +139,7 @@ impl SuiteTestHarness { /// Initializes a trie and reveals proofs for all keys in the base storage. 
fn init_trie_fully_revealed(&self, retain_updates: bool) -> T { - let keys: Vec = self.storage.keys().copied().collect(); + let keys: Vec = self.storage().keys().copied().collect(); self.init_trie_with_targets(&keys, retain_updates) } } diff --git a/crates/trie/sparse/tests/suite/prune.rs b/crates/trie/sparse/tests/suite/prune.rs index ddeb16ebb4..23c62eb0eb 100644 --- a/crates/trie/sparse/tests/suite/prune.rs +++ b/crates/trie/sparse/tests/suite/prune.rs @@ -182,7 +182,7 @@ pub(super) fn test_prune_then_update_and_recompute_root let mut expected_storage = storage; expected_storage.insert(keys[0], new_value); let expected_harness = SuiteTestHarness::new(expected_storage); - let expected_root = expected_harness.original_root; + let expected_root = expected_harness.original_root(); assert_eq!(root_after, expected_root, "root after prune + update should match reference trie"); } @@ -217,7 +217,7 @@ pub(super) fn test_prune_then_reveal_pruned_subtree() { let mut expected_storage = storage; expected_storage.insert(keys[2], new_value); let expected_harness = SuiteTestHarness::new(expected_storage); - let expected_root = expected_harness.original_root; + let expected_root = expected_harness.original_root(); assert_eq!( root_after, expected_root, @@ -338,7 +338,7 @@ pub(super) fn test_prune_handles_small_subtrie_root_nodes() { // Set root and reveal only one leaf's proof. let mut trie: T = harness.init_trie_with_targets(&[key_a], true); let root = trie.root(); - assert_eq!(root, harness.original_root); + assert_eq!(root, harness.original_root()); } /// Double reveal doesn't corrupt state. @@ -68,12 +68,12 @@ pub(super) fn test_reveal_nodes_idempotent() { // First reveal: set root and reveal all proof nodes. let mut trie: T = harness.init_trie_fully_revealed(true); let root_first = trie.root(); - assert_eq!(root_first, harness.original_root); + assert_eq!(root_first, harness.original_root()); // Second reveal: reveal the same proof nodes again. 
- let keys: Vec = harness.storage.keys().copied().collect(); + let keys: Vec = harness.storage().keys().copied().collect(); let mut targets: Vec = keys.iter().map(|k| ProofV2Target::new(*k)).collect(); - let mut proof_nodes = harness.proof_v2(&mut targets); + let (mut proof_nodes, _) = harness.proof_v2(&mut targets); trie.reveal_nodes(&mut proof_nodes).expect("second reveal_nodes should succeed"); let root_second = trie.root(); @@ -137,9 +137,9 @@ pub(super) fn test_reveal_nodes_skips_on_empty_root() { ]); let harness = SuiteTestHarness::new(storage); - let keys: Vec = harness.storage.keys().copied().collect(); + let keys: Vec = harness.storage().keys().copied().collect(); let mut targets: Vec = keys.iter().map(|k| ProofV2Target::new(*k)).collect(); - let mut proof_nodes = harness.proof_v2(&mut targets); + let (mut proof_nodes, _) = harness.proof_v2(&mut targets); // Create a trie with an empty root. let mut trie = T::default(); @@ -196,33 +196,38 @@ pub(super) fn test_reveal_nodes_filters_unreachable_boundary_leaves = keys_a.iter().map(|k| ProofV2Target::new(*k)).collect(); - let mut proof_a = harness.proof_v2(&mut targets_a); + let (mut proof_a, _) = harness.proof_v2(&mut targets_a); trie.reveal_nodes(&mut proof_a).expect("reveal group A should succeed"); // Verify root is correct with partial reveal. let root_after_a = trie.root(); - assert_eq!(root_after_a, harness.original_root, "root after group A reveal should match"); + assert_eq!(root_after_a, harness.original_root(), "root after group A reveal should match"); // Now generate proofs for group B keys and reveal them as extra nodes. // These include boundary leaves that extend the trie into subtries the partial // reveal didn't cover. They should be handled gracefully. 
let mut targets_b: Vec = keys_b.iter().map(|k| ProofV2Target::new(*k)).collect(); - let mut proof_b = harness.proof_v2(&mut targets_b); + let (mut proof_b, _) = harness.proof_v2(&mut targets_b); trie.reveal_nodes(&mut proof_b).expect("reveal extra group B nodes should succeed"); // Root must still be correct — extra reveals should not corrupt state. let root_after_b = trie.root(); - assert_eq!(root_after_b, harness.original_root, "root should be unchanged after extra reveal"); + assert_eq!( + root_after_b, + harness.original_root(), + "root should be unchanged after extra reveal" + ); // Additionally, reveal ALL proofs at once (including duplicates) — still correct. let mut targets_all: Vec = keys_a.iter().chain(keys_b.iter()).map(|k| ProofV2Target::new(*k)).collect(); - let mut proof_all = harness.proof_v2(&mut targets_all); + let (mut proof_all, _) = harness.proof_v2(&mut targets_all); trie.reveal_nodes(&mut proof_all).expect("reveal all nodes should succeed"); let root_after_all = trie.root(); assert_eq!( - root_after_all, harness.original_root, + root_after_all, + harness.original_root(), "root should be unchanged after revealing all proofs" ); } @@ -254,7 +259,7 @@ pub(super) fn test_reveal_insert_reveal_preserves_branch_state() { let mut trie: T = harness.init_trie_fully_revealed(false); let original_root = trie.root(); - assert_eq!(original_root, harness.original_root, "initial root should match reference"); + assert_eq!(original_root, harness.original_root(), "initial root should match reference"); // Modify key_b's value from 2 to 999. 
let changes: BTreeMap = BTreeMap::from([(key_b, U256::from(999))]); @@ -65,7 +65,8 @@ pub(super) fn test_root_after_single_leaf_update() { BTreeMap::from([(key_a, U256::from(1)), (key_b, U256::from(999)), (key_c, U256::from(3))]); let expected_harness = SuiteTestHarness::new(expected_storage); assert_eq!( - new_root, expected_harness.original_root, + new_root, + expected_harness.original_root(), "root should match updated reference trie" ); } @@ -124,7 +125,7 @@ pub(super) fn test_root_deterministic_across_update_orders = pairs.iter().copied().collect(); let harness = SuiteTestHarness::new(all_storage); - assert_eq!(root_a, harness.original_root, "root should match reference trie"); + assert_eq!(root_a, harness.original_root(), "root should match reference trie"); } /// Small RLP root (<32 bytes) handled correctly. @@ -142,7 +143,7 @@ pub(super) fn test_root_handles_small_root_node_without_hash() { let harness = SuiteTestHarness::new(storage); let mut trie: T = harness.init_trie_fully_revealed(true); let root = trie.root(); - assert_eq!(root, harness.original_root); + assert_eq!(root, harness.original_root()); } /// Single-leaf root initializes correctly. @@ -31,7 +31,7 @@ pub(super) fn test_set_root_with_leaf_node() { let mut trie = T::default(); trie.set_root(root_node.node, root_node.masks, true).expect("set_root should succeed"); let root = trie.root(); - assert_eq!(root, harness.original_root); + assert_eq!(root, harness.original_root()); } /// Extension root (shared prefix) initializes correctly. @@ -51,7 +51,7 @@ pub(super) fn test_set_root_with_extension_node() { let harness = SuiteTestHarness::new(storage); let mut trie: T = harness.init_trie_fully_revealed(true); let root = trie.root(); - assert_eq!(root, harness.original_root); + assert_eq!(root, harness.original_root()); } /// `retain_updates=true` enables update tracking. 
diff --git a/crates/trie/sparse/tests/suite/update_leaves.rs b/crates/trie/sparse/tests/suite/update_leaves.rs index e8518e2c49..fc3f137ad4 100644 --- a/crates/trie/sparse/tests/suite/update_leaves.rs +++ b/crates/trie/sparse/tests/suite/update_leaves.rs @@ -38,7 +38,8 @@ pub(super) fn test_update_leaves_insert_new_leaf() { ]); let expected_harness = SuiteTestHarness::new(expected_storage); assert_eq!( - root, expected_harness.original_root, + root, + expected_harness.original_root(), "root should match reference trie with 4 leaves" ); } @@ -72,7 +73,8 @@ pub(super) fn test_update_leaves_modify_existing_leaf() BTreeMap::from([(key1, U256::from(1)), (key2, new_value), (key3, U256::from(3))]); let expected_harness = SuiteTestHarness::new(expected_storage); assert_eq!( - root, expected_harness.original_root, + root, + expected_harness.original_root(), "root should match reference trie with modified leaf value" ); } @@ -100,7 +102,8 @@ pub(super) fn test_insert_single_leaf_into_empty_trie() let expected_harness = SuiteTestHarness::new(BTreeMap::from([(key, value)])); assert_eq!( - root, expected_harness.original_root, + root, + expected_harness.original_root(), "root should match reference single-leaf trie" ); } @@ -134,7 +137,7 @@ pub(super) fn test_insert_multiple_leaves_into_empty_trie() .expect("update_leaves should succeed"); let hash1 = trie.root(); - assert_eq!(hash1, expected_old.original_root, "hash1 should match reference with old values"); + assert_eq!(hash1, expected_old.original_root(), "hash1 should match reference with old values"); // Update all 256 keys with new values. 
let mut leaf_updates = SuiteTestHarness::leaf_updates(&new_storage); @@ -182,7 +185,7 @@ pub(super) fn test_update_all_leaves_with_new_values() .expect("update_leaves should succeed"); let hash2 = trie.root(); - assert_eq!(hash2, expected_new.original_root, "hash2 should match reference with new values"); + assert_eq!(hash2, expected_new.original_root(), "hash2 should match reference with new values"); assert_ne!(hash1, hash2, "roots should differ after updating all values"); } @@ -218,7 +221,7 @@ pub(super) fn test_two_leaves_at_adjacent_keys_root_correctness() { let expected_storage = BTreeMap::from([(key1, U256::from(1)), (key3, U256::from(3))]); let expected_harness = SuiteTestHarness::new(expected_storage); assert_eq!( - root, expected_harness.original_root, + root, + expected_harness.original_root(), "root should match reference trie with removed leaf" ); } @@ -299,7 +303,8 @@ pub(super) fn test_remove_leaf_branch_collapses_to_extension let expected_storage = BTreeMap::from([(key_b, U256::from(200))]); let expected_harness = SuiteTestHarness::new(expected_storage); assert_eq!( - root, expected_harness.original_root, + root, + expected_harness.original_root(), "root should match reference trie after branch-to-leaf collapse" ); @@ -401,7 +407,7 @@ pub(super) fn test_insert_then_remove_sequence() { harness.reveal_and_update(&mut trie, &mut leaf_updates); let root_after_insert = trie.root(); - assert_eq!(root_after_insert, harness.original_root, "root after inserting all 6 leaves"); + assert_eq!(root_after_insert, harness.original_root(), "root after inserting all 6 leaves"); // Remove leaves one at a time in the same order as the original test: k3, k1, k4, k6, k2, k5. 
let removal_order = [k3, k1, k4, k6, k2, k5]; @@ -414,7 +420,7 @@ pub(super) fn test_insert_then_remove_sequence() { let root = trie.root(); assert_eq!( root, - harness.original_root, + harness.original_root(), "root mismatch after removal step {} (removed key {:?})", i + 1, key @@ -443,7 +449,7 @@ pub(super) fn test_remove_nonexistent_leaf_preserves_hashes() { assert!(!leaf_updates.is_empty(), "key should remain in map after blinded hit"); // Reveal the proof for the requested targets. - let mut proof_nodes = harness.proof_v2(&mut targets); + let (mut proof_nodes, _) = harness.proof_v2(&mut targets); trie.reveal_nodes(&mut proof_nodes).expect("reveal_nodes should succeed"); // Second update_leaves: now the path is revealed, key should be drained. @@ -572,7 +578,11 @@ pub(super) fn test_update_leaves_retry_after_reveal() { let expected_harness = SuiteTestHarness::new(expected_storage); let root = trie.root(); - assert_eq!(root, expected_harness.original_root, "root should match expected trie after retry"); + assert_eq!( + root, + expected_harness.original_root(), + "root should match expected trie after retry" + ); } pub(super) fn test_remove_leaf_blinded_sibling_requires_reveal() { @@ -613,7 +623,7 @@ pub(super) fn test_remove_leaf_blinded_sibling_requires_reveal let root = trie.root(); assert_eq!( - root, expected_harness.original_root, + root, + expected_harness.original_root(), "root should match reference trie with mixed updates applied" ); } @@ -1034,7 +1046,7 @@ pub(super) fn test_remove_leaf_marks_ancestors_dirty_unconditionally = keys.iter().map(|k| ProofV2Target::new(*k)).collect(); - let mut proof_nodes = harness.proof_v2(&mut targets); + let (mut proof_nodes, _) = harness.proof_v2(&mut targets); trie.reveal_nodes(&mut proof_nodes).expect("reveal_nodes should succeed"); // Insert all leaves via update_leaves — do NOT call root() so no hashes are cached. 
@@ -1058,7 +1070,8 @@ pub(super) fn test_remove_leaf_marks_ancestors_dirty_unconditionally = once((key_c, U256::ZERO)).collect(); @@ -1129,7 +1142,7 @@ pub(super) fn test_orphaned_value_update_falls_through_to_full_insertion< let mut removal_updates = SuiteTestHarness::leaf_updates(&removal); harness.reveal_and_update(&mut trie, &mut removal_updates); let root2 = trie.root(); - assert_eq!(root2, harness.original_root, "root after removal should match"); + assert_eq!(root2, harness.original_root(), "root after removal should match"); // Step 2: Re-insert key_c with a new value — this re-creates the branch. let reinsert: BTreeMap = once((key_c, U256::from(33))).collect(); @@ -1137,7 +1150,7 @@ pub(super) fn test_orphaned_value_update_falls_through_to_full_insertion< let mut reinsert_updates = SuiteTestHarness::leaf_updates(&reinsert); harness.reveal_and_update(&mut trie, &mut reinsert_updates); let root3 = trie.root(); - assert_eq!(root3, harness.original_root, "root after re-insert should match"); + assert_eq!(root3, harness.original_root(), "root after re-insert should match"); // Step 3: Update key_a — previously could be orphaned if branch collapse // didn't maintain structural tracking properly. @@ -1146,7 +1159,7 @@ pub(super) fn test_orphaned_value_update_falls_through_to_full_insertion< let mut update_updates = SuiteTestHarness::leaf_updates(&update); harness.reveal_and_update(&mut trie, &mut update_updates); let root4 = trie.root(); - assert_eq!(root4, harness.original_root, "root after updating key_a should match"); + assert_eq!(root4, harness.original_root(), "root after updating key_a should match"); // Verify the invariant: every key with a value must be findable via find_leaf. let final_keys = [key_a, key_b, key_c, key_d, key_e]; @@ -1193,7 +1206,8 @@ pub(super) fn test_branch_collapse_updates_leaf_key_len_across_subtries() { // Compute root to confirm the trie is populated. 
let root_before = trie.root(); - assert_eq!(root_before, harness.original_root); + assert_eq!(root_before, harness.original_root()); assert_ne!(root_before, EMPTY_ROOT_HASH); // Wipe and verify empty root. @@ -41,7 +41,7 @@ pub(super) fn test_clear_resets_trie_but_preserves_update_tracking() { // Compute root to populate the trie fully. let root_before = trie.root(); - assert_eq!(root_before, harness.original_root); + assert_eq!(root_before, harness.original_root()); assert_ne!(root_before, EMPTY_ROOT_HASH); // Wipe the trie. @@ -103,7 +103,7 @@ pub(super) fn test_clear_then_reuse_trie() { let mut trie: T = harness_1.init_trie_fully_revealed(false); let root_1 = trie.root(); - assert_eq!(root_1, harness_1.original_root); + assert_eq!(root_1, harness_1.original_root()); assert_ne!(root_1, EMPTY_ROOT_HASH); // Phase 2: clear the trie and verify empty. @@ -125,7 +125,7 @@ pub(super) fn test_clear_then_reuse_trie() { let keys_2: Vec = storage_2.keys().copied().collect(); let mut targets: Vec = keys_2.iter().map(|k| ProofV2Target::new(*k)).collect(); - let mut proof_nodes = harness_2.proof_v2(&mut targets); + let (mut proof_nodes, _) = harness_2.proof_v2(&mut targets); trie.reveal_nodes(&mut proof_nodes).expect("reveal_nodes should succeed on cleared trie"); // Phase 4: insert a 4th leaf. @@ -140,7 +140,7 @@ pub(super) fn test_clear_then_reuse_trie() { // Update the reference harness with the 4th leaf. 
harness_2.apply_changeset(changeset); - assert_eq!(root_2, harness_2.original_root, "root after clear+reuse must match reference"); + assert_eq!(root_2, harness_2.original_root(), "root after clear+reuse must match reference"); assert_ne!(root_2, root_1, "new root must differ from pre-clear root"); assert_ne!(root_2, EMPTY_ROOT_HASH, "new root must not be empty"); } diff --git a/crates/trie/trie/src/proof_v2/mod.rs b/crates/trie/trie/src/proof_v2/mod.rs index 402cb8cc01..3cefb50af6 100644 --- a/crates/trie/trie/src/proof_v2/mod.rs +++ b/crates/trie/trie/src/proof_v2/mod.rs @@ -1625,28 +1625,17 @@ enum PopCachedBranchOutcome { mod tests { use super::*; use crate::{ - hashed_cursor::{ - mock::MockHashedCursorFactory, HashedCursorFactory, HashedCursorMetricsCache, - InstrumentedHashedCursor, - }, - proof::Proof, - trie_cursor::{ - depth_first, mock::MockTrieCursorFactory, InstrumentedTrieCursor, TrieCursorFactory, - TrieCursorMetricsCache, - }, + hashed_cursor::HashedCursorFactory, + proof::StorageProof as LegacyStorageProof, + test_utils::TrieTestHarness, + trie_cursor::{depth_first, TrieCursorFactory}, }; - use alloy_primitives::map::{B256Map, B256Set}; + use alloy_primitives::map::B256Set; use alloy_rlp::Decodable; use alloy_trie::proof::AddedRemovedKeys; use itertools::Itertools; - use reth_primitives_traits::Account; - use reth_trie_common::{ - updates::{StorageTrieUpdates, TrieUpdates}, - HashedPostState, MultiProofTargets, ProofTrieNode, TrieNode, - }; - - /// Target to use with the `tracing` crate. - static TRACE_TARGET: &str = "trie::proof_v2::tests"; + use reth_trie_common::{ProofTrieNode, TrieNode}; + use std::collections::BTreeMap; /// Converts legacy proofs to V2 proofs by combining extension nodes with their child branch /// nodes. @@ -1666,135 +1655,78 @@ mod tests { ) } - /// A test harness for comparing `ProofCalculator` and legacy `Proof` implementations. 
+ /// A test harness for comparing `StorageProofCalculator` and legacy `StorageProof` + /// implementations. /// - /// This harness creates mock cursor factories from a `HashedPostState` and provides - /// a method to test that both proof implementations produce equivalent results. + /// Wraps [`TrieTestHarness`] and adds a method to test that both proof implementations + /// produce equivalent results for storage proofs. struct ProofTestHarness { - /// Mock factory for trie cursors (empty by default for leaf-only tests) - trie_cursor_factory: MockTrieCursorFactory, - /// Mock factory for hashed cursors, populated from `HashedPostState` - hashed_cursor_factory: MockHashedCursorFactory, - /// The expected state root, calculated by `StateRoot` - expected_root: B256, + inner: TrieTestHarness, + } + + impl std::ops::Deref for ProofTestHarness { + type Target = TrieTestHarness; + fn deref(&self) -> &Self::Target { + &self.inner + } } impl ProofTestHarness { - /// Creates a new test harness from a `HashedPostState`. - /// - /// The `HashedPostState` is used to populate the mock hashed cursor factory directly. - /// The trie cursor factory is initialized from `TrieUpdates` generated by `StateRoot`. - fn new(post_state: HashedPostState) -> Self { - // Create empty trie cursor factory to serve as the initial state for StateRoot - // Ensure that there's a storage trie dataset for every account, to make - // `MockTrieCursorFactory` happy. 
- let storage_tries: B256Map<_> = post_state - .accounts - .keys() - .copied() - .map(|addr| (addr, StorageTrieUpdates::default())) - .collect(); - - let empty_trie_cursor_factory = MockTrieCursorFactory::from_trie_updates(TrieUpdates { - storage_tries: storage_tries.clone(), - ..Default::default() - }); - - // Create mock hashed cursor factory from the post state - let hashed_cursor_factory = MockHashedCursorFactory::from_hashed_post_state(post_state); - - // Generate TrieUpdates using StateRoot - let (expected_root, mut trie_updates) = - crate::StateRoot::new(empty_trie_cursor_factory, hashed_cursor_factory.clone()) - .root_with_updates() - .expect("StateRoot should succeed"); - - // Continue using empty storage tries for each account, to keep `MockTrieCursorFactory` - // happy. - trie_updates.storage_tries = storage_tries; - - // Initialize trie cursor factory from the generated TrieUpdates - let trie_cursor_factory = MockTrieCursorFactory::from_trie_updates(trie_updates); - - Self { trie_cursor_factory, hashed_cursor_factory, expected_root } + /// Creates a new test harness from a map of hashed storage slots to values. + fn new(storage: BTreeMap) -> Self { + Self { inner: TrieTestHarness::new(storage) } } - /// Asserts that `ProofCalculator` and legacy `Proof` produce equivalent results for account - /// proofs. - /// - /// This method calls both implementations with the given account targets and compares - /// the results. + /// Asserts that `StorageProofCalculator` and legacy `StorageProof` produce equivalent + /// results for storage proofs. 
fn assert_proof( &self, targets: impl IntoIterator, ) -> Result<(), StateProofError> { - let targets_vec = targets.into_iter().collect::>(); + let mut targets_vec = targets.into_iter().collect::>(); - // Convert ProofV2Target keys to MultiProofTargets for legacy implementation - // For account-only proofs, each account maps to an empty storage set - // Legacy implementation only uses the keys, not the prefix + // Get v2 proof and root hash via harness + let (proof_v2_result, root_hash) = self.proof_v2(&mut targets_vec); + + // Verify the root hash matches the expected root (if the proof contains a root + // node) + if let Some(root_hash) = root_hash { + pretty_assertions::assert_eq!(self.original_root(), root_hash); + } + + // Convert ProofV2Target keys to B256Set for legacy implementation let legacy_targets = targets_vec .iter() - .map(|target| (B256::from_slice(&target.key_nibbles.pack()), B256Set::default())) - .collect::(); + .map(|target| B256::from_slice(&target.key_nibbles.pack())) + .collect::(); - // Create ProofCalculator (proof_v2) with account cursors - let trie_cursor = self.trie_cursor_factory.account_trie_cursor()?; - let hashed_cursor = self.hashed_cursor_factory.hashed_account_cursor()?; - - // Collect metrics for cursors - let mut trie_cursor_metrics = TrieCursorMetricsCache::default(); - let trie_cursor = InstrumentedTrieCursor::new(trie_cursor, &mut trie_cursor_metrics); - let mut hashed_cursor_metrics = HashedCursorMetricsCache::default(); - let hashed_cursor = - InstrumentedHashedCursor::new(hashed_cursor, &mut hashed_cursor_metrics); - - // Call ProofCalculator::proof with account targets - let mut value_encoder = SyncAccountValueEncoder::new( - self.trie_cursor_factory.clone(), - self.hashed_cursor_factory.clone(), - ); - let mut proof_calculator = ProofCalculator::new(trie_cursor, hashed_cursor); - let proof_v2_result = - proof_calculator.proof(&mut value_encoder, &mut targets_vec.clone())?; - - // Output metrics - trace!(target: 
TRACE_TARGET, ?trie_cursor_metrics, "V2 trie cursor metrics"); - trace!(target: TRACE_TARGET, ?hashed_cursor_metrics, "V2 hashed cursor metrics"); - - // Call Proof::multiproof (legacy implementation) - let proof_legacy_result = - Proof::new(self.trie_cursor_factory.clone(), self.hashed_cursor_factory.clone()) - .with_branch_node_masks(true) - .with_added_removed_keys(Some( - // This will force the HashBuilder to always retain the child branch of all - // extensions. We need this because in V2 extensions and branches are a - // single node type, so child branches are always included with extensions. - AddedRemovedKeys::default().with_assume_added(true), - )) - .multiproof(legacy_targets)?; + // Call legacy StorageProof::storage_multiproof + let proof_legacy_result = LegacyStorageProof::new_hashed( + self.trie_cursor_factory(), + self.hashed_cursor_factory(), + self.hashed_address(), + ) + .with_branch_node_masks(true) + .with_added_removed_keys(Some(AddedRemovedKeys::default().with_assume_added(true))) + .storage_multiproof(legacy_targets)?; // Helper function to check if a node path matches at least one target let node_matches_target = |node_path: &Nibbles| -> bool { targets_vec.iter().any(|target| { - // Node path must be a prefix of the target's key target.key_nibbles.starts_with(node_path) && - // Node path must be at least `min_len` long - node_path.len() >= target.min_len as usize + node_path.len() >= target.min_len as usize }) }; // Decode and sort legacy proof nodes let proof_legacy_nodes = proof_legacy_result - .account_subtree + .subtree .iter() .map(|(path, node_enc)| { let mut buf = node_enc.as_ref(); let node = TrieNode::decode(&mut buf) .expect("legacy implementation should not produce malformed proof nodes"); - // The legacy proof calculator will calculate masks for the root node, even - // though we never store the root node so the masks for it aren't really valid. 
let masks = if path.is_empty() { None } else { @@ -1809,68 +1741,51 @@ mod tests { // Convert legacy proofs to V2 proofs by combining extensions with their child branches let proof_legacy_nodes_v2 = convert_legacy_proofs_to_v2(&proof_legacy_nodes); - // Filter to only keep nodes which match a target. We do this after conversion so we - // don't keep branches whose extension parents are excluded due to a min_len. + // Filter both results to only keep nodes which match a target. The v2 + // storage_proof returns an EmptyRoot node even when there are no targets, so + // both sides need the same filtering. let proof_legacy_nodes_v2 = proof_legacy_nodes_v2 .into_iter() .filter(|ProofTrieNodeV2 { path, .. }| node_matches_target(path)) .collect::>(); - // Basic comparison: both should succeed and produce identical results + let proof_v2_result = proof_v2_result + .into_iter() + .filter(|ProofTrieNodeV2 { path, .. }| node_matches_target(path)) + .collect::>(); + pretty_assertions::assert_eq!(proof_legacy_nodes_v2, proof_v2_result); - // Also test root_node - get a fresh calculator and verify it returns the root node - // that hashes to the expected root - let trie_cursor = self.trie_cursor_factory.account_trie_cursor()?; - let hashed_cursor = self.hashed_cursor_factory.hashed_account_cursor()?; - let mut value_encoder = SyncAccountValueEncoder::new( - self.trie_cursor_factory.clone(), - self.hashed_cursor_factory.clone(), - ); - let mut proof_calculator = ProofCalculator::new(trie_cursor, hashed_cursor); - let root_node = proof_calculator.root_node(&mut value_encoder)?; - - // The root node should be at the empty path - assert!(root_node.path.is_empty(), "root_node should return node at empty path"); - - // The hash of the root node should match the expected root from legacy StateRoot - let root_hash = proof_calculator - .compute_root_hash(&[root_node])? 
- .expect("root_node returns a node at empty path"); - pretty_assertions::assert_eq!(self.expected_root, root_hash); - Ok(()) } } /// Tests that `clear_computation_state` properly resets internal stacks, allowing a - /// `ProofCalculator` to be reused after a mid-computation error left stale state. + /// `StorageProofCalculator` to be reused after a mid-computation error left stale state. /// Before the fix, stale data in `branch_stack`, `child_stack`, and `branch_path` /// could cause a `usize` underflow panic in `pop_branch`. #[test] fn test_proof_calculator_reuse_after_error() { - use alloy_primitives::U256; - reth_tracing::init_test_tracing(); - let mut post_state = HashedPostState::default(); - let addresses = [ + let slots = [ B256::right_padding_from(&[0x10]), B256::right_padding_from(&[0x20]), B256::right_padding_from(&[0x30]), B256::right_padding_from(&[0x40]), ]; - for addr in &addresses { - let account = - Account { nonce: 1, balance: U256::from(100u64), bytecode_hash: Some(B256::ZERO) }; - post_state.accounts.insert(*addr, Some(account)); - } + let storage: BTreeMap = + slots.iter().map(|&s| (s, U256::from(100u64))).collect(); - let harness = ProofTestHarness::new(post_state); + let harness = ProofTestHarness::new(storage); - let trie_cursor = harness.trie_cursor_factory.account_trie_cursor().unwrap(); - let hashed_cursor = harness.hashed_cursor_factory.hashed_account_cursor().unwrap(); - let mut proof_calculator = ProofCalculator::new(trie_cursor, hashed_cursor); + let trie_cursor_factory = harness.trie_cursor_factory(); + let hashed_cursor_factory = harness.hashed_cursor_factory(); + + let hashed_address = harness.hashed_address(); + let trie_cursor = trie_cursor_factory.storage_trie_cursor(hashed_address).unwrap(); + let hashed_cursor = hashed_cursor_factory.hashed_storage_cursor(hashed_address).unwrap(); + let mut proof_calculator = StorageProofCalculator::new_storage(trie_cursor, hashed_cursor); // Simulate stale state left by a mid-computation 
error: push fake entries onto internal // stacks and set a non-empty branch_path. @@ -1889,88 +1804,61 @@ mod tests { .push(ProofTrieBranchChild::RlpNode(RlpNode::word_rlp(&B256::ZERO))); proof_calculator.branch_path = Nibbles::from_nibbles([0x1, 0x2, 0x3]); - // clear_computation_state should reset everything so a subsequent proof() call works. + // clear_computation_state should reset everything so a subsequent call works. proof_calculator.clear_computation_state(); - let mut value_encoder = SyncAccountValueEncoder::new( - harness.trie_cursor_factory.clone(), - harness.hashed_cursor_factory.clone(), - ); - let mut sorted_addresses = addresses.to_vec(); - sorted_addresses.sort(); + let mut sorted_slots = slots.to_vec(); + sorted_slots.sort(); let mut targets: Vec = - sorted_addresses.iter().copied().map(ProofV2Target::new).collect(); + sorted_slots.iter().copied().map(ProofV2Target::new).collect(); - let result = proof_calculator.proof(&mut value_encoder, &mut targets).unwrap(); + let result = proof_calculator.storage_proof(hashed_address, &mut targets).unwrap(); // Compare against a fresh calculator to verify correctness. 
- let trie_cursor = harness.trie_cursor_factory.account_trie_cursor().unwrap(); - let hashed_cursor = harness.hashed_cursor_factory.hashed_account_cursor().unwrap(); - let mut fresh_calculator = ProofCalculator::new(trie_cursor, hashed_cursor); - let mut value_encoder = SyncAccountValueEncoder::new( - harness.trie_cursor_factory.clone(), - harness.hashed_cursor_factory, - ); - let fresh_result = fresh_calculator.proof(&mut value_encoder, &mut targets).unwrap(); + let trie_cursor = trie_cursor_factory.storage_trie_cursor(hashed_address).unwrap(); + let hashed_cursor = hashed_cursor_factory.hashed_storage_cursor(hashed_address).unwrap(); + let mut fresh_calculator = StorageProofCalculator::new_storage(trie_cursor, hashed_cursor); + let fresh_result = fresh_calculator.storage_proof(hashed_address, &mut targets).unwrap(); pretty_assertions::assert_eq!(fresh_result, result); } mod proptest_tests { use super::*; - use alloy_primitives::{map::B256Map, U256}; use proptest::prelude::*; - use reth_trie_common::HashedPostState; - /// Generate a strategy for Account values - fn account_strategy() -> impl Strategy { - (any::(), any::(), any::<[u8; 32]>()).prop_map( - |(nonce, balance, code_hash)| Account { - nonce, - balance: U256::from(balance), - bytecode_hash: Some(B256::from(code_hash)), - }, - ) + /// Generate a strategy for storage datasets (hashed slot → value). 
+ fn storage_strategy() -> impl Strategy> { + prop::collection::vec((any::<[u8; 32]>(), any::()), 0..=100).prop_map(|slots| { + slots + .into_iter() + .map(|(slot_bytes, value)| (B256::from(slot_bytes), U256::from(value))) + .filter(|(_, v)| *v != U256::ZERO) + .collect() + }) } - /// Generate a strategy for `HashedPostState` with random accounts - fn hashed_post_state_strategy() -> impl Strategy { - prop::collection::vec((any::<[u8; 32]>(), account_strategy()), 0..=100).prop_map( - |accounts| { - let account_map = accounts - .into_iter() - .map(|(addr_bytes, account)| (B256::from(addr_bytes), Some(account))) - .collect::>(); - - HashedPostState { accounts: account_map, ..Default::default() } - }, - ) - } - - /// Generate a strategy for proof targets that are 80% from the `HashedPostState` accounts + /// Generate a strategy for proof targets that are 80% from existing storage slots /// and 20% random keys. Each target has a random `min_len` of 0..16. fn proof_targets_strategy( - account_keys: Vec, + slot_keys: Vec, ) -> impl Strategy> { - let num_accounts = account_keys.len(); + let num_slots = slot_keys.len(); - // Generate between 0 and (num_accounts + 5) targets - let target_count = 0..=(num_accounts + 5); + let target_count = 0..=(num_slots + 5); target_count.prop_flat_map(move |count| { - let account_keys = account_keys.clone(); + let slot_keys = slot_keys.clone(); prop::collection::vec( ( - prop::bool::weighted(0.8).prop_flat_map(move |from_accounts| { - if from_accounts && !account_keys.is_empty() { - // 80% chance: pick from existing account keys - prop::sample::select(account_keys.clone()).boxed() + prop::bool::weighted(0.8).prop_flat_map(move |from_slots| { + if from_slots && !slot_keys.is_empty() { + prop::sample::select(slot_keys.clone()).boxed() } else { - // 20% chance: generate random B256 any::<[u8; 32]>().prop_map(B256::from).boxed() } }), - 0u8..16u8, // Random min_len from 0 to 15 + 0u8..16u8, ) .prop_map(|(key, min_len)| 
ProofV2Target::new(key).with_min_len(min_len)), count, @@ -1981,27 +1869,19 @@ mod tests { proptest! { #![proptest_config(ProptestConfig::with_cases(4000))] #[test] - /// Tests that ProofCalculator produces valid proofs for randomly generated - /// HashedPostState with proof targets. - /// - /// This test: - /// - Generates random accounts in a HashedPostState - /// - Generates proof targets: 80% from existing account keys, 20% random - /// - Creates a test harness with the generated state - /// - Calls assert_proof with the generated targets - /// - Verifies both ProofCalculator and legacy Proof produce equivalent results + /// Tests that `StorageProofCalculator` produces valid proofs for randomly generated + /// storage datasets with proof targets. fn proptest_proof_with_targets( - (post_state, targets) in hashed_post_state_strategy() - .prop_flat_map(|post_state| { - let mut account_keys: Vec = post_state.accounts.keys().copied().collect(); - // Sort to ensure deterministic order when using PROPTEST_RNG_SEED - account_keys.sort_unstable(); - let targets_strategy = proof_targets_strategy(account_keys); - (Just(post_state), targets_strategy) + (storage, targets) in storage_strategy() + .prop_flat_map(|storage| { + let mut slot_keys: Vec = storage.keys().copied().collect(); + slot_keys.sort_unstable(); + let targets_strategy = proof_targets_strategy(slot_keys); + (Just(storage), targets_strategy) }) ) { reth_tracing::init_test_tracing(); - let harness = ProofTestHarness::new(post_state); + let harness = ProofTestHarness::new(storage); harness.assert_proof(targets).expect("Proof generation failed"); } @@ -2021,423 +1901,26 @@ mod tests { B256::from_slice(&buf) }; - // Generate random HashedPostState. - let mut post_state = HashedPostState::default(); + // Generate random storage dataset. 
+ let mut storage = BTreeMap::new(); for _ in 0..10240 { - let hashed_addr = rand_b256(); - let account = Account { bytecode_hash: Some(hashed_addr), ..Default::default() }; - post_state.accounts.insert(hashed_addr, Some(account)); + let hashed_slot = rand_b256(); + storage.insert(hashed_slot, U256::from(1u64)); } // Collect targets; partially from real keys, partially random keys which probably won't // exist. - let mut targets = post_state.accounts.keys().copied().collect::>(); - for _ in 0..post_state.accounts.len() / 5 { + let mut targets = storage.keys().copied().collect::>(); + for _ in 0..storage.len() / 5 { targets.push(rand_b256()); } targets.sort(); // Create test harness - let harness = ProofTestHarness::new(post_state); + let harness = ProofTestHarness::new(storage); - // Assert the proof (convert B256 to ProofV2Target with no min_len for this test) harness .assert_proof(targets.into_iter().map(ProofV2Target::new)) .expect("Proof generation failed"); } - - #[test] - fn test_failing_proptest_case_0() { - use alloy_primitives::{hex, map::B256Map}; - - reth_tracing::init_test_tracing(); - - // Helper function to create B256 from hex string - let b256 = |s: &str| B256::from_slice(&hex::decode(s).unwrap()); - - // Create the HashedPostState from test case input - let mut accounts = B256Map::default(); - - // Define all account data from test case input - let account_data = [ - ( - "9f3a475db85ff1f5b5e82d8614ee4afc670d27aefb9a43da0bd863a54acf1fe6", - 8396790837504194281u64, - 9224366602005816983u64, - "103c5b0538f4e37944321a30f5cb1f7005d2ee70998106f34f36d7adb838c789", - ), - ( - "c736258fdfd23d73ec4c5e54b8c3b58e26726b361d438ef48670f028286b70ca", - 9193115115482903760u64, - 4515164289866465875u64, - "9f24ef3ab0b4893b0ec38d0e9b00f239da072ccf093b0b24f1ea1f99547abe55", - ), - ( - "780a3476520090f97e847181aee17515c5ea30b7607775103df16d2b6611a87a", - 8404772182417755681u64, - 16639574952778823617u64, - 
"214b12bee666ce8c64c6bbbcfafa0c3e55b4b05a8724ec4182b9a6caa774c56d", - ), - ( - "23ebfa849308a5d02c3048040217cd1f4b71fb01a9b54dafe541284ebec2bcce", - 17978809803974566048u64, - 11093542035392742776u64, - "5384dfda8f1935d98e463c00a96960ff24e4d4893ec21e5ece0d272df33ac7e9", - ), - ( - "348e476c24fac841b11d358431b4526db09edc9f39906e0ac8809886a04f3c5a", - 9422945522568453583u64, - 9737072818780682487u64, - "79f8f25b2cbb7485c5c7b627917c0f562f012d3d7ddd486212c90fbea0cf686e", - ), - ( - "830536ee6c8f780a1cd760457345b79fc09476018a59cf3e8fd427a793d99633", - 16497625187081138489u64, - 15143978245385012455u64, - "00ede4000cc2a16fca7e930761aaf30d1fddcc3803f0009d6a0742b4ee519342", - ), - ( - "806c74b024b2fe81f077ea93d2936c489689f7fe024febc3a0fb71a8a9f22fbc", - 8103477314050566918u64, - 1383893458340561723u64, - "690ed176136174c4f0cc442e6dcbcf6e7b577e30fc052430b6060f97af1f8e85", - ), - ( - "b903d962ffc520877f14e1e8328160e5b22f8086b0f7e9cba7a373a8376028a0", - 12972727566246296372u64, - 1130659127924527352u64, - "cadf1f09d8e6a0d945a58ccd2ff36e2ae99f8146f02be96873e84bef0462d64a", - ), - ( - "d36a16afff0097e06b2c28bd795b889265e2ceff9a086173113fbeb6f7a9bc42", - 15682404502571860137u64, - 2025886798818635036u64, - "c2cee70663e9ff1b521e2e1602e88723da52ccdc7a69e370cde9595af435e654", - ), - ( - "f3e8461cba0b84f5b81f8ca63d0456cb567e701ec1d6e77b1a03624c5018389b", - 5663749586038550112u64, - 7681243595728002238u64, - "072c547c3ab9744bcd2ed9dbd813bd62866a673f4ca5d46939b65e9507be0e70", - ), - ( - "40b71840b6f43a493b32f4aa755e02d572012392fd582c81a513a169447e194c", - 518207789203399614u64, - 317311275468085815u64, - "85541d48471bf639c2574600a9b637338c49729ba9e741f157cc6ebaae139da0", - ), - ( - "3f77cd91ceb7d335dd2527c29e79aaf94f14141438740051eb0163d86c35bcc9", - 16227517944662106096u64, - 12646193931088343779u64, - "54999911d82dd63d526429275115fa98f6a560bc2d8e00be24962e91e38d7182", - ), - ( - "5cd903814ba84daa6956572411cd1bf4d48a8e230003d28cc3f942697bf8debb", - 5096288383163945009u64, - 
17919982845103509853u64, - "6a53c812e713f1bfe6bf21954f291140c60ec3f2ef353ecdae5dc7b263a37282", - ), - ( - "23f3602c95fd98d7fbe48a326ae1549030a2c7574099432cce5b458182f16bf2", - 11136020130962086191u64, - 12045219101880183180u64, - "ce53fb9b108a3ee90db8469e44948ba3263ca8d8a0d92a076c9516f9a3d30bd1", - ), - ( - "be86489b3594a9da83e04a9ff81c8d68d528b8b9d31f3942d1c5856a4a8c5af7", - 16293506537092575994u64, - 536238712429663046u64, - "a2af0607ade21241386ecfb3780aa90514f43595941daeff8dd599c203cde30a", - ), - ( - "97bcd85ee5d6033bdf86397e8b26f711912948a7298114be27ca5499ea99725f", - 3086656672041156193u64, - 8667446575959669532u64, - "0474377538684a991ffc9b41f970b48e65eda9e07c292e60861258ef87d45272", - ), - ( - "40065932e6c70eb907e4f2a89ec772f5382ca90a49ef44c4ae21155b9decdcc0", - 17152529399128063686u64, - 3643450822628960860u64, - "d5f6198c64c797f455f5b44062bb136734f508f9cdd02d8d69d24100ac8d6252", - ), - ( - "c136436c2db6b2ebd14985e2c883e73c6d8fd95ace54bfefae9eeca47b7da800", - 727585093455815585u64, - 521742371554431881u64, - "3dfad04a6eb46d175b63e96943c7d636c56d61063277e25557aace95820432da", - ), - ( - "9ea50348595593788645394eb041ac4f75ee4d6a4840b9cf1ed304e895060791", - 8654829249939415079u64, - 15623358443672184321u64, - "61bb0d6ffcd5b32d0ee34a3b7dfb1c495888059be02b255dd1fa3be02fa1ddbd", - ), - ( - "5abc714353ad6abda44a609f9b61f310f5b0a7df55ccf553dc2db3edda18ca17", - 5732104102609402825u64, - 15720007305337585794u64, - "8b55b7e9c6f54057322c5e0610b33b3137f1fcd46f7d4af1aca797c7b5fff033", - ), - ( - "e270b59e6e56100f9e2813f263884ba5f74190a1770dd88cd9603266174e0a6b", - 4728642361690813205u64, - 6762867306120182099u64, - "5e9aa1ff854504b4bfea4a7f0175866eba04e88e14e57ac08dddc63d6917bf47", - ), - ( - "78286294c6fb6823bb8b2b2ddb7a1e71ee64e05c9ba33b0eb8bb6654c64a8259", - 6032052879332640150u64, - 498315069638377858u64, - "799ef578ffb51a5ec42484e788d6ada4f13f0ff73e1b7b3e6d14d58caae9319a", - ), - ( - "af1b85cf284b0cb59a4bfb0f699194bcd6ad4538f27057d9d93dc7a95c1ff32e", - 
1647153930670480138u64, - 13109595411418593026u64, - "429dcdf4748c0047b0dd94f3ad12b5e62bbadf8302525cc5d2aad9c9c746696f", - ), - ( - "0152b7a0626771a2518de84c01e52839e7821a655f9dcb9a174d8f52b64b7086", - 3915492299782594412u64, - 9550071871839879785u64, - "4d5e6ce993dfc9597585ae2b4bacd6d055fefc56ae825666c83e0770e4aa0527", - ), - ( - "9ea9b8a4f6bce1dba63290b81f4d1b88dfeac3e244856904a5c9d4086a10271b", - 8824593031424861220u64, - 15831101445348312026u64, - "a07602b4dd5cba679562061b7c5c0344b2edd6eba36aa97ca57a6fe01ed80a48", - ), - ( - "d7b26c2d8f85b74423a57a3da56c61829340f65967791bab849c90b5e1547e7a", - 12723258987146468813u64, - 10714399360315276559u64, - "3705e57b27d931188c0d2017ab62577355b0cdda4173203478a8562a0cdcae0c", - ), - ( - "da354ceca117552482e628937931870a28e9d4416f47a58ee77176d0b760c75b", - 1580954430670112951u64, - 14920857341852745222u64, - "a13d6b0123daa2e662699ac55a2d0ed1d2e73a02ed00ee5a4dd34db8dea2a37e", - ), - ( - "53140d0c8b90b4c3c49e0604879d0dc036e914c4c4f799f1ccae357fef2613e3", - 12521658365236780592u64, - 11630410585145916252u64, - "46f06ce1435a7a0fd3476bbcffe4aac88c33a7fcf50080270b715d25c93d96d7", - ), - ( - "4b1c151815da6f18f27e98890eac1f7d43b80f3386c7c7d15ee0e43a7edfe0a6", - 9575643484508382933u64, - 3471795678079408573u64, - "a9e6a8fac46c5fc61ae07bddc223e9f105f567ad039d2312a03431d1f24d8b2c", - ), - ( - "39436357a2bcd906e58fb88238be2ddb2e43c8a5590332e3aee1d1134a0d0ba4", - 10171391804125392783u64, - 2915644784933705108u64, - "1d5db03f07137da9d3af85096ed51a4ff64bb476a79bf4294850438867fe3833", - ), - ( - "5fbe8d9d6a12b061a94a72436caec331ab1fd4e472c3bb4688215788c5e9bcd9", - 5663512925993713993u64, - 18170240962605758111u64, - "bd5d601cbcb47bd84d410bafec72f2270fceb1ed2ed11499a1e218a9f89a9f7f", - ), - ( - "f2e29a909dd31b38e9b92b2b2d214e822ebddb26183cd077d4009773854ab099", - 7512894577556564068u64, - 15905517369556068583u64, - "a36e66ce11eca7900248c518e12c6c08d659d609f4cbd98468292de7adf780f2", - ), - ( - 
"3eb82e6d6e964ca56b50cc54bdd55bb470c67a4932aba48d27d175d1be2542aa", - 12645567232869276853u64, - 8416544129280224452u64, - "d177f246a45cc76d39a8ee06b32d8c076c986106b9a8e0455a0b41d00fe3cbde", - ), - ( - "c903731014f6a5b4b45174ef5f9d5a2895a19d1308292f25aa323fda88acc938", - 5989992708726918818u64, - 17462460601463602125u64, - "01241c61ad1c8adc27e5a1096ab6c643af0fbb6e2818ef77272b70e5c3624abc", - ), - ( - "ef46410ab47113a78c27e100ed1b476f82a8789012bd95a047a4b23385596f53", - 11884362385049322305u64, - 619908411193297508u64, - "e9b4c929e26077ac1fd5a771ea5badc7e9ddb58a20a2a797389c63b3dd3df00d", - ), - ( - "be336bc6722bb787d542f4ef8ecb6f46a449557ca7b69b8668b6fed19dfa73b7", - 11490216175357680195u64, - 13136528075688203375u64, - "31bfd807f92e6d5dc5c534e9ad0cb29d00c6f0ae7d7b5f1e65f8e683de0bce59", - ), - ( - "39599e5828a8f102b8a6808103ae7df29b838fe739d8b73f72f8f0d282ca5a47", - 6957481657451522177u64, - 4196708540027060724u64, - "968a12d79704b313471ece148cb4e26b8b11620db2a9ee6da0f5dc200801f555", - ), - ( - "acd99530bb14ca9a7fac3df8eebfd8cdd234b0f6f7c3893a20bc159a4fd54df5", - 9792913946138032169u64, - 9219321015500590384u64, - "db45a98128770a329c82c904ceee21d3917f6072b8bd260e46218f65656c964c", - ), - ( - "453b80a0b11f237011c57630034ed46888ad96f4300a58aea24c0fe4a5472f68", - 14407140330317286994u64, - 5783848199433986576u64, - "b8cded0b4efd6bf2282a4f8b3c353f74821714f84df9a6ab25131edc7fdad00f", - ), - ( - "23e464d1e9b413a4a6b378cee3a0405ec6ccbb4d418372d1b42d3fde558d48d1", - 1190974500816796805u64, - 1621159728666344828u64, - "d677f41d273754da3ab8080b605ae07a7193c9f35f6318b809e42a1fdf594be3", - ), - ( - "d0e590648dec459aca50edf44251627bab5a36029a0c748b1ddf86b7b887425b", - 4807164391931567365u64, - 4256042233199858200u64, - "a8677de59ab856516a03663730af54c55a79169346c3d958b564e5ee35d8622b", - ), - ( - "72387dbaaaf2c39175d8c067558b869ba7bdc6234bc63ee97a53fea1d988ff39", - 5046042574093452325u64, - 3088471405044806123u64, - 
"83c226621506b07073936aec3c87a8e2ef34dd42e504adc2bbab39ede49aa77f", - ), - ( - "de6874ca2b9dd8b4347c25d32b882a2a7c127b127d6c5e00d073ab3853339d0e", - 6112730660331874479u64, - 10943246617310133253u64, - "a0c96a69e5ab3e3fe1a1a2fd0e5e68035ff3c7b2985e4e6b8407d4c377600c6f", - ), - ( - "b0d8689e08b983e578d6a0c136b76952497087ee144369af653a0a1b231eeb28", - 15612408165265483596u64, - 13112504741499957010u64, - "4fc49edeff215f1d54dfd2e60a14a3de2abecbe845db2148c7aee32c65f3c91c", - ), - ( - "29d7fb6b714cbdd1be95c4a268cef7f544329642ae05fab26dc251bbc773085e", - 17509162400681223655u64, - 5075629528173950353u64, - "781ecb560ef8cf0bcfa96b8d12075f4cf87ad52d69dfb2c72801206eded135bd", - ), - ( - "85dbf7074c93a4e39b67cc504b35351ee16c1fab437a7fb9e5d9320be1d9c13c", - 17692199403267011109u64, - 7069378948726478427u64, - "a3ff0d8dee5aa0214460f5b03a70bd76ef00ac8c07f07c0b3d82c9c57e4c72a9", - ), - ( - "7bd5a9f3126b4a681afac9a177c6ff7f3dd80d8d7fd5a821a705221c96975ded", - 17807965607151214145u64, - 5562549152802999850u64, - "dbc3861943b7372e49698b1c5b0e4255b7c93e9fa2c13d6a4405172ab0db9a5b", - ), - ( - "496d13d45dbe7eb02fee23c914ac9fefdf86cf5c937c520719fc6a31b3fcf8d9", - 13446203348342334214u64, - 332407928246785326u64, - "d2d73f15fcdc12adce25b911aa4551dcf900e225761e254eb6392cbd414e389c", - ), - ( - "b2f0a0127fc74a35dec5515b1c7eb8a3833ca99925049c47cd109ec94678e6c5", - 9683373807753869342u64, - 7570798132195583433u64, - "e704110433e5ab17858c5fbe4f1b6d692942d5f5981cac68372d06066bee97fe", - ), - ( - "d5f65171b17d7720411905ef138e84b9d1f459e2b248521c449f1781aafd675e", - 10088287051097617949u64, - 185695341767856973u64, - "8d784c4171e242af4187f30510cd298106b7e68cd3088444a055cb1f3893ba28", - ), - ( - "7dcbec5c20fbf1d69665d4b9cdc450fea2d0098e78084bce0a864fea4ba016b0", - 13908816056510478374u64, - 17793990636863600193u64, - "18e9026372d91e116faf813ce3ba9d7fadef2bb3b779be6efeba8a4ecd9e1f38", - ), - ( - "d4f772f4bf1cfa4dad4b55962b50900da8657a4961dabbdf0664f3cd42d368f8", - 16438076732493217366u64, - 
18419670900047275588u64, - "b9fd16b16b3a8fab4d9c47f452d9ce4aad530edeb06ee6830589078db2f79382", - ), - ( - "2d009535f82b1813ce2ca7236ceae7864c1e4d3644a1acd02656919ef1aa55d0", - 10206924399607440433u64, - 3986996560633257271u64, - "db49e225bd427768599a7c06d7aee432121fa3179505f9ee8c717f51c7fa8c54", - ), - ( - "b1d7a292df12e505e7433c7e850e9efc81a8931b65f3354a66402894b6d5ba76", - 8215550459234533539u64, - 10241096845089693964u64, - "5567813b312cb811909a01d14ee8f7ec4d239198ea2d37243123e1de2317e1af", - ), - ( - "85120d6f43ea9258accf6a87e49cd5461d9b3735a4dc623f9fbcc669cbdd1ce6", - 17566770568845511328u64, - 8686605711223432099u64, - "e163f4fcd17acf5714ee48278732808601e861cd4c4c24326cd24431aab1d0ce", - ), - ( - "48fe4c22080c6e702f7af0e97fb5354c1c14ff4616c6fc4ac8a4491d4b9b3473", - 14371024664575587429u64, - 15149464181957728462u64, - "061dec7af4b41bdd056306a8b13b71d574a49a4595884b1a77674f5150d4509d", - ), - ( - "29d14b014fa3cabbb3b4808e751e81f571de6d0e727cae627318a5fd82fef517", - 9612395342616083334u64, - 3700617080099093094u64, - "f7b33a2d2784441f77f0cc1c87930e79bea3332a921269b500e81d823108561c", - ), - ]; - - // Insert all accounts - for (addr, nonce, balance, code_hash) in &account_data { - accounts.insert( - b256(addr), - Some(Account { - nonce: *nonce, - balance: U256::from(*balance), - bytecode_hash: Some(b256(code_hash)), - }), - ); - } - - let post_state = HashedPostState { accounts, storages: Default::default() }; - - // Create test harness - let harness = ProofTestHarness::new(post_state); - - // Create targets from test case input - these are Nibbles in hex form - let targets = vec![ - ProofV2Target::new(b256( - "0153000000000000000000000000000000000000000000000000000000000000", - )) - .with_min_len(2), - ProofV2Target::new(b256( - "0000000000000000000000000000000000000000000000000000000000000000", - )) - .with_min_len(2), - ProofV2Target::new(b256( - "2300000000000000000000000000000000000000000000000000000000000000", - )) - .with_min_len(2), - ]; - - // Test 
proof generation - harness.assert_proof(targets).expect("Proof generation failed"); - } } diff --git a/crates/trie/trie/src/test_utils.rs b/crates/trie/trie/src/test_utils.rs index ebd976b182..c1871a8997 100644 --- a/crates/trie/trie/src/test_utils.rs +++ b/crates/trie/trie/src/test_utils.rs @@ -47,3 +47,250 @@ pub fn storage_root_prehashed<I: IntoIterator<Item = (B256, U256)>>(storage: I) let encoded_storage = storage.into_iter().map(|(k, v)| (k, encode_fixed_size(&v))); triehash::trie_root::<KeccakHasher, _, _, _>(encoded_storage) } + +// --------------------------------------------------------------------------- +// Trie test harness +// --------------------------------------------------------------------------- + +use crate::{ + hashed_cursor::{ + mock::MockHashedCursorFactory, HashedCursorFactory, HashedPostStateCursorFactory, + }, + proof_v2::StorageProofCalculator, + trie_cursor::{mock::MockTrieCursorFactory, TrieCursorFactory}, + StorageRoot, +}; +use alloy_primitives::map::HashSet; +use reth_trie_common::{ + prefix_set::PrefixSetMut, updates::StorageTrieUpdates, BranchNodeCompact, + HashedPostStateSorted, HashedStorage, Nibbles, ProofTrieNodeV2, ProofV2Target, +}; +use std::{collections::BTreeMap, iter::once}; + +/// General-purpose test harness for storage trie tests. +/// +/// Manages a base storage dataset, computes expected roots via [`StorageRoot`], and generates +/// V2 proofs via [`StorageProofCalculator`] using mock cursors. +#[derive(Debug)] +pub struct TrieTestHarness { + /// The base storage dataset (hashed slot → value). Zero-valued entries are absent. + storage: BTreeMap<B256, U256>, + /// The expected storage root, calculated by [`StorageRoot`]. + original_root: B256, + /// The starting storage trie updates, used for minimization. + storage_trie_updates: StorageTrieUpdates, + /// Mock factory for trie cursors. + trie_cursor_factory: MockTrieCursorFactory, + /// Mock factory for hashed cursors.
+ hashed_cursor_factory: MockHashedCursorFactory, +} + +impl TrieTestHarness { + /// Creates a new test harness from a map of hashed storage slots to values. + pub fn new(storage: BTreeMap<B256, U256>) -> Self { + let mut harness = Self { + storage, + original_root: B256::ZERO, + storage_trie_updates: StorageTrieUpdates::default(), + trie_cursor_factory: MockTrieCursorFactory::new( + BTreeMap::new(), + once((B256::ZERO, BTreeMap::new())).collect(), + ), + hashed_cursor_factory: MockHashedCursorFactory::new( + BTreeMap::new(), + once((B256::ZERO, BTreeMap::new())).collect(), + ), + }; + harness.rebuild(); + harness + } + + /// Computes the storage root and trie updates after applying the given changeset on top + /// of the current base storage. + /// + /// Builds a [`HashedPostStateCursorFactory`] overlay, derives a prefix set from the + /// changeset keys, and passes both into [`StorageRoot::new_hashed`]. + pub fn get_root_with_updates( + &self, + changeset: &BTreeMap<B256, U256>, + ) -> (B256, StorageTrieUpdates) { + let mut prefix_set = PrefixSetMut::with_capacity(changeset.len()); + for hashed_slot in changeset.keys() { + prefix_set.insert(Nibbles::unpack(hashed_slot)); + } + + let hashed_storage = + HashedStorage::from_iter(false, changeset.iter().map(|(&k, &v)| (k, v))); + let overlay = HashedPostStateSorted::new( + Vec::new(), + once((self.hashed_address(), hashed_storage.into_sorted())).collect(), + ); + let overlay_cursor_factory = + HashedPostStateCursorFactory::new(self.hashed_cursor_factory.clone(), &overlay); + + let (root, _, updates) = StorageRoot::new_hashed( + self.trie_cursor_factory.clone(), + overlay_cursor_factory, + self.hashed_address(), + prefix_set.freeze(), + #[cfg(feature = "metrics")] + crate::metrics::TrieRootMetrics::new(crate::TrieType::Storage), + ) + .root_with_updates() + .expect("StorageRoot should succeed"); + + (root, updates) + } + + /// Merges `changeset` into the base storage (zero values remove entries) and + /// rebuilds the harness from scratch
with the resulting storage. + pub fn apply_changeset(&mut self, changeset: BTreeMap<B256, U256>) { + for (k, v) in changeset { + if v == U256::ZERO { + self.storage.remove(&k); + } else { + self.storage.insert(k, v); + } + } + self.rebuild(); + } + + /// Recomputes the storage root, trie updates, and cursor factories from `self.storage`. + fn rebuild(&mut self) { + self.hashed_cursor_factory = MockHashedCursorFactory::new( + BTreeMap::new(), + once((self.hashed_address(), self.storage.clone())).collect(), + ); + + let (root, _, updates) = StorageRoot::new_hashed( + MockTrieCursorFactory::new( + BTreeMap::new(), + once((self.hashed_address(), BTreeMap::new())).collect(), + ), + self.hashed_cursor_factory.clone(), + self.hashed_address(), + crate::prefix_set::PrefixSet::default(), + #[cfg(feature = "metrics")] + crate::metrics::TrieRootMetrics::new(crate::TrieType::Storage), + ) + .root_with_updates() + .expect("StorageRoot should succeed"); + + self.trie_cursor_factory = MockTrieCursorFactory::new( + BTreeMap::new(), + once(( + self.hashed_address(), + updates.storage_nodes.iter().map(|(k, v)| (*k, v.clone())).collect(), + )) + .collect(), + ); + + self.original_root = root; + self.storage_trie_updates = updates; + } + + /// Returns the hashed address used for all storage trie operations. + pub const fn hashed_address(&self) -> B256 { + B256::ZERO + } + + /// Returns a reference to the base storage dataset. + pub const fn storage(&self) -> &BTreeMap<B256, U256> { + &self.storage + } + + /// Returns the expected storage root. + pub const fn original_root(&self) -> B256 { + self.original_root + } + + /// Returns a reference to the storage trie updates. + pub const fn storage_trie_updates(&self) -> &StorageTrieUpdates { + &self.storage_trie_updates + } + + /// Replaces the trie cursor factory with one backed by the given trie nodes.
+ pub fn set_trie_nodes(&mut self, trie_nodes: BTreeMap<Nibbles, BranchNodeCompact>) { + self.trie_cursor_factory = MockTrieCursorFactory::new( + BTreeMap::new(), + once((self.hashed_address(), trie_nodes)).collect(), + ); + } + + /// Returns a clone of the mock trie cursor factory. + pub fn trie_cursor_factory(&self) -> MockTrieCursorFactory { + self.trie_cursor_factory.clone() + } + + /// Returns a clone of the mock hashed cursor factory. + pub fn hashed_cursor_factory(&self) -> MockHashedCursorFactory { + self.hashed_cursor_factory.clone() + } + + /// Obtains the root node of the storage trie via [`StorageProofCalculator`]. + pub fn root_node(&self) -> ProofTrieNodeV2 { + let trie_cursor = self + .trie_cursor_factory + .storage_trie_cursor(self.hashed_address()) + .expect("storage trie cursor should succeed"); + let hashed_cursor = self + .hashed_cursor_factory + .hashed_storage_cursor(self.hashed_address()) + .expect("hashed storage cursor should succeed"); + + let mut proof_calculator = StorageProofCalculator::new_storage(trie_cursor, hashed_cursor); + proof_calculator + .storage_root_node(self.hashed_address()) + .expect("storage_root_node should succeed") + } + + /// Generates storage proofs for the given targets using [`StorageProofCalculator`]. + /// + /// Also computes and returns the root hash (if the proof contains a root node) by reusing + /// the calculator after the proof call.
+ pub fn proof_v2(&self, targets: &mut [ProofV2Target]) -> (Vec<ProofTrieNodeV2>, Option<B256>) { + let trie_cursor = self + .trie_cursor_factory + .storage_trie_cursor(self.hashed_address()) + .expect("storage trie cursor should succeed"); + let hashed_cursor = self + .hashed_cursor_factory + .hashed_storage_cursor(self.hashed_address()) + .expect("hashed storage cursor should succeed"); + + let mut proof_calculator = StorageProofCalculator::new_storage(trie_cursor, hashed_cursor); + let proofs = proof_calculator + .storage_proof(self.hashed_address(), targets) + .expect("proof_v2 should succeed"); + let root_hash = + proof_calculator.compute_root_hash(&proofs).expect("compute_root_hash should succeed"); + (proofs, root_hash) + } + + /// Removes all entries from `updates` that are redundant with the starting storage + /// trie updates. + /// + /// A storage node is redundant if it exists in the starting set with the same value. + /// A removed node is redundant if it was already absent from the starting set. + /// The `is_deleted` flag is cleared if it matches the starting value. + pub fn minimize_trie_updates(&self, updates: &mut StorageTrieUpdates) { + if updates.is_deleted == self.storage_trie_updates.is_deleted { + updates.is_deleted = false; + } + + // StorageTrieUpdates::finalize can leave the same path in both storage_nodes + // and removed_nodes. Per into_sorted, updated nodes take precedence over + // removed ones. Record which paths had an update before minimization so we + // can drop their corresponding removals. + let paths_with_updates: HashSet<Nibbles> = updates.storage_nodes.keys().copied().collect(); + + updates + .storage_nodes + .retain(|path, node| self.storage_trie_updates.storage_nodes.get(path) != Some(node)); + + updates.removed_nodes.retain(|path| { + self.storage_trie_updates.storage_nodes.contains_key(path) && + !paths_with_updates.contains(path) + }); + } +}