feat(trie): Combine extension and branch nodes in output from proof v2 (#22021)

Co-authored-by: Arsenii Kulikov <klkvrr@gmail.com>
This commit is contained in:
Brian Picciano
2026-02-17 12:12:48 +01:00
committed by GitHub
parent df9e3669aa
commit 117b212e2e
17 changed files with 875 additions and 1528 deletions

View File

@@ -12,8 +12,8 @@ use reth_primitives_traits::FastInstant as Instant;
use reth_provider::AccountReader;
use reth_revm::state::EvmState;
use reth_trie::{
added_removed_keys::MultiAddedRemovedKeys, proof_v2, HashedPostState, HashedStorage,
MultiProofTargets,
added_removed_keys::{default_added_removed_keys, MultiAddedRemovedKeys},
proof_v2, HashedPostState, HashedStorage, MultiProofTargets,
};
#[cfg(test)]
use reth_trie_parallel::stats::ParallelTrieTracker;
@@ -919,7 +919,7 @@ impl MultiProofTask {
.storages
.get(account)
.cloned()
.unwrap_or_default(),
.unwrap_or_else(default_added_removed_keys),
);
}
}

View File

@@ -15,7 +15,7 @@ pub struct MultiAddedRemovedKeys {
/// Returns [`AddedRemovedKeys`] with default parameters. This is necessary while we are not yet
/// tracking added keys.
fn default_added_removed_keys() -> AddedRemovedKeys {
pub fn default_added_removed_keys() -> AddedRemovedKeys {
AddedRemovedKeys::default().with_assume_added(true)
}

View File

@@ -37,7 +37,7 @@ mod key;
pub use key::{KeccakKeyHasher, KeyHasher};
mod nibbles;
pub use nibbles::{Nibbles, StoredNibbles, StoredNibblesSubKey};
pub use nibbles::{depth_first_cmp, Nibbles, StoredNibbles, StoredNibblesSubKey};
mod storage;
pub use storage::StorageTrieEntry;
@@ -48,6 +48,9 @@ pub use subnode::StoredSubNode;
mod trie;
pub use trie::{BranchNodeMasks, BranchNodeMasksMap, ProofTrieNode};
mod trie_node_v2;
pub use trie_node_v2::*;
/// The implementation of a container for storing intermediate changes to a trie.
/// The container indicates when the trie has been modified.
pub mod prefix_set;

View File

@@ -1,7 +1,28 @@
use alloc::vec::Vec;
use core::cmp::Ordering;
use derive_more::Deref;
pub use nybbles::Nibbles;
/// Compares two [`Nibbles`] in depth-first order.
///
/// In depth-first ordering:
/// - Descendants come before their ancestors (children before parents)
/// - Siblings are ordered lexicographically
pub fn depth_first_cmp(a: &Nibbles, b: &Nibbles) -> Ordering {
    // Equal-length paths can never be in an ancestor/descendant relation, so a
    // plain lexicographic comparison is sufficient.
    if a.len() == b.len() {
        return a.cmp(b)
    }
    let shared = a.common_prefix_length(b);
    if shared == a.len() {
        // `a` is a strict prefix (ancestor) of `b`: descendants sort first.
        Ordering::Greater
    } else if shared == b.len() {
        // `b` is a strict prefix (ancestor) of `a`.
        Ordering::Less
    } else {
        // Diverging paths: order by the first differing nibble.
        a.get_unchecked(shared).cmp(&b.get_unchecked(shared))
    }
}
/// The representation of nibbles of the merkle trie stored in the database.
#[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash, derive_more::Index)]
#[cfg_attr(any(test, feature = "serde"), derive(serde::Serialize, serde::Deserialize))]

View File

@@ -1,6 +1,6 @@
//! Merkle trie proofs.
use crate::{BranchNodeMasksMap, Nibbles, ProofTrieNode, TrieAccount};
use crate::{BranchNodeMasksMap, Nibbles, ProofTrieNodeV2, TrieAccount};
use alloc::{borrow::Cow, vec::Vec};
use alloy_consensus::constants::KECCAK_EMPTY;
use alloy_primitives::{
@@ -448,9 +448,9 @@ impl TryFrom<MultiProof> for DecodedMultiProof {
#[derive(Clone, Debug, PartialEq, Eq, Default)]
pub struct DecodedMultiProofV2 {
/// Account trie proof nodes
pub account_proofs: Vec<ProofTrieNode>,
pub account_proofs: Vec<ProofTrieNodeV2>,
/// Storage trie proof nodes indexed by account
pub storage_proofs: B256Map<Vec<ProofTrieNode>>,
pub storage_proofs: B256Map<Vec<ProofTrieNodeV2>>,
}
impl DecodedMultiProofV2 {
@@ -477,6 +477,34 @@ impl DecodedMultiProofV2 {
}
}
impl From<DecodedMultiProof> for DecodedMultiProofV2 {
fn from(proof: DecodedMultiProof) -> Self {
let account_proofs =
decoded_proof_nodes_to_v2(proof.account_subtree, &proof.branch_node_masks);
let storage_proofs = proof
.storages
.into_iter()
.map(|(address, storage)| {
(address, decoded_proof_nodes_to_v2(storage.subtree, &storage.branch_node_masks))
})
.collect();
Self { account_proofs, storage_proofs }
}
}
/// Converts a [`DecodedProofNodes`] (path → [`TrieNode`] map) into a `Vec<ProofTrieNodeV2>`,
/// merging extension nodes into their child branch nodes.
fn decoded_proof_nodes_to_v2(
    nodes: DecodedProofNodes,
    masks: &BranchNodeMasksMap,
) -> Vec<ProofTrieNodeV2> {
    // Depth-first order (children before parents) is required by
    // `from_sorted_trie_nodes` so extensions can be folded into branches.
    let mut entries: Vec<_> = nodes.into_inner().into_iter().collect();
    entries.sort_unstable_by(|lhs, rhs| crate::depth_first_cmp(&lhs.0, &rhs.0));
    let with_masks = entries.into_iter().map(|(path, node)| {
        let node_masks = masks.get(&path).copied();
        (path, node, node_masks)
    });
    ProofTrieNodeV2::from_sorted_trie_nodes(with_masks)
}
/// The merkle multiproof of storage trie.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StorageMultiProof {

View File

@@ -0,0 +1,219 @@
//! Version 2 types related to representing nodes in an MPT.
use crate::BranchNodeMasks;
use alloc::vec::Vec;
use alloy_primitives::hex;
use alloy_rlp::{bytes, Decodable, Encodable, EMPTY_STRING_CODE};
use alloy_trie::{
nodes::{BranchNodeRef, ExtensionNode, ExtensionNodeRef, LeafNode, RlpNode, TrieNode},
Nibbles, TrieMask,
};
use core::fmt;
/// Carries all information needed by a sparse trie to reveal a particular node.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ProofTrieNodeV2 {
    /// Path of the node within the trie.
    pub path: Nibbles,
    /// The node itself, in the V2 (branch/extension-merged) representation.
    pub node: TrieNodeV2,
    /// Tree and hash masks for the node, if known.
    /// Both masks are always set together (from database branch nodes).
    pub masks: Option<BranchNodeMasks>,
}
impl ProofTrieNodeV2 {
    /// Converts an iterator of `(path, TrieNode, masks)` tuples into `Vec<ProofTrieNodeV2>`,
    /// merging extension nodes into their child branch nodes.
    ///
    /// The input **must** be sorted in depth-first order (children before parents) for extension
    /// merging to work correctly.
    pub fn from_sorted_trie_nodes(
        iter: impl IntoIterator<Item = (Nibbles, TrieNode, Option<BranchNodeMasks>)>,
    ) -> Vec<Self> {
        let iter = iter.into_iter();
        // The lower size-hint bound is sufficient: merging extensions into
        // branches can only shrink the output relative to the input.
        let mut result = Vec::with_capacity(iter.size_hint().0);
        for (path, node, masks) in iter {
            match node {
                TrieNode::EmptyRoot => {
                    result.push(Self { path, node: TrieNodeV2::EmptyRoot, masks });
                }
                TrieNode::Leaf(leaf) => {
                    result.push(Self { path, node: TrieNodeV2::Leaf(leaf), masks });
                }
                TrieNode::Branch(branch) => {
                    // Branches start with an empty extension key; a parent
                    // extension seen later may fill `key`/`branch_rlp_node` in.
                    result.push(Self {
                        path,
                        node: TrieNodeV2::Branch(BranchNodeV2 {
                            key: Nibbles::new(),
                            branch_rlp_node: None,
                            stack: branch.stack,
                            state_mask: branch.state_mask,
                        }),
                        masks,
                    });
                }
                TrieNode::Extension(ext) => {
                    // In depth-first order, the child branch comes BEFORE the parent
                    // extension. The child branch should be the last item we added to
                    // result, at path extension.path + extension.key.
                    let expected_branch_path = path.join(&ext.key);
                    // Check if the last item in result is the child branch; if so,
                    // fold this extension into it.
                    if let Some(last) = result.last_mut() &&
                        last.path == expected_branch_path &&
                        let TrieNodeV2::Branch(branch_v2) = &mut last.node
                    {
                        debug_assert!(
                            branch_v2.key.is_empty(),
                            "Branch at {:?} already has extension key {:?}",
                            last.path,
                            branch_v2.key
                        );
                        branch_v2.key = ext.key;
                        branch_v2.branch_rlp_node = Some(ext.child);
                        last.path = path;
                    }
                    // Otherwise (no merge happened) the extension's child is not a
                    // branch in the result. This happens when the child branch is
                    // hashed (not revealed in the proof). In V2 format, extension
                    // nodes are always combined with their child branch, so
                    // extension nodes whose child isn't revealed are dropped here.
                }
            }
        }
        result
    }
}
/// Enum representing an MPT trie node.
///
/// This is a V2 representation, differing from [`TrieNode`] in that branch and extension nodes are
/// compressed into a single node.
#[derive(PartialEq, Eq, Clone, Debug)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub enum TrieNodeV2 {
    /// Variant representing empty root node.
    EmptyRoot,
    /// Variant representing a [`BranchNodeV2`].
    Branch(BranchNodeV2),
    /// Variant representing a [`LeafNode`].
    Leaf(LeafNode),
    /// Variant representing an [`ExtensionNode`].
    ///
    /// This will only be used for extension nodes whose child is not inlined. This variant
    /// will never be produced by proof workers, which always reveal a full path to a requested
    /// leaf.
    Extension(ExtensionNode),
}
impl Encodable for TrieNodeV2 {
    /// RLP-encodes the node into `out`, dispatching on the variant.
    fn encode(&self, out: &mut dyn bytes::BufMut) {
        match self {
            // The empty root encodes as the RLP empty-string byte.
            Self::EmptyRoot => out.put_u8(EMPTY_STRING_CODE),
            Self::Leaf(leaf) => leaf.as_ref().encode(out),
            Self::Branch(branch) => branch.encode(out),
            Self::Extension(ext) => ext.encode(out),
        }
    }
}
impl Decodable for TrieNodeV2 {
    /// Decodes a node from RLP via the legacy [`TrieNode`] representation, then
    /// translates it into the V2 form.
    fn decode(buf: &mut &[u8]) -> Result<Self, alloy_rlp::Error> {
        match TrieNode::decode(buf)? {
            TrieNode::EmptyRoot => Ok(Self::EmptyRoot),
            TrieNode::Leaf(leaf) => Ok(Self::Leaf(leaf)),
            // A bare branch becomes a `BranchNodeV2` with an empty extension key.
            // The pre-computed branch RLP is unknown at this point.
            TrieNode::Branch(branch) => Ok(Self::Branch(BranchNodeV2::new(
                Default::default(),
                branch.stack,
                branch.state_mask,
                None,
            ))),
            TrieNode::Extension(ext) => {
                if ext.child.is_hash() {
                    // The child is referenced by hash only, so it cannot be
                    // merged; keep the standalone extension variant.
                    Ok(Self::Extension(ext))
                } else {
                    // The child is inlined: decode it recursively and fold the
                    // extension key into the resulting branch.
                    let Self::Branch(mut branch) = Self::decode(&mut ext.child.as_ref())? else {
                        return Err(alloy_rlp::Error::Custom(
                            "extension node child is not a branch",
                        ));
                    };
                    branch.key = ext.key;
                    Ok(Self::Branch(branch))
                }
            }
        }
    }
}
/// A branch node in an Ethereum Merkle Patricia Trie.
///
/// Branch node is a 17-element array consisting of 16 slots that correspond to each hexadecimal
/// character and an additional slot for a value. We do exclude the node value since all paths have
/// a fixed size.
///
/// This node also encompasses the possible parent extension node of a branch via the `key` field.
#[derive(PartialEq, Eq, Clone, Default)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct BranchNodeV2 {
    /// The key for the branch's parent extension. If the key is empty then the branch does not
    /// have a parent extension.
    pub key: Nibbles,
    /// The collection of RLP encoded children.
    pub stack: Vec<RlpNode>,
    /// The bitmask indicating the presence of children at the respective nibble positions.
    pub state_mask: TrieMask,
    /// [`RlpNode`] encoding of the branch node. Always provided when `key` is not empty (i.e.
    /// this is an extension node).
    pub branch_rlp_node: Option<RlpNode>,
}
impl fmt::Debug for BranchNodeV2 {
    /// Formats the node with the `stack` entries hex-encoded for readability.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Use the actual type name so debug output is not confused with the
        // legacy `BranchNode` type.
        f.debug_struct("BranchNodeV2")
            .field("key", &self.key)
            .field("stack", &self.stack.iter().map(hex::encode).collect::<Vec<_>>())
            .field("state_mask", &self.state_mask)
            .field("branch_rlp_node", &self.branch_rlp_node)
            .finish()
    }
}
impl BranchNodeV2 {
    /// Creates a new branch node with the given parent-extension key, child stack, state mask,
    /// and optional pre-computed branch RLP encoding.
    pub const fn new(
        key: Nibbles,
        stack: Vec<RlpNode>,
        state_mask: TrieMask,
        branch_rlp_node: Option<RlpNode>,
    ) -> Self {
        Self { key, stack, state_mask, branch_rlp_node }
    }
}
impl Encodable for BranchNodeV2 {
    /// RLP-encodes the node: as a plain branch when there is no parent
    /// extension, otherwise as an extension wrapping the pre-computed branch RLP.
    fn encode(&self, out: &mut dyn bytes::BufMut) {
        if self.key.is_empty() {
            // No parent extension: encode as a plain branch node.
            BranchNodeRef::new(&self.stack, self.state_mask).encode(out);
        } else {
            // With a non-empty key, the pre-computed branch RLP must be present.
            let branch_rlp = self
                .branch_rlp_node
                .as_ref()
                .expect("branch_rlp_node must always be present for extension nodes");
            ExtensionNodeRef::new(&self.key, branch_rlp.as_slice()).encode(out);
        }
    }
}

View File

@@ -1,20 +1,14 @@
use crate::{
metrics::ParallelTrieMetrics,
proof_task::{
AccountMultiproofInput, ProofResult, ProofResultContext, ProofWorkerHandle,
StorageProofInput, StorageProofResultMessage,
},
proof_task::{AccountMultiproofInput, ProofResult, ProofResultContext, ProofWorkerHandle},
root::ParallelStateRootError,
StorageRootTargets,
};
use alloy_primitives::{map::B256Set, B256};
use crossbeam_channel::{unbounded as crossbeam_unbounded, Receiver as CrossbeamReceiver};
use reth_execution_errors::StorageRootError;
use crossbeam_channel::unbounded as crossbeam_unbounded;
use reth_primitives_traits::FastInstant as Instant;
use reth_storage_errors::db::DatabaseError;
use reth_trie::{
prefix_set::{PrefixSet, PrefixSetMut, TriePrefixSets, TriePrefixSetsMut},
DecodedMultiProof, DecodedStorageMultiProof, HashedPostState, MultiProofTargets, Nibbles,
prefix_set::{PrefixSetMut, TriePrefixSets, TriePrefixSetsMut},
DecodedMultiProof, HashedPostState, MultiProofTargets, Nibbles,
};
use reth_trie_common::added_removed_keys::MultiAddedRemovedKeys;
use std::sync::Arc;
@@ -78,78 +72,6 @@ impl ParallelProof {
self.multi_added_removed_keys = multi_added_removed_keys;
self
}
/// Queues a storage proof task and returns a receiver for the result.
fn send_storage_proof(
&self,
hashed_address: B256,
prefix_set: PrefixSet,
target_slots: B256Set,
) -> Result<CrossbeamReceiver<StorageProofResultMessage>, ParallelStateRootError> {
let (result_tx, result_rx) = crossbeam_channel::unbounded();
let input = if self.v2_proofs_enabled {
StorageProofInput::new(
hashed_address,
target_slots.into_iter().map(Into::into).collect(),
)
} else {
StorageProofInput::legacy(
hashed_address,
prefix_set,
target_slots,
self.collect_branch_node_masks,
self.multi_added_removed_keys.clone(),
)
};
self.proof_worker_handle
.dispatch_storage_proof(input, result_tx)
.map_err(|e| ParallelStateRootError::Other(e.to_string()))?;
Ok(result_rx)
}
/// Generate a storage multiproof according to the specified targets and hashed address.
pub fn storage_proof(
self,
hashed_address: B256,
target_slots: B256Set,
) -> Result<DecodedStorageMultiProof, ParallelStateRootError> {
let total_targets = target_slots.len();
let prefix_set = if self.v2_proofs_enabled {
PrefixSet::default()
} else {
PrefixSetMut::from(target_slots.iter().map(Nibbles::unpack)).freeze()
};
trace!(
target: "trie::parallel_proof",
total_targets,
?hashed_address,
"Starting storage proof generation"
);
let receiver = self.send_storage_proof(hashed_address, prefix_set, target_slots)?;
let proof_msg = receiver.recv().map_err(|_| {
ParallelStateRootError::StorageRoot(StorageRootError::Database(DatabaseError::Other(
format!("channel closed for {hashed_address}"),
)))
})?;
// Extract storage proof directly from the result
let proof_result = proof_msg.result?;
let storage_proof = Into::<Option<DecodedStorageMultiProof>>::into(proof_result)
.expect("Partial proofs are not yet supported");
trace!(
target: "trie::parallel_proof",
total_targets,
?hashed_address,
"Storage proof generation completed"
);
Ok(storage_proof)
}
/// Extends prefix sets with the given multiproof targets and returns the frozen result.
///
@@ -251,7 +173,7 @@ mod tests {
use alloy_primitives::{
keccak256,
map::{B256Set, DefaultHashBuilder, HashMap},
Address, U256,
Address, B256, U256,
};
use rand::Rng;
use reth_primitives_traits::{Account, StorageEntry};

View File

@@ -59,7 +59,7 @@ use reth_trie::{
trie_cursor::{InstrumentedTrieCursor, TrieCursorFactory, TrieCursorMetricsCache},
walker::TrieWalker,
DecodedMultiProof, DecodedMultiProofV2, DecodedStorageMultiProof, HashBuilder, HashedPostState,
MultiProofTargets, Nibbles, ProofTrieNode, TRIE_ACCOUNT_RLP_MAX_SIZE,
MultiProofTargets, Nibbles, ProofTrieNodeV2, TRIE_ACCOUNT_RLP_MAX_SIZE,
};
use reth_trie_common::{
added_removed_keys::MultiAddedRemovedKeys,
@@ -737,7 +737,7 @@ pub(crate) enum StorageProofResult {
},
V2 {
/// The calculated V2 proof nodes
proof: Vec<ProofTrieNode>,
proof: Vec<ProofTrieNodeV2>,
/// The storage root calculated by the V2 proof
root: Option<B256>,
},
@@ -753,23 +753,6 @@ impl StorageProofResult {
}
}
impl From<StorageProofResult> for Option<DecodedStorageMultiProof> {
/// Returns None if the V2 proof result doesn't have a calculated root hash.
fn from(proof_result: StorageProofResult) -> Self {
match proof_result {
StorageProofResult::Legacy { proof } => Some(proof),
StorageProofResult::V2 { proof, root } => root.map(|root| {
let branch_node_masks = proof
.iter()
.filter_map(|node| node.masks.map(|masks| (node.path, masks)))
.collect();
let subtree = proof.into_iter().map(|node| (node.path, node.node)).collect();
DecodedStorageMultiProof { root, subtree, branch_node_masks }
}),
}
}
}
/// Message containing a completed storage proof result with metadata.
#[derive(Debug)]
pub struct StorageProofResultMessage {
@@ -1649,17 +1632,10 @@ where
proof_msg.hashed_address, hashed_address,
"storage worker must return same address"
);
let proof_result = proof_msg.result?;
let Some(root) = proof_result.root() else {
trace!(
target: "trie::proof_task",
?proof_result,
"Received proof_result without root",
);
panic!("Partial proofs are not yet supported");
let StorageProofResult::Legacy { proof } = proof_msg.result? else {
unreachable!("v2 result in legacy worker")
};
let proof = Into::<Option<DecodedStorageMultiProof>>::into(proof_result)
.expect("Partial proofs are not yet supported (into)");
let root = proof.root;
collected_decoded_storages.insert(hashed_address, proof);
root
}
@@ -1735,9 +1711,9 @@ where
let wait_start = Instant::now();
if let Ok(proof_msg) = receiver.recv() {
*storage_wait_time += wait_start.elapsed();
let proof_result = proof_msg.result?;
let proof = Into::<Option<DecodedStorageMultiProof>>::into(proof_result)
.expect("Partial proofs are not yet supported");
let StorageProofResult::Legacy { proof } = proof_msg.result? else {
unreachable!("v2 result in legacy worker")
};
collected_decoded_storages.insert(hashed_address, proof);
}
}

View File

@@ -10,7 +10,7 @@ use reth_trie::{
hashed_cursor::HashedStorageCursor,
proof_v2::{DeferredValueEncoder, LeafValueEncoder, StorageProofCalculator},
trie_cursor::TrieStorageCursor,
ProofTrieNode,
ProofTrieNodeV2,
};
use std::{
rc::Rc,
@@ -59,7 +59,7 @@ pub(crate) enum AsyncAccountDeferredValueEncoder<TC, HC> {
proof_result_rx:
Option<Result<CrossbeamReceiver<StorageProofResultMessage>, DatabaseError>>,
/// Shared storage proof results.
storage_proof_results: Rc<RefCell<B256Map<Vec<ProofTrieNode>>>>,
storage_proof_results: Rc<RefCell<B256Map<Vec<ProofTrieNodeV2>>>>,
/// Shared stats for tracking wait time and counts.
stats: Rc<RefCell<ValueEncoderStats>>,
/// Shared storage proof calculator for synchronous fallback when dispatched proof has no
@@ -226,7 +226,7 @@ pub(crate) struct AsyncAccountValueEncoder<TC, HC> {
cached_storage_roots: Arc<DashMap<B256, B256>>,
/// Tracks storage proof results received from the storage workers. [`Rc`] + [`RefCell`] is
/// required because [`DeferredValueEncoder`] cannot have a lifetime.
storage_proof_results: Rc<RefCell<B256Map<Vec<ProofTrieNode>>>>,
storage_proof_results: Rc<RefCell<B256Map<Vec<ProofTrieNodeV2>>>>,
/// Shared storage proof calculator for synchronous computation. Reuses cursors and internal
/// buffers across multiple storage root calculations.
storage_calculator: Rc<RefCell<StorageProofCalculator<TC, HC>>>,
@@ -267,7 +267,7 @@ impl<TC, HC> AsyncAccountValueEncoder<TC, HC> {
/// been dropped.
pub(crate) fn finalize(
self,
) -> Result<(B256Map<Vec<ProofTrieNode>>, ValueEncoderStats), StateProofError> {
) -> Result<(B256Map<Vec<ProofTrieNodeV2>>, ValueEncoderStats), StateProofError> {
let mut storage_proof_results = Rc::into_inner(self.storage_proof_results)
.expect("no deferred encoders are still allocated")
.into_inner();

File diff suppressed because it is too large Load Diff

View File

@@ -3,23 +3,20 @@ use crate::{
traits::SparseTrie as SparseTrieTrait,
ParallelSparseTrie, RevealableSparseTrie,
};
use alloc::{collections::VecDeque, vec::Vec};
use alloc::vec::Vec;
use alloy_primitives::{
map::{B256Map, B256Set, HashSet},
Bytes, B256,
B256,
};
use alloy_rlp::{Decodable, Encodable};
use alloy_trie::proof::DecodedProofNodes;
use reth_execution_errors::{SparseStateTrieErrorKind, SparseStateTrieResult, SparseTrieErrorKind};
use reth_primitives_traits::Account;
#[cfg(feature = "std")]
use reth_primitives_traits::FastInstant as Instant;
use reth_trie_common::{
proof::ProofNodes,
updates::{StorageTrieUpdates, TrieUpdates},
BranchNodeMasks, BranchNodeMasksMap, DecodedMultiProof, DecodedStorageMultiProof, MultiProof,
Nibbles, ProofTrieNode, RlpNode, StorageMultiProof, TrieAccount, TrieNode, EMPTY_ROOT_HASH,
TRIE_ACCOUNT_RLP_MAX_SIZE,
BranchNodeMasks, DecodedMultiProof, MultiProof, Nibbles, ProofTrieNodeV2, TrieAccount,
TrieNodeV2, EMPTY_ROOT_HASH, TRIE_ACCOUNT_RLP_MAX_SIZE,
};
#[cfg(feature = "std")]
use tracing::debug;
@@ -32,7 +29,7 @@ use tracing::{instrument, trace};
#[derive(Debug, Default)]
pub struct DeferredDrops {
/// Each nodes reveal operation creates a new buffer, uses it, and pushes it here.
pub proof_nodes_bufs: Vec<Vec<ProofTrieNode>>,
pub proof_nodes_bufs: Vec<Vec<ProofTrieNodeV2>>,
}
#[derive(Debug)]
@@ -269,83 +266,7 @@ where
&mut self,
multiproof: DecodedMultiProof,
) -> SparseStateTrieResult<()> {
let DecodedMultiProof { account_subtree, storages, branch_node_masks } = multiproof;
// first reveal the account proof nodes
self.reveal_decoded_account_multiproof(account_subtree, branch_node_masks)?;
#[cfg(not(feature = "std"))]
// If nostd then serially reveal storage proof nodes for each storage trie
{
for (account, storage_subtree) in storages {
self.reveal_decoded_storage_multiproof(account, storage_subtree)?;
// Mark this storage trie as hot (accessed this tick)
self.storage.modifications.mark_accessed(account);
}
Ok(())
}
#[cfg(feature = "std")]
// If std then reveal storage proofs in parallel
{
use rayon::iter::ParallelIterator;
use reth_primitives_traits::ParallelBridgeBuffered;
let retain_updates = self.retain_updates;
// Process all storage trie revealings in parallel, having first removed the
// `reveal_nodes` tracking and `RevealableSparseTrie`s for each account from their
// HashMaps. These will be returned after processing.
let results: Vec<_> = storages
.into_iter()
.map(|(account, storage_subtree)| {
let revealed_nodes = self.storage.take_or_create_revealed_paths(&account);
let trie = self.storage.take_or_create_trie(&account);
(account, storage_subtree, revealed_nodes, trie)
})
.par_bridge_buffered()
.map(|(account, storage_subtree, mut revealed_nodes, mut trie)| {
let mut bufs = Vec::new();
let result = Self::reveal_decoded_storage_multiproof_inner(
account,
storage_subtree,
&mut revealed_nodes,
&mut trie,
&mut bufs,
retain_updates,
);
(account, revealed_nodes, trie, result, bufs)
})
.collect();
// Return `revealed_nodes` and `RevealableSparseTrie` for each account, incrementing
// metrics and returning the last error seen if any.
let mut any_err = Ok(());
for (account, revealed_nodes, trie, result, bufs) in results {
self.storage.revealed_paths.insert(account, revealed_nodes);
self.storage.tries.insert(account, trie);
// Mark this storage trie as hot (accessed this tick)
self.storage.modifications.mark_accessed(account);
if let Ok(_metric_values) = result {
#[cfg(feature = "metrics")]
{
self.metrics
.increment_total_storage_nodes(_metric_values.total_nodes as u64);
self.metrics
.increment_skipped_storage_nodes(_metric_values.skipped_nodes as u64);
}
} else {
any_err = result.map(|_| ());
}
// Keep buffers for deferred dropping
self.deferred_drops.proof_nodes_bufs.extend(bufs);
}
any_err
}
self.reveal_decoded_multiproof_v2(multiproof.into())
}
/// Reveals a V2 decoded multiproof.
@@ -442,66 +363,13 @@ where
}
}
/// Reveals an account multiproof.
pub fn reveal_account_multiproof(
&mut self,
account_subtree: ProofNodes,
branch_node_masks: BranchNodeMasksMap,
) -> SparseStateTrieResult<()> {
// decode the multiproof first
let decoded_multiproof = account_subtree.try_into()?;
self.reveal_decoded_account_multiproof(decoded_multiproof, branch_node_masks)
}
/// Reveals a decoded account multiproof.
pub fn reveal_decoded_account_multiproof(
&mut self,
account_subtree: DecodedProofNodes,
branch_node_masks: BranchNodeMasksMap,
) -> SparseStateTrieResult<()> {
let FilterMappedProofNodes {
root_node,
mut nodes,
new_nodes,
metric_values: _metric_values,
} = filter_map_revealed_nodes(
account_subtree,
&mut self.revealed_account_paths,
&branch_node_masks,
)?;
#[cfg(feature = "metrics")]
{
self.metrics.increment_total_account_nodes(_metric_values.total_nodes as u64);
self.metrics.increment_skipped_account_nodes(_metric_values.skipped_nodes as u64);
}
if let Some(root_node) = root_node {
// Reveal root node if it wasn't already.
trace!(target: "trie::sparse", ?root_node, "Revealing root account node");
let trie =
self.state.reveal_root(root_node.node, root_node.masks, self.retain_updates)?;
// Reserve the capacity for new nodes ahead of time, if the trie implementation
// supports doing so.
trie.reserve_nodes(new_nodes);
trace!(target: "trie::sparse", total_nodes = ?nodes.len(), "Revealing account nodes");
trie.reveal_nodes(&mut nodes)?;
}
// Keep buffer for deferred dropping
self.deferred_drops.proof_nodes_bufs.push(nodes);
Ok(())
}
/// Reveals account proof nodes from a V2 proof.
///
/// V2 proofs already include the masks in the `ProofTrieNode` structure,
/// so no separate masks map is needed.
pub fn reveal_account_v2_proof_nodes(
&mut self,
mut nodes: Vec<ProofTrieNode>,
mut nodes: Vec<ProofTrieNodeV2>,
) -> SparseStateTrieResult<()> {
if self.skip_proof_node_filtering {
let capacity = estimate_v2_proof_capacity(&nodes);
@@ -562,7 +430,7 @@ where
pub fn reveal_storage_v2_proof_nodes(
&mut self,
account: B256,
nodes: Vec<ProofTrieNode>,
nodes: Vec<ProofTrieNodeV2>,
) -> SparseStateTrieResult<()> {
let (trie, revealed_paths) = self.storage.get_trie_and_revealed_paths_mut(account);
let _metric_values = Self::reveal_storage_v2_proof_nodes_inner(
@@ -588,10 +456,10 @@ where
/// designed to handle a variety of associated public functions.
fn reveal_storage_v2_proof_nodes_inner(
account: B256,
mut nodes: Vec<ProofTrieNode>,
mut nodes: Vec<ProofTrieNodeV2>,
revealed_nodes: &mut HashSet<Nibbles>,
trie: &mut RevealableSparseTrie<S>,
bufs: &mut Vec<Vec<ProofTrieNode>>,
bufs: &mut Vec<Vec<ProofTrieNodeV2>>,
retain_updates: bool,
skip_filtering: bool,
) -> SparseStateTrieResult<ProofNodesMetricValues> {
@@ -636,180 +504,6 @@ where
Ok(metric_values)
}
/// Reveals a storage multiproof for the given address.
pub fn reveal_storage_multiproof(
&mut self,
account: B256,
storage_subtree: StorageMultiProof,
) -> SparseStateTrieResult<()> {
// decode the multiproof first
let decoded_multiproof = storage_subtree.try_into()?;
self.reveal_decoded_storage_multiproof(account, decoded_multiproof)
}
/// Reveals a decoded storage multiproof for the given address.
pub fn reveal_decoded_storage_multiproof(
&mut self,
account: B256,
storage_subtree: DecodedStorageMultiProof,
) -> SparseStateTrieResult<()> {
let (trie, revealed_paths) = self.storage.get_trie_and_revealed_paths_mut(account);
let _metric_values = Self::reveal_decoded_storage_multiproof_inner(
account,
storage_subtree,
revealed_paths,
trie,
&mut self.deferred_drops.proof_nodes_bufs,
self.retain_updates,
)?;
#[cfg(feature = "metrics")]
{
self.metrics.increment_total_storage_nodes(_metric_values.total_nodes as u64);
self.metrics.increment_skipped_storage_nodes(_metric_values.skipped_nodes as u64);
}
Ok(())
}
/// Reveals a decoded storage multiproof for the given address. This is internal static function
/// is designed to handle a variety of associated public functions.
fn reveal_decoded_storage_multiproof_inner(
account: B256,
storage_subtree: DecodedStorageMultiProof,
revealed_nodes: &mut HashSet<Nibbles>,
trie: &mut RevealableSparseTrie<S>,
bufs: &mut Vec<Vec<ProofTrieNode>>,
retain_updates: bool,
) -> SparseStateTrieResult<ProofNodesMetricValues> {
let FilterMappedProofNodes { root_node, mut nodes, new_nodes, metric_values } =
filter_map_revealed_nodes(
storage_subtree.subtree,
revealed_nodes,
&storage_subtree.branch_node_masks,
)?;
if let Some(root_node) = root_node {
// Reveal root node if it wasn't already.
trace!(target: "trie::sparse", ?account, ?root_node, "Revealing root storage node");
let trie = trie.reveal_root(root_node.node, root_node.masks, retain_updates)?;
// Reserve the capacity for new nodes ahead of time, if the trie implementation
// supports doing so.
trie.reserve_nodes(new_nodes);
trace!(target: "trie::sparse", ?account, total_nodes = ?nodes.len(), "Revealing storage nodes");
trie.reveal_nodes(&mut nodes)?;
}
// Keep buffer for deferred dropping
bufs.push(nodes);
Ok(metric_values)
}
/// Reveal state witness with the given state root.
/// The state witness is expected to be a map of `keccak(rlp(node)): rlp(node).`
/// NOTE: This method does not extensively validate the witness.
pub fn reveal_witness(
&mut self,
state_root: B256,
witness: &B256Map<Bytes>,
) -> SparseStateTrieResult<()> {
// Create a `(hash, path, maybe_account)` queue for traversing witness trie nodes
// starting from the root node.
let mut queue = VecDeque::from([(state_root, Nibbles::default(), None)]);
while let Some((hash, path, maybe_account)) = queue.pop_front() {
// Retrieve the trie node and decode it.
let Some(trie_node_bytes) = witness.get(&hash) else { continue };
let trie_node = TrieNode::decode(&mut &trie_node_bytes[..])?;
// Push children nodes into the queue.
match &trie_node {
TrieNode::Branch(branch) => {
for (idx, maybe_child) in branch.as_ref().children() {
if let Some(child_hash) = maybe_child.and_then(RlpNode::as_hash) {
let mut child_path = path;
child_path.push_unchecked(idx);
queue.push_back((child_hash, child_path, maybe_account));
}
}
}
TrieNode::Extension(ext) => {
if let Some(child_hash) = ext.child.as_hash() {
let mut child_path = path;
child_path.extend(&ext.key);
queue.push_back((child_hash, child_path, maybe_account));
}
}
TrieNode::Leaf(leaf) => {
let mut full_path = path;
full_path.extend(&leaf.key);
if maybe_account.is_none() {
let hashed_address = B256::from_slice(&full_path.pack());
let account = TrieAccount::decode(&mut &leaf.value[..])?;
if account.storage_root != EMPTY_ROOT_HASH {
queue.push_back((
account.storage_root,
Nibbles::default(),
Some(hashed_address),
));
}
}
}
TrieNode::EmptyRoot => {} // nothing to do here
};
// Reveal the node itself.
if let Some(account) = maybe_account {
// Check that the path was not already revealed.
if self
.storage
.revealed_paths
.get(&account)
.is_none_or(|paths| !paths.contains(&path))
{
let retain_updates = self.retain_updates;
let (storage_trie_entry, revealed_storage_paths) =
self.storage.get_trie_and_revealed_paths_mut(account);
if path.is_empty() {
// Handle special storage state root node case.
storage_trie_entry.reveal_root(trie_node, None, retain_updates)?;
} else {
// Reveal non-root storage trie node.
storage_trie_entry
.as_revealed_mut()
.ok_or(SparseTrieErrorKind::Blind)?
.reveal_node(path, trie_node, None)?;
}
// Track the revealed path.
revealed_storage_paths.insert(path);
}
}
// Check that the path was not already revealed.
else if !self.revealed_account_paths.contains(&path) {
if path.is_empty() {
// Handle special state root node case.
self.state.reveal_root(trie_node, None, self.retain_updates)?;
} else {
// Reveal non-root state trie node.
self.state
.as_revealed_mut()
.ok_or(SparseTrieErrorKind::Blind)?
.reveal_node(path, trie_node, None)?;
}
// Track the revealed path.
self.revealed_account_paths.insert(path);
}
}
Ok(())
}
/// Wipe the storage trie at the provided address.
pub fn wipe_storage(&mut self, address: B256) -> SparseStateTrieResult<()> {
if let Some(trie) = self.storage.tries.get_mut(&address) {
@@ -846,11 +540,11 @@ where
.account_node_provider()
.trie_node(&Nibbles::default())?
.map(|node| {
TrieNode::decode(&mut &node.node[..])
TrieNodeV2::decode(&mut &node.node[..])
.map(|decoded| (decoded, node.hash_mask, node.tree_mask))
})
.transpose()?
.unwrap_or((TrieNode::EmptyRoot, None, None));
.unwrap_or((TrieNodeV2::EmptyRoot, None, None));
let masks = BranchNodeMasks::from_optional(hash_mask, tree_mask);
self.state.reveal_root(root_node, masks, self.retain_updates).map_err(Into::into)
}
@@ -1491,97 +1185,13 @@ struct ProofNodesMetricValues {
skipped_nodes: usize,
}
/// Result of [`filter_map_revealed_nodes`].
#[derive(Debug, PartialEq, Eq)]
struct FilterMappedProofNodes {
/// Root node which was pulled out of the original node set to be handled specially.
root_node: Option<ProofTrieNode>,
/// Filtered, decoded and unsorted proof nodes. Root node is removed.
nodes: Vec<ProofTrieNode>,
/// Number of new nodes that will be revealed. This includes all children of branch nodes, even
/// if they are not in the proof.
new_nodes: usize,
/// Values which are being returned so they can be incremented into metrics.
metric_values: ProofNodesMetricValues,
}
/// Filters the decoded nodes that are already revealed, maps them to `SparseTrieNode`s,
/// separates the root node if present, and returns additional information about the number of
/// total, skipped, and new nodes.
fn filter_map_revealed_nodes(
proof_nodes: DecodedProofNodes,
revealed_nodes: &mut HashSet<Nibbles>,
branch_node_masks: &BranchNodeMasksMap,
) -> SparseStateTrieResult<FilterMappedProofNodes> {
let mut result = FilterMappedProofNodes {
root_node: None,
nodes: Vec::with_capacity(proof_nodes.len()),
new_nodes: 0,
metric_values: Default::default(),
};
let proof_nodes_len = proof_nodes.len();
for (path, proof_node) in proof_nodes.into_inner() {
result.metric_values.total_nodes += 1;
let is_root = path.is_empty();
// If the node is already revealed, skip it. We don't ever skip the root node, nor do we add
// it to `revealed_nodes`.
if !is_root && !revealed_nodes.insert(path) {
result.metric_values.skipped_nodes += 1;
continue
}
result.new_nodes += 1;
// Extract hash/tree masks based on the node type (only branch nodes have masks). At the
// same time increase the new_nodes counter if the node is a type which has children.
let masks = match &proof_node {
TrieNode::Branch(branch) => {
// If it's a branch node, increase the number of new nodes by the number of children
// according to the state mask.
result.new_nodes += branch.state_mask.count_ones() as usize;
branch_node_masks.get(&path).copied()
}
TrieNode::Extension(_) => {
// There is always exactly one child of an extension node.
result.new_nodes += 1;
None
}
_ => None,
};
let node = ProofTrieNode { path, node: proof_node, masks };
if is_root {
// Perform sanity check.
if matches!(node.node, TrieNode::EmptyRoot) && proof_nodes_len > 1 {
return Err(SparseStateTrieErrorKind::InvalidRootNode {
path,
node: alloy_rlp::encode(&node.node).into(),
}
.into())
}
result.root_node = Some(node);
continue
}
result.nodes.push(node);
}
Ok(result)
}
/// Result of [`filter_revealed_v2_proof_nodes`].
#[derive(Debug, PartialEq, Eq)]
struct FilteredV2ProofNodes {
/// Root node which was pulled out of the original node set to be handled specially.
root_node: Option<ProofTrieNode>,
root_node: Option<ProofTrieNodeV2>,
/// Filtered proof nodes. Root node is removed.
nodes: Vec<ProofTrieNode>,
nodes: Vec<ProofTrieNodeV2>,
/// Number of new nodes that will be revealed. This includes all children of branch nodes, even
/// if they are not in the proof.
new_nodes: usize,
@@ -1594,19 +1204,13 @@ struct FilteredV2ProofNodes {
/// This counts nodes and their children (for branch and extension nodes) to provide
/// proper capacity hints for `reserve_nodes`. Used when `skip_proof_node_filtering` is
/// enabled and no filtering is performed.
fn estimate_v2_proof_capacity(nodes: &[ProofTrieNode]) -> usize {
fn estimate_v2_proof_capacity(nodes: &[ProofTrieNodeV2]) -> usize {
let mut capacity = nodes.len();
for node in nodes {
match &node.node {
TrieNode::Branch(branch) => {
capacity += branch.state_mask.count_ones() as usize;
}
TrieNode::Extension(_) => {
capacity += 1;
}
_ => {}
};
if let TrieNodeV2::Branch(branch) = &node.node {
capacity += branch.state_mask.count_ones() as usize;
}
}
capacity
@@ -1615,10 +1219,10 @@ fn estimate_v2_proof_capacity(nodes: &[ProofTrieNode]) -> usize {
/// Filters V2 proof nodes that are already revealed, separates the root node if present, and
/// returns additional information about the number of total, skipped, and new nodes.
///
/// Unlike [`filter_map_revealed_nodes`], V2 proof nodes already have masks included in the
/// `ProofTrieNode` structure, so no separate masks map is needed.
/// V2 proof nodes already have masks included in the `ProofTrieNode` structure, so no separate
/// masks map is needed.
fn filter_revealed_v2_proof_nodes(
proof_nodes: Vec<ProofTrieNode>,
proof_nodes: Vec<ProofTrieNodeV2>,
revealed_nodes: &mut HashSet<Nibbles>,
) -> SparseStateTrieResult<FilteredV2ProofNodes> {
let mut result = FilteredV2ProofNodes {
@@ -1632,7 +1236,7 @@ fn filter_revealed_v2_proof_nodes(
// duplicate EmptyRoot nodes may appear (e.g., storage proofs split across chunks for an
// account with empty storage). We only error if there's an EmptyRoot alongside real nodes.
let non_empty_root_count =
proof_nodes.iter().filter(|n| !matches!(n.node, TrieNode::EmptyRoot)).count();
proof_nodes.iter().filter(|n| !matches!(n.node, TrieNodeV2::EmptyRoot)).count();
for node in proof_nodes {
result.metric_values.total_nodes += 1;
@@ -1649,22 +1253,20 @@ fn filter_revealed_v2_proof_nodes(
result.new_nodes += 1;
// Count children for capacity estimation
match &node.node {
TrieNode::Branch(branch) => {
result.new_nodes += branch.state_mask.count_ones() as usize;
}
TrieNode::Extension(_) => {
result.new_nodes += 1;
}
_ => {}
};
if let TrieNodeV2::Branch(branch) = &node.node {
result.new_nodes += branch.state_mask.count_ones() as usize;
}
if is_root {
// Perform sanity check: EmptyRoot is only valid if there are no other real nodes.
if matches!(node.node, TrieNode::EmptyRoot) && non_empty_root_count > 0 {
if matches!(node.node, TrieNodeV2::EmptyRoot) && non_empty_root_count > 0 {
return Err(SparseStateTrieErrorKind::InvalidRootNode {
path: node.path,
node: alloy_rlp::encode(&node.node).into(),
node: {
let mut buf = Vec::new();
node.node.encode(&mut buf);
buf.into()
},
}
.into())
}
@@ -1694,7 +1296,8 @@ mod tests {
use reth_trie::{updates::StorageTrieUpdates, HashBuilder, MultiProof, EMPTY_ROOT_HASH};
use reth_trie_common::{
proof::{ProofNodes, ProofRetainer},
BranchNode, BranchNodeMasks, BranchNodeMasksMap, LeafNode, StorageMultiProof, TrieMask,
BranchNodeMasks, BranchNodeMasksMap, BranchNodeV2, LeafNode, RlpNode, StorageMultiProof,
TrieMask,
};
#[test]
@@ -1703,11 +1306,11 @@ mod tests {
let mut sparse = SparseStateTrie::<ParallelSparseTrie>::default();
let leaf_value = alloy_rlp::encode(TrieAccount::default());
let leaf_1 = alloy_rlp::encode(TrieNode::Leaf(LeafNode::new(
let leaf_1 = alloy_rlp::encode(TrieNodeV2::Leaf(LeafNode::new(
Nibbles::default(),
leaf_value.clone(),
)));
let leaf_2 = alloy_rlp::encode(TrieNode::Leaf(LeafNode::new(
let leaf_2 = alloy_rlp::encode(TrieNodeV2::Leaf(LeafNode::new(
Nibbles::default(),
leaf_value.clone(),
)));
@@ -1716,9 +1319,11 @@ mod tests {
account_subtree: ProofNodes::from_iter([
(
Nibbles::default(),
alloy_rlp::encode(TrieNode::Branch(BranchNode {
alloy_rlp::encode(TrieNodeV2::Branch(BranchNodeV2 {
key: Nibbles::default(),
stack: vec![RlpNode::from_rlp(&leaf_1), RlpNode::from_rlp(&leaf_2)],
state_mask: TrieMask::new(0b11),
branch_rlp_node: None,
}))
.into(),
),
@@ -1772,11 +1377,11 @@ mod tests {
let mut sparse = SparseStateTrie::<ParallelSparseTrie>::default();
let leaf_value = alloy_rlp::encode(TrieAccount::default());
let leaf_1 = alloy_rlp::encode(TrieNode::Leaf(LeafNode::new(
let leaf_1 = alloy_rlp::encode(TrieNodeV2::Leaf(LeafNode::new(
Nibbles::default(),
leaf_value.clone(),
)));
let leaf_2 = alloy_rlp::encode(TrieNode::Leaf(LeafNode::new(
let leaf_2 = alloy_rlp::encode(TrieNodeV2::Leaf(LeafNode::new(
Nibbles::default(),
leaf_value.clone(),
)));
@@ -1789,9 +1394,11 @@ mod tests {
subtree: ProofNodes::from_iter([
(
Nibbles::default(),
alloy_rlp::encode(TrieNode::Branch(BranchNode {
alloy_rlp::encode(TrieNodeV2::Branch(BranchNodeV2 {
key: Nibbles::default(),
stack: vec![RlpNode::from_rlp(&leaf_1), RlpNode::from_rlp(&leaf_2)],
state_mask: TrieMask::new(0b11),
branch_rlp_node: None,
}))
.into(),
),
@@ -1862,20 +1469,22 @@ mod tests {
let mut sparse = SparseStateTrie::<ParallelSparseTrie>::default();
let leaf_value = alloy_rlp::encode(TrieAccount::default());
let leaf_1_node = TrieNode::Leaf(LeafNode::new(Nibbles::default(), leaf_value.clone()));
let leaf_2_node = TrieNode::Leaf(LeafNode::new(Nibbles::default(), leaf_value.clone()));
let leaf_1_node = TrieNodeV2::Leaf(LeafNode::new(Nibbles::default(), leaf_value.clone()));
let leaf_2_node = TrieNodeV2::Leaf(LeafNode::new(Nibbles::default(), leaf_value.clone()));
let branch_node = TrieNode::Branch(BranchNode {
let branch_node = TrieNodeV2::Branch(BranchNodeV2 {
key: Nibbles::default(),
stack: vec![
RlpNode::from_rlp(&alloy_rlp::encode(&leaf_1_node)),
RlpNode::from_rlp(&alloy_rlp::encode(&leaf_2_node)),
],
state_mask: TrieMask::new(0b11),
branch_rlp_node: None,
});
// Create V2 proof nodes with masks already included
let v2_proof_nodes = vec![
ProofTrieNode {
ProofTrieNodeV2 {
path: Nibbles::default(),
node: branch_node,
masks: Some(BranchNodeMasks {
@@ -1883,8 +1492,8 @@ mod tests {
tree_mask: TrieMask::default(),
}),
},
ProofTrieNode { path: Nibbles::from_nibbles([0x0]), node: leaf_1_node, masks: None },
ProofTrieNode { path: Nibbles::from_nibbles([0x1]), node: leaf_2_node, masks: None },
ProofTrieNodeV2 { path: Nibbles::from_nibbles([0x0]), node: leaf_1_node, masks: None },
ProofTrieNodeV2 { path: Nibbles::from_nibbles([0x1]), node: leaf_2_node, masks: None },
];
// Reveal V2 proof nodes
@@ -1923,21 +1532,25 @@ mod tests {
let mut sparse = SparseStateTrie::<ParallelSparseTrie>::default();
let storage_value: Vec<u8> = alloy_rlp::encode_fixed_size(&U256::from(42)).to_vec();
let leaf_1_node = TrieNode::Leaf(LeafNode::new(Nibbles::default(), storage_value.clone()));
let leaf_2_node = TrieNode::Leaf(LeafNode::new(Nibbles::default(), storage_value.clone()));
let leaf_1_node =
TrieNodeV2::Leaf(LeafNode::new(Nibbles::default(), storage_value.clone()));
let leaf_2_node =
TrieNodeV2::Leaf(LeafNode::new(Nibbles::default(), storage_value.clone()));
let branch_node = TrieNode::Branch(BranchNode {
let branch_node = TrieNodeV2::Branch(BranchNodeV2 {
key: Nibbles::default(),
stack: vec![
RlpNode::from_rlp(&alloy_rlp::encode(&leaf_1_node)),
RlpNode::from_rlp(&alloy_rlp::encode(&leaf_2_node)),
],
state_mask: TrieMask::new(0b11),
branch_rlp_node: None,
});
let v2_proof_nodes = vec![
ProofTrieNode { path: Nibbles::default(), node: branch_node, masks: None },
ProofTrieNode { path: Nibbles::from_nibbles([0x0]), node: leaf_1_node, masks: None },
ProofTrieNode { path: Nibbles::from_nibbles([0x1]), node: leaf_2_node, masks: None },
ProofTrieNodeV2 { path: Nibbles::default(), node: branch_node, masks: None },
ProofTrieNodeV2 { path: Nibbles::from_nibbles([0x0]), node: leaf_1_node, masks: None },
ProofTrieNodeV2 { path: Nibbles::from_nibbles([0x1]), node: leaf_2_node, masks: None },
];
// Reveal V2 storage proof nodes for account
@@ -2132,51 +1745,4 @@ mod tests {
}
);
}
#[test]
fn test_filter_map_revealed_nodes() {
let mut revealed_nodes = HashSet::from_iter([Nibbles::from_nibbles([0x0])]);
let leaf = TrieNode::Leaf(LeafNode::new(Nibbles::default(), alloy_rlp::encode([])));
let leaf_encoded = alloy_rlp::encode(&leaf);
let branch = TrieNode::Branch(BranchNode::new(
vec![RlpNode::from_rlp(&leaf_encoded), RlpNode::from_rlp(&leaf_encoded)],
TrieMask::new(0b11),
));
let proof_nodes = alloy_trie::proof::DecodedProofNodes::from_iter([
(Nibbles::default(), branch.clone()),
(Nibbles::from_nibbles([0x0]), leaf.clone()),
(Nibbles::from_nibbles([0x1]), leaf.clone()),
]);
let branch_node_masks = BranchNodeMasksMap::default();
let decoded =
filter_map_revealed_nodes(proof_nodes, &mut revealed_nodes, &branch_node_masks)
.unwrap();
assert_eq!(
decoded,
FilterMappedProofNodes {
root_node: Some(ProofTrieNode {
path: Nibbles::default(),
node: branch,
masks: None,
}),
nodes: vec![ProofTrieNode {
path: Nibbles::from_nibbles([0x1]),
node: leaf,
masks: None,
}],
// Branch, two of its children, one leaf
new_nodes: 4,
// Metric values
metric_values: ProofNodesMetricValues {
// Branch, leaf, leaf
total_nodes: 3,
// Revealed leaf node with path 0x1
skipped_nodes: 1,
},
}
);
}
}

View File

@@ -9,7 +9,7 @@ use alloy_primitives::{
};
use alloy_trie::BranchNodeCompact;
use reth_execution_errors::SparseTrieResult;
use reth_trie_common::{BranchNodeMasks, Nibbles, ProofTrieNode, TrieNode};
use reth_trie_common::{BranchNodeMasks, Nibbles, ProofTrieNodeV2, TrieNodeV2};
use crate::provider::TrieNodeProvider;
@@ -59,7 +59,7 @@ pub trait SparseTrie: Sized + Debug + Send + Sync {
/// May panic if the trie is not new/cleared, and has already revealed nodes.
fn set_root(
&mut self,
root: TrieNode,
root: TrieNodeV2,
masks: Option<BranchNodeMasks>,
retain_updates: bool,
) -> SparseTrieResult<()>;
@@ -69,7 +69,7 @@ pub trait SparseTrie: Sized + Debug + Send + Sync {
/// See [`Self::set_root`] for more details.
fn with_root(
mut self,
root: TrieNode,
root: TrieNodeV2,
masks: Option<BranchNodeMasks>,
retain_updates: bool,
) -> SparseTrieResult<Self> {
@@ -111,10 +111,10 @@ pub trait SparseTrie: Sized + Debug + Send + Sync {
fn reveal_node(
&mut self,
path: Nibbles,
node: TrieNode,
node: TrieNodeV2,
masks: Option<BranchNodeMasks>,
) -> SparseTrieResult<()> {
self.reveal_nodes(&mut [ProofTrieNode { path, node, masks }])
self.reveal_nodes(&mut [ProofTrieNodeV2 { path, node, masks }])
}
/// Reveals one or more trie nodes if they have not been revealed before.
@@ -135,8 +135,8 @@ pub trait SparseTrie: Sized + Debug + Send + Sync {
/// # Note
///
/// The implementation may modify the input nodes. A common thing to do is [`std::mem::replace`]
/// each node with [`TrieNode::EmptyRoot`] to avoid cloning.
fn reveal_nodes(&mut self, nodes: &mut [ProofTrieNode]) -> SparseTrieResult<()>;
/// each node with [`TrieNodeV2::EmptyRoot`] to avoid cloning.
fn reveal_nodes(&mut self, nodes: &mut [ProofTrieNodeV2]) -> SparseTrieResult<()>;
/// Updates the value of a leaf node at the specified path.
///

View File

@@ -5,7 +5,7 @@ use crate::{
use alloc::{boxed::Box, vec::Vec};
use alloy_primitives::{map::B256Map, B256};
use reth_execution_errors::{SparseTrieErrorKind, SparseTrieResult};
use reth_trie_common::{BranchNodeMasks, Nibbles, RlpNode, TrieMask, TrieNode};
use reth_trie_common::{BranchNodeMasks, Nibbles, RlpNode, TrieMask, TrieNode, TrieNodeV2};
use tracing::instrument;
/// A sparse trie that is either in a "blind" state (no nodes are revealed, root node hash is
@@ -63,7 +63,7 @@ impl<T: SparseTrieTrait + Default> RevealableSparseTrie<T> {
/// A mutable reference to the underlying [`RevealableSparseTrie`](SparseTrieTrait).
pub fn reveal_root(
&mut self,
root: TrieNode,
root: TrieNodeV2,
masks: Option<BranchNodeMasks>,
retain_updates: bool,
) -> SparseTrieResult<&mut T> {

View File

@@ -31,7 +31,7 @@ pub use trie_node::*;
/// on the hash builder and follows the same algorithm as the state root calculator.
/// See `StateRoot::root` for more info.
#[derive(Debug)]
pub struct Proof<T, H> {
pub struct Proof<T, H, K = AddedRemovedKeys> {
/// The factory for traversing trie nodes.
trie_cursor_factory: T,
/// The factory for hashed cursors.
@@ -40,6 +40,8 @@ pub struct Proof<T, H> {
prefix_sets: TriePrefixSetsMut,
/// Flag indicating whether to include branch node masks in the proof.
collect_branch_node_masks: bool,
/// Added and removed keys for proof retention.
added_removed_keys: Option<K>,
}
impl<T, H> Proof<T, H> {
@@ -50,26 +52,31 @@ impl<T, H> Proof<T, H> {
hashed_cursor_factory: h,
prefix_sets: TriePrefixSetsMut::default(),
collect_branch_node_masks: false,
added_removed_keys: None,
}
}
}
impl<T, H, K> Proof<T, H, K> {
/// Set the trie cursor factory.
pub fn with_trie_cursor_factory<TF>(self, trie_cursor_factory: TF) -> Proof<TF, H> {
pub fn with_trie_cursor_factory<TF>(self, trie_cursor_factory: TF) -> Proof<TF, H, K> {
Proof {
trie_cursor_factory,
hashed_cursor_factory: self.hashed_cursor_factory,
prefix_sets: self.prefix_sets,
collect_branch_node_masks: self.collect_branch_node_masks,
added_removed_keys: self.added_removed_keys,
}
}
/// Set the hashed cursor factory.
pub fn with_hashed_cursor_factory<HF>(self, hashed_cursor_factory: HF) -> Proof<T, HF> {
pub fn with_hashed_cursor_factory<HF>(self, hashed_cursor_factory: HF) -> Proof<T, HF, K> {
Proof {
trie_cursor_factory: self.trie_cursor_factory,
hashed_cursor_factory,
prefix_sets: self.prefix_sets,
collect_branch_node_masks: self.collect_branch_node_masks,
added_removed_keys: self.added_removed_keys,
}
}
@@ -85,6 +92,21 @@ impl<T, H> Proof<T, H> {
self
}
/// Configures the proof to retain certain nodes which would otherwise fall outside the target
/// set, when those nodes might be required to calculate the state root when keys have been
/// added or removed to the trie.
///
/// If None is given then retention of extra proofs is disabled.
pub fn with_added_removed_keys<K2>(self, added_removed_keys: Option<K2>) -> Proof<T, H, K2> {
Proof {
trie_cursor_factory: self.trie_cursor_factory,
hashed_cursor_factory: self.hashed_cursor_factory,
prefix_sets: self.prefix_sets,
collect_branch_node_masks: self.collect_branch_node_masks,
added_removed_keys,
}
}
/// Get a reference to the trie cursor factory.
pub const fn trie_cursor_factory(&self) -> &T {
&self.trie_cursor_factory
@@ -96,10 +118,11 @@ impl<T, H> Proof<T, H> {
}
}
impl<T, H> Proof<T, H>
impl<T, H, K> Proof<T, H, K>
where
T: TrieCursorFactory + Clone,
H: HashedCursorFactory + Clone,
K: AsRef<AddedRemovedKeys>,
{
/// Generate an account proof from intermediate nodes.
pub fn account_proof(
@@ -126,10 +149,13 @@ where
// Create the walker.
let mut prefix_set = self.prefix_sets.account_prefix_set.clone();
prefix_set.extend_keys(targets.keys().map(Nibbles::unpack));
let walker = TrieWalker::<_>::state_trie(trie_cursor, prefix_set.freeze());
let walker =
TrieWalker::<_, AddedRemovedKeys>::state_trie(trie_cursor, prefix_set.freeze())
.with_added_removed_keys(self.added_removed_keys.as_ref());
// Create a hash builder to rebuild the root node since it is not available in the database.
let retainer = targets.keys().map(Nibbles::unpack).collect();
let retainer: ProofRetainer = targets.keys().map(Nibbles::unpack).collect();
let retainer = retainer.with_added_removed_keys(self.added_removed_keys.as_ref());
let mut hash_builder = HashBuilder::default()
.with_proof_retainer(retainer)
.with_updates(self.collect_branch_node_masks);

View File

@@ -15,7 +15,9 @@ use alloy_primitives::{keccak256, B256, U256};
use alloy_rlp::Encodable;
use alloy_trie::{BranchNodeCompact, TrieMask};
use reth_execution_errors::trie::StateProofError;
use reth_trie_common::{BranchNode, BranchNodeMasks, Nibbles, ProofTrieNode, RlpNode, TrieNode};
use reth_trie_common::{
BranchNodeMasks, BranchNodeRef, BranchNodeV2, Nibbles, ProofTrieNodeV2, RlpNode, TrieNodeV2,
};
use std::cmp::Ordering;
use tracing::{error, instrument, trace};
@@ -86,7 +88,7 @@ pub struct ProofCalculator<TC, HC, VE: LeafValueEncoder> {
cached_branch_stack: Vec<(Nibbles, BranchNodeCompact)>,
/// The proofs which will be returned from the calculation. This gets taken at the end of every
/// proof call.
retained_proofs: Vec<ProofTrieNode>,
retained_proofs: Vec<ProofTrieNodeV2>,
/// Free-list of re-usable buffers of [`RlpNode`]s, used for encoding branch nodes to RLP.
///
/// We are generally able to re-use these buffers across different branch nodes for the
@@ -193,7 +195,7 @@ where
trace!(target: TRACE_TARGET, ?path, target = ?lower, "should_retain: called");
debug_assert!(self.retained_proofs.last().is_none_or(
|ProofTrieNode { path: last_retained_path, .. }| {
|ProofTrieNodeV2 { path: last_retained_path, .. }| {
depth_first::cmp(path, last_retained_path) == Ordering::Greater
}
),
@@ -268,14 +270,14 @@ where
if self.should_retain(targets, &child_path, true) {
trace!(target: TRACE_TARGET, ?child_path, "Retaining child");
// Convert to `ProofTrieNode`, which will be what is retained.
// Convert to `ProofTrieNodeV2`, which will be what is retained.
//
// If this node is a branch then its `rlp_nodes_buf` will be taken and not returned to
// the `rlp_nodes_bufs` free-list.
self.rlp_encode_buf.clear();
let proof_node = child.into_proof_trie_node(child_path, &mut self.rlp_encode_buf)?;
// Use the `ProofTrieNode` to encode the `RlpNode`, and then push it onto retained
// Use the `ProofTrieNodeV2` to encode the `RlpNode`, and then push it onto retained
// nodes before returning.
self.rlp_encode_buf.clear();
proof_node.node.encode(&mut self.rlp_encode_buf);
@@ -540,17 +542,19 @@ where
self.branch_path.len() - branch.ext_len as usize,
);
// Wrap the `BranchNode` so it can be pushed onto the child stack.
let mut branch_as_child = ProofTrieBranchChild::Branch {
node: BranchNode::new(rlp_nodes_buf, branch.state_mask),
masks: branch.masks,
// Compute hash for the branch node if it has a parent extension.
let rlp_node = if short_key.is_empty() {
None
} else {
self.rlp_encode_buf.clear();
BranchNodeRef::new(&rlp_nodes_buf, branch.state_mask).encode(&mut self.rlp_encode_buf);
Some(RlpNode::from_rlp(&self.rlp_encode_buf))
};
// If there is an extension then encode the branch as an `RlpNode` and use it to construct
// the extension in its place
if !short_key.is_empty() {
let branch_rlp_node = self.commit_child(targets, self.branch_path, branch_as_child)?;
branch_as_child = ProofTrieBranchChild::Extension { short_key, child: branch_rlp_node };
// Wrap the `BranchNodeV2` so it can be pushed onto the child stack.
let branch_as_child = ProofTrieBranchChild::Branch {
node: BranchNodeV2::new(short_key, rlp_nodes_buf, branch.state_mask, rlp_node),
masks: branch.masks,
};
self.child_stack.push(branch_as_child);
@@ -1264,9 +1268,9 @@ where
(true, None) => {
// If `child_stack` is empty it means there was no keys at all, retain an empty
// root node.
self.retained_proofs.push(ProofTrieNode {
self.retained_proofs.push(ProofTrieNodeV2 {
path: Nibbles::new(), // root path
node: TrieNode::EmptyRoot,
node: TrieNodeV2::EmptyRoot,
masks: None,
});
}
@@ -1288,7 +1292,7 @@ where
&mut self,
value_encoder: &mut VE,
targets: &mut [Target],
) -> Result<Vec<ProofTrieNode>, StateProofError> {
) -> Result<Vec<ProofTrieNodeV2>, StateProofError> {
// If there are no targets then nothing could be returned, return early.
if targets.is_empty() {
trace!(target: TRACE_TARGET, "Empty targets, returning");
@@ -1332,7 +1336,7 @@ where
&mut self,
value_encoder: &mut VE,
targets: &mut [Target],
) -> Result<Vec<ProofTrieNode>, StateProofError> {
) -> Result<Vec<ProofTrieNodeV2>, StateProofError> {
self.trie_cursor.reset();
self.hashed_cursor.reset();
self.proof_inner(value_encoder, targets)
@@ -1346,7 +1350,7 @@ where
/// This method reuses the internal RLP encode buffer for efficiency.
pub fn compute_root_hash(
&mut self,
proof_nodes: &[ProofTrieNode],
proof_nodes: &[ProofTrieNodeV2],
) -> Result<Option<B256>, StateProofError> {
// Find the root node (node at empty path)
let root_node = proof_nodes.iter().find(|node| node.path.is_empty());
@@ -1368,7 +1372,10 @@ where
/// This method does not accept targets nor retain proofs. Returns the root node which can
/// be used to compute the root hash via [`Self::compute_root_hash`].
#[instrument(target = TRACE_TARGET, level = "trace", skip(self, value_encoder))]
pub fn root_node(&mut self, value_encoder: &mut VE) -> Result<ProofTrieNode, StateProofError> {
pub fn root_node(
&mut self,
value_encoder: &mut VE,
) -> Result<ProofTrieNodeV2, StateProofError> {
// Initialize the variables which track the state of the two cursors. Both indicate the
// cursors are unseeked.
let mut trie_cursor_state = TrieCursorState::unseeked();
@@ -1434,15 +1441,15 @@ where
&mut self,
hashed_address: B256,
targets: &mut [Target],
) -> Result<Vec<ProofTrieNode>, StateProofError> {
) -> Result<Vec<ProofTrieNodeV2>, StateProofError> {
self.hashed_cursor.set_hashed_address(hashed_address);
// Shortcut: check if storage is empty
if self.hashed_cursor.is_storage_empty()? {
// Return a single EmptyRoot node at the root path
return Ok(vec![ProofTrieNode {
return Ok(vec![ProofTrieNodeV2 {
path: Nibbles::default(),
node: TrieNode::EmptyRoot,
node: TrieNodeV2::EmptyRoot,
masks: None,
}])
}
@@ -1464,13 +1471,13 @@ where
pub fn storage_root_node(
&mut self,
hashed_address: B256,
) -> Result<ProofTrieNode, StateProofError> {
) -> Result<ProofTrieNodeV2, StateProofError> {
self.hashed_cursor.set_hashed_address(hashed_address);
if self.hashed_cursor.is_storage_empty()? {
return Ok(ProofTrieNode {
return Ok(ProofTrieNodeV2 {
path: Nibbles::default(),
node: TrieNode::EmptyRoot,
node: TrieNodeV2::EmptyRoot,
masks: None,
})
}
@@ -1642,16 +1649,35 @@ mod tests {
};
use alloy_primitives::map::{B256Map, B256Set};
use alloy_rlp::Decodable;
use alloy_trie::proof::AddedRemovedKeys;
use itertools::Itertools;
use reth_primitives_traits::Account;
use reth_trie_common::{
updates::{StorageTrieUpdates, TrieUpdates},
HashedPostState, MultiProofTargets, TrieNode,
HashedPostState, MultiProofTargets, ProofTrieNode, TrieNode,
};
/// Target to use with the `tracing` crate.
static TRACE_TARGET: &str = "trie::proof_v2::tests";
/// Converts legacy proofs to V2 proofs by combining extension nodes with their child branch
/// nodes.
///
/// In the legacy proof format, extension nodes and branch nodes are separate. In the V2 format,
/// they are combined into a single `BranchNodeV2` where the extension's key becomes the
/// branch's `key` field.
///
/// Converts legacy proofs (sorted in depth-first order) to V2 format.
///
/// In depth-first order, children come BEFORE parents. So when we encounter an extension node,
/// its child branch has already been processed and is in the result. We need to pop it and
/// combine it with the extension.
fn convert_legacy_proofs_to_v2(legacy_proofs: &[ProofTrieNode]) -> Vec<ProofTrieNodeV2> {
ProofTrieNodeV2::from_sorted_trie_nodes(
legacy_proofs.iter().map(|p| (p.path, p.node.clone(), p.masks)),
)
}
/// A test harness for comparing `ProofCalculator` and legacy `Proof` implementations.
///
/// This harness creates mock cursor factories from a `HashedPostState` and provides
@@ -1752,6 +1778,12 @@ mod tests {
let proof_legacy_result =
Proof::new(self.trie_cursor_factory.clone(), self.hashed_cursor_factory.clone())
.with_branch_node_masks(true)
.with_added_removed_keys(Some(
// This will force the HashBuilder to always retain the child branch of all
// extensions. We need this because in V2 extensions and branches are a
// single node type, so child branches are always included with extensions.
AddedRemovedKeys::default().with_assume_added(true),
))
.multiproof(legacy_targets)?;
// Helper function to check if a node path matches at least one target
@@ -1764,12 +1796,10 @@ mod tests {
})
};
// Decode and sort legacy proof nodes, filtering to only those that match at least one
// target
// Decode and sort legacy proof nodes
let proof_legacy_nodes = proof_legacy_result
.account_subtree
.iter()
.filter(|(path, _)| node_matches_target(path))
.map(|(path, node_enc)| {
let mut buf = node_enc.as_ref();
let node = TrieNode::decode(&mut buf)
@@ -1788,8 +1818,18 @@ mod tests {
.sorted_by(|a, b| depth_first::cmp(&a.path, &b.path))
.collect::<Vec<_>>();
// Convert legacy proofs to V2 proofs by combining extensions with their child branches
let proof_legacy_nodes_v2 = convert_legacy_proofs_to_v2(&proof_legacy_nodes);
// Filter to only keep nodes which match a target. We do this after conversion so we
// don't keep branches whose extension parents are excluded due to a min_len.
let proof_legacy_nodes_v2 = proof_legacy_nodes_v2
.into_iter()
.filter(|ProofTrieNodeV2 { path, .. }| node_matches_target(path))
.collect::<Vec<_>>();
// Basic comparison: both should succeed and produce identical results
pretty_assertions::assert_eq!(proof_legacy_nodes, proof_v2_result);
pretty_assertions::assert_eq!(proof_legacy_nodes_v2, proof_v2_result);
// Also test root_node - get a fresh calculator and verify it returns the root node
// that hashes to the expected root
@@ -1876,7 +1916,7 @@ mod tests {
}
proptest! {
#![proptest_config(ProptestConfig::with_cases(8000))]
#![proptest_config(ProptestConfig::with_cases(4000))]
#[test]
/// Tests that ProofCalculator produces valid proofs for randomly generated
/// HashedPostState with proof targets.

View File

@@ -1,10 +1,9 @@
use crate::proof_v2::DeferredValueEncoder;
use alloy_rlp::Encodable;
use alloy_trie::nodes::ExtensionNodeRef;
use reth_execution_errors::trie::StateProofError;
use reth_trie_common::{
BranchNode, BranchNodeMasks, ExtensionNode, LeafNode, LeafNodeRef, Nibbles, ProofTrieNode,
RlpNode, TrieMask, TrieNode,
BranchNodeMasks, BranchNodeV2, LeafNode, LeafNodeRef, Nibbles, ProofTrieNodeV2, RlpNode,
TrieMask, TrieNodeV2,
};
/// A trie node which is the child of a branch in the trie.
@@ -17,17 +16,10 @@ pub(crate) enum ProofTrieBranchChild<RF> {
/// The [`DeferredValueEncoder`] which will encode the leaf's value.
value: RF,
},
/// An extension node whose child branch has been converted to an [`RlpNode`]
Extension {
/// The short key of the leaf.
short_key: Nibbles,
/// The [`RlpNode`] of the child branch.
child: RlpNode,
},
/// A branch node whose children have already been flattened into [`RlpNode`]s.
Branch {
/// The node itself, for use during RLP encoding.
node: BranchNode,
node: BranchNodeV2,
/// Bitmasks carried over from cached `BranchNodeCompact` values, if any.
masks: Option<BranchNodeMasks>,
},
@@ -36,8 +28,10 @@ pub(crate) enum ProofTrieBranchChild<RF> {
}
impl<RF: DeferredValueEncoder> ProofTrieBranchChild<RF> {
/// Converts this child into its RLP node representation. This potentially also returns an
/// `RlpNode` buffer which can be re-used for other [`ProofTrieBranchChild`]s.
/// Converts this child into its RLP node representation.
///
/// This potentially also returns an `RlpNode` buffer which can be re-used for other
/// [`ProofTrieBranchChild`]s.
pub(crate) fn into_rlp(
self,
buf: &mut Vec<u8>,
@@ -65,10 +59,6 @@ impl<RF: DeferredValueEncoder> ProofTrieBranchChild<RF> {
LeafNodeRef::new(&short_key, value_buf).encode(&mut leaf_buf);
Ok((RlpNode::from_rlp(&buf[value_enc_len..]), None))
}
Self::Extension { short_key, child } => {
ExtensionNodeRef::new(&short_key, child.as_slice()).encode(buf);
Ok((RlpNode::from_rlp(buf), None))
}
Self::Branch { node: branch_node, .. } => {
branch_node.encode(buf);
Ok((RlpNode::from_rlp(buf), Some(branch_node.stack)))
@@ -77,7 +67,7 @@ impl<RF: DeferredValueEncoder> ProofTrieBranchChild<RF> {
}
}
/// Converts this child into a [`ProofTrieNode`] having the given path.
/// Converts this child into a [`ProofTrieNodeV2`] having the given path.
///
/// # Panics
///
@@ -86,7 +76,7 @@ impl<RF: DeferredValueEncoder> ProofTrieBranchChild<RF> {
self,
path: Nibbles,
buf: &mut Vec<u8>,
) -> Result<ProofTrieNode, StateProofError> {
) -> Result<ProofTrieNodeV2, StateProofError> {
let (node, masks) = match self {
Self::Leaf { short_key, value } => {
value.encode(buf)?;
@@ -98,24 +88,22 @@ impl<RF: DeferredValueEncoder> ProofTrieBranchChild<RF> {
// this value, and the passed in buffer can remain with whatever large capacity it
// already has.
let rlp_val = buf.clone();
(TrieNode::Leaf(LeafNode::new(short_key, rlp_val)), None)
(TrieNodeV2::Leaf(LeafNode::new(short_key, rlp_val)), None)
}
Self::Extension { short_key, child } => {
(TrieNode::Extension(ExtensionNode { key: short_key, child }), None)
}
Self::Branch { node, masks } => (TrieNode::Branch(node), masks),
Self::Branch { node, masks } => (TrieNodeV2::Branch(node), masks),
Self::RlpNode(_) => panic!("Cannot call `into_proof_trie_node` on RlpNode"),
};
Ok(ProofTrieNode { node, path, masks })
Ok(ProofTrieNodeV2 { node, path, masks })
}
/// Returns the short key of the child, if it is a leaf or extension, or empty if its a
/// [`Self::Branch`] or [`Self::RlpNode`].
/// Returns the short key of the child, if it is a leaf or branch, or empty if its a
/// [`Self::RlpNode`].
pub(crate) fn short_key(&self) -> &Nibbles {
match self {
Self::Leaf { short_key, .. } | Self::Extension { short_key, .. } => short_key,
Self::Branch { .. } | Self::RlpNode(_) => {
Self::Leaf { short_key, .. } |
Self::Branch { node: BranchNodeV2 { key: short_key, .. }, .. } => short_key,
Self::RlpNode(_) => {
static EMPTY_NIBBLES: Nibbles = Nibbles::new();
&EMPTY_NIBBLES
}
@@ -134,14 +122,17 @@ impl<RF: DeferredValueEncoder> ProofTrieBranchChild<RF> {
/// - If the node is a [`Self::Branch`] or [`Self::RlpNode`]
pub(crate) fn trim_short_key_prefix(&mut self, len: usize) {
match self {
Self::Extension { short_key, child } if short_key.len() == len => {
*self = Self::RlpNode(core::mem::take(child));
}
Self::Leaf { short_key, .. } | Self::Extension { short_key, .. } => {
Self::Leaf { short_key, .. } => {
*short_key = trim_nibbles_prefix(short_key, len);
}
Self::Branch { .. } | Self::RlpNode(_) => {
panic!("Cannot call `trim_short_key_prefix` on Branch or RlpNode")
Self::Branch { node: BranchNodeV2 { key, branch_rlp_node, .. }, .. } => {
*key = trim_nibbles_prefix(key, len);
if key.is_empty() {
*branch_rlp_node = None;
}
}
Self::RlpNode(_) => {
panic!("Cannot call `trim_short_key_prefix` on RlpNode")
}
}
}

View File

@@ -6,36 +6,9 @@ use tracing::trace;
/// Compares two Nibbles in depth-first order.
///
/// In depth-first ordering:
/// - Descendants come before their ancestors (children before parents)
/// - Siblings are ordered lexicographically
///
/// # Example
///
/// ```text
/// 0x11 comes before 0x1 (child before parent)
/// 0x12 comes before 0x1 (child before parent)
/// 0x11 comes before 0x12 (lexicographical among siblings)
/// 0x1 comes before 0x21 (lexicographical among siblings)
/// Result: 0x11, 0x12, 0x1, 0x21
/// ```
/// See [`reth_trie_common::depth_first_cmp`] for details.
pub fn cmp(a: &Nibbles, b: &Nibbles) -> Ordering {
// If the two are of equal length, then compare them lexicographically
if a.len() == b.len() {
return a.cmp(b)
}
// If one is a prefix of the other, then the other comes first
let common_prefix_len = a.common_prefix_length(b);
if a.len() == common_prefix_len {
return Ordering::Greater
} else if b.len() == common_prefix_len {
return Ordering::Less
}
// Otherwise the nibble after the prefix determines the ordering. We know that neither is empty
// at this point, otherwise the previous if/else block would have caught it.
a.get_unchecked(common_prefix_len).cmp(&b.get_unchecked(common_prefix_len))
reth_trie_common::depth_first_cmp(a, b)
}
/// An iterator that traverses trie nodes in depth-first post-order.