refactor(trie): don't bother about recycling subtries (#23115)

This commit is contained in:
Sergei Shulepov
2026-03-19 19:16:21 +07:00
committed by GitHub
parent bd476289fa
commit 1e17f7cf67

View File

@@ -22,7 +22,7 @@ use reth_trie_common::{
BranchNodeMasks, BranchNodeRef, ExtensionNodeRef, LeafNodeRef, Nibbles, ProofTrieNodeV2,
RlpNode, TrieNodeV2, EMPTY_ROOT_HASH,
};
use slotmap::{DefaultKey, Key as _, SlotMap};
use slotmap::{DefaultKey, SlotMap};
use smallvec::SmallVec;
use tracing::{instrument, trace};
@@ -156,6 +156,27 @@ struct ArenaSparseSubtrie {
}
impl ArenaSparseSubtrie {
/// Creates a new subtrie with a pre-allocated root slot containing
/// [`ArenaSparseNode::EmptyRoot`]. The caller must overwrite `subtrie.arena[subtrie.root]`
/// before use.
fn new(record_updates: bool) -> Box<Self> {
let mut arena = SlotMap::new();
// Reserve the root slot up front so `root` is always a valid key into the arena.
// The `EmptyRoot` placeholder must be overwritten by the caller before use.
let root = arena.insert(ArenaSparseNode::EmptyRoot);
let buffers = ArenaTrieBuffers {
// Only allocate an updates accumulator when the caller asked for update recording.
updates: record_updates.then(SparseTrieUpdates::default),
..Default::default()
};
Box::new(Self {
arena,
root,
path: Nibbles::default(),
buffers,
required_proofs: Vec::new(),
// Counters start at zero; they are maintained as leaves are added/removed.
num_leaves: 0,
num_dirty_leaves: 0,
})
}
/// Asserts that `num_leaves` and `num_dirty_leaves` match the actual counts in the arena.
#[cfg(debug_assertions)]
fn debug_assert_counters(&self) {
@@ -173,14 +194,6 @@ impl ArenaSparseSubtrie {
);
}
/// Resets this subtrie to an empty state: drops all arena nodes (replacing the
/// `SlotMap` wholesale also releases its allocation, unlike an in-place clear),
/// clears the reusable buffers and pending proof requests, and zeroes the leaf
/// counters.
fn clear(&mut self) {
self.arena = SlotMap::new();
self.buffers.clear();
self.required_proofs.clear();
self.num_leaves = 0;
self.num_dirty_leaves = 0;
}
/// Prunes revealed subtrees that are not ancestors of any retained leaf.
///
/// `retained_leaves` must be sorted and scoped to this subtrie's key range. The method
@@ -548,11 +561,6 @@ impl Default for ArenaParallelismThresholds {
/// Pruned nodes are replaced with `ArenaSparseNodeBranchChild::Blinded` entries using their
/// cached RLP. Subtries are pruned in parallel when their leaf count exceeds
/// [`ArenaParallelismThresholds::min_leaves_for_prune`].
///
/// ## Subtrie Recycling
///
/// Cleared subtries are pooled in `cleared_subtries` and reused by
/// `take_or_create_cleared_subtrie` to avoid repeated arena allocations.
#[derive(Debug, Clone)]
pub struct ArenaParallelSparseTrie {
/// The arena allocating nodes in the upper trie.
@@ -563,9 +571,6 @@ pub struct ArenaParallelSparseTrie {
updates: Option<SparseTrieUpdates>,
/// Reusable buffers for traversal, RLP encoding, and update actions.
buffers: ArenaTrieBuffers,
/// Pool of cleared `ArenaSparseSubtrie`s available for reuse.
#[allow(clippy::vec_box)]
cleared_subtries: Vec<Box<ArenaSparseSubtrie>>,
/// Thresholds controlling when parallelism is enabled for different operations.
parallelism_thresholds: ArenaParallelismThresholds,
/// Debug recorder for tracking mutating operations.
@@ -583,14 +588,6 @@ impl ArenaParallelSparseTrie {
self
}
/// Returns the arena indexes of all [`ArenaSparseNode::Subtrie`] nodes in the upper arena.
///
/// Collected into a `SmallVec` with 16 inline slots so the common case avoids a
/// heap allocation while iterating.
fn all_subtries(&self) -> SmallVec<[Index; 16]> {
self.upper_arena
.iter()
// Keep only the keys whose node is the `Subtrie` variant.
.filter_map(|(idx, node)| matches!(node, ArenaSparseNode::Subtrie(_)).then_some(idx))
.collect()
}
/// Resets the debug recorder and records the current trie state as `SetRoot` + `RevealNodes`
/// ops, representing the initial state at the beginning of a block (after pruning).
///
@@ -739,31 +736,6 @@ impl ArenaParallelSparseTrie {
}
}
/// Takes a cleared [`ArenaSparseSubtrie`] from the pool (or creates a new one) and
/// pre-allocates a root slot with a placeholder. The caller must overwrite
/// `subtrie.arena[subtrie.root]` before use.
fn take_or_create_cleared_subtrie(&mut self) -> Box<ArenaSparseSubtrie> {
let mut subtrie = if let Some(s) = self.cleared_subtries.pop() {
// Pool invariant: subtries are cleared before being pushed onto the pool.
debug_assert!(s.arena.is_empty());
s
} else {
// Pool is empty: build a fresh subtrie. `root` starts as a null key and is
// assigned a real slot below.
Box::new(ArenaSparseSubtrie {
arena: SlotMap::new(),
root: Index::null(),
path: Nibbles::default(),
buffers: ArenaTrieBuffers::default(),
required_proofs: Vec::new(),
num_leaves: 0,
num_dirty_leaves: 0,
})
};
// Reserve the root slot so the caller can overwrite it in place.
subtrie.root = subtrie.arena.insert(ArenaSparseNode::EmptyRoot);
// Mirror the parent trie's update-recording setting onto the subtrie, reusing
// (and clearing) any updates buffer a pooled subtrie may already hold.
if self.updates.is_some() {
subtrie.buffers.updates.get_or_insert_with(SparseTrieUpdates::default).clear();
}
subtrie
}
/// Returns `true` if a node at the given path length should be placed in a subtrie rather
/// than the upper arena.
const fn should_be_subtrie(path_len: usize) -> bool {
@@ -788,7 +760,7 @@ impl ArenaParallelSparseTrie {
}
trace!(target: TRACE_TARGET, ?child_path, "Wrapping child into subtrie");
let mut subtrie = self.take_or_create_cleared_subtrie();
let mut subtrie = ArenaSparseSubtrie::new(self.updates.is_some());
subtrie.path = *child_path;
let mut root_node =
mem::replace(&mut self.upper_arena[child_idx], ArenaSparseNode::TakenSubtrie);
@@ -891,8 +863,7 @@ impl ArenaParallelSparseTrie {
self.maybe_collapse_or_remove_branch(cursor);
}
/// Merges buffered updates from a [`ArenaSparseNode::Subtrie`], clears it, and pushes it
/// onto `cleared_subtries` for reuse.
/// Merges buffered updates from a [`ArenaSparseNode::Subtrie`] and drops it.
///
/// # Panics
///
@@ -902,8 +873,6 @@ impl ArenaParallelSparseTrie {
unreachable!("recycle_subtrie called on non-Subtrie node")
};
Self::merge_subtrie_updates(&mut self.buffers.updates, &mut subtrie.buffers.updates);
subtrie.clear();
self.cleared_subtries.push(subtrie);
}
/// Removes a [`ArenaSparseNode::Subtrie`] from the upper arena at `idx` and recycles it.
@@ -1029,8 +998,6 @@ impl ArenaParallelSparseTrie {
&mut self.buffers.updates,
&mut subtrie.buffers.updates,
);
subtrie.clear();
self.cleared_subtries.push(subtrie);
// The migrated subtrie root may be a branch whose children now live in
// the upper arena at or beyond the subtrie boundary depth. Re-wrap any
@@ -2170,7 +2137,6 @@ impl Default for ArenaParallelSparseTrie {
root,
updates: None,
buffers: ArenaTrieBuffers::default(),
cleared_subtries: Vec::new(),
parallelism_thresholds: ArenaParallelismThresholds::default(),
#[cfg(feature = "trie-debug")]
debug_recorder: Default::default(),
@@ -2586,14 +2552,6 @@ impl SparseTrie for ArenaParallelSparseTrie {
#[cfg(feature = "trie-debug")]
self.debug_recorder.reset();
for idx in self.all_subtries() {
if let ArenaSparseNode::Subtrie(mut subtrie) =
self.upper_arena.remove(idx).expect("subtrie exists in arena")
{
subtrie.clear();
self.cleared_subtries.push(subtrie);
}
}
self.upper_arena = SlotMap::new();
self.root = self.upper_arena.insert(ArenaSparseNode::EmptyRoot);
if let Some(updates) = self.updates.as_mut() {
@@ -2602,16 +2560,8 @@ impl SparseTrie for ArenaParallelSparseTrie {
self.buffers.clear();
}
fn shrink_nodes_to(&mut self, size: usize) {
// We do not shrink the upper trie for now.
//
// As soon as a trie rotates from a live subtrie into the
// cleared_subtrie it will be properly shrunk.
for s in &mut self.cleared_subtries {
if s.arena.capacity() > size {
s.arena = SlotMap::with_capacity(size);
}
}
/// Intentionally a no-op for this implementation.
fn shrink_nodes_to(&mut self, _size: usize) {
// Subtries are recreated from scratch on each block, so there is nothing to shrink.
}
fn shrink_values_to(&mut self, _size: usize) {
@@ -2645,15 +2595,11 @@ impl SparseTrie for ArenaParallelSparseTrie {
})
.sum();
// Cleared subtries still hold allocated arena capacity.
let cleared_size: usize =
self.cleared_subtries.iter().map(|s| s.arena.capacity() * node_size).sum();
// RLP buffers.
let buffer_size = self.buffers.rlp_buf.capacity() +
self.buffers.rlp_node_buf.capacity() * core::mem::size_of::<RlpNode>();
upper + subtrie_size + cleared_size + buffer_size
upper + subtrie_size + buffer_size
}
#[instrument(