perf: reduce update_leaves key cloning (#22228)

This commit is contained in:
YK
2026-02-16 16:34:21 +08:00
committed by GitHub
parent 997af404a5
commit 74abad29ad

View File

@@ -1322,14 +1322,12 @@ impl SparseTrie for ParallelSparseTrie {
     ) -> SparseTrieResult<()> {
         use crate::{provider::NoRevealProvider, LeafUpdate};
-        // Collect keys upfront since we mutate `updates` during iteration.
-        // On success, entries are removed; on blinded node failure, they're re-inserted.
-        let keys: Vec<B256> = updates.keys().copied().collect();
+        // Drain updates to avoid cloning keys while preserving the map's allocation.
+        // On success, entries remain removed; on blinded node failure, they're re-inserted.
+        let drained: Vec<_> = updates.drain().collect();
-        for key in keys {
+        for (key, update) in drained {
             let full_path = Nibbles::unpack(key);
-            // Remove upfront - we'll re-insert if the operation fails due to blinded node.
-            let update = updates.remove(&key).unwrap();
             match update {
                 LeafUpdate::Changed(value) => {