fixed cache metrics

Alexey Shekhirin
2025-12-22 11:39:11 +00:00
committed by Alexey Shekhirin
parent 63e9bf8a5f
commit 51966218ca
5 changed files with 219 additions and 36 deletions

Cargo.lock (generated)

@@ -4037,6 +4037,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba59b6c98ba422a13f17ee1305c995cb5742bba7997f5b4d9af61b2ff0ffb213"
dependencies = [
"equivalent",
"typeid",
]
[[package]]
@@ -13336,6 +13337,12 @@ dependencies = [
"utf-8",
]
[[package]]
name = "typeid"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc7d623258602320d5c55d1bc22793b57daff0ec7efc270ea7d55ce1d5f5471c"
[[package]]
name = "typenum"
version = "1.19.0"


@@ -587,7 +587,7 @@ tracing-appender = "0.2"
url = { version = "2.3", default-features = false }
zstd = "0.13"
byteorder = "1"
fixed-cache = "0.1.1"
fixed-cache = { version = "0.1.3", features = ["stats"] }
moka = "0.12"
tar-no-std = { version = "0.3.2", default-features = false }
miniz_oxide = { version = "0.8.4", default-features = false }
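The dependency bump above is what enables the rest of this commit: fixed-cache 0.1.3 with the `stats` feature exposes the `Stats` wrapper and the `StatsHandler` callbacks used in the cached-state changes below. As a rough sketch of that API, assuming `StatsHandler` and `AnyRef` are exported from the crate root and that any `BuildHasher` is accepted; the `NoopHandler`, `RandomState` hasher, and `build_example_cache` function here are illustrative, not part of the commit:

use std::{collections::hash_map::RandomState, sync::Arc};

use fixed_cache::{AnyRef, FixedCache, Stats, StatsHandler};

/// Illustrative handler that ignores every cache event.
struct NoopHandler;

impl<K, V> StatsHandler<K, V> for NoopHandler {
    fn on_hit(&self, _key: &K, _value: &V) {}
    fn on_miss(&self, _key: AnyRef<'_>) {}
    fn on_collision(&self, _new_key: AnyRef<'_>, _existing_key: &K, _existing_value: &V) {}
}

fn build_example_cache() -> FixedCache<u64, u64, RandomState> {
    // Same pattern as `build_caches` further down: construct with a capacity and a
    // hasher, then opt into stats by attaching a handler.
    FixedCache::new(1024, RandomState::new()).with_stats(Some(Stats::new(Arc::new(NoopHandler))))
}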


@@ -3,6 +3,7 @@ use alloy_primitives::{
map::{DefaultHashBuilder, FbBuildHasher},
Address, StorageKey, StorageValue, B256,
};
use fixed_cache::Stats;
use metrics::Gauge;
use reth_errors::ProviderResult;
use reth_metrics::Metrics;
@@ -16,7 +17,10 @@ use reth_trie::{
updates::TrieUpdates, AccountProof, HashedPostState, HashedStorage, MultiProof,
MultiProofTargets, StorageMultiProof, StorageProof, TrieInput,
};
use std::sync::Arc;
use std::sync::{
atomic::{AtomicU64, Ordering},
Arc,
};
use tracing::{debug_span, instrument, trace};
/// Type alias for the fixed-cache used for accounts and storage.
@@ -119,6 +123,113 @@ impl CachedStateMetrics {
}
}
/// Metrics for fixed-cache internal stats (hits/misses/collisions tracked by the cache itself).
#[derive(Metrics, Clone)]
#[metrics(scope = "sync.caching.fixed_cache")]
pub(crate) struct FixedCacheMetrics {
/// Code cache hits
code_hits: Gauge,
/// Code cache misses
code_misses: Gauge,
/// Code cache collisions
code_collisions: Gauge,
/// Storage cache hits
storage_hits: Gauge,
/// Storage cache misses
storage_misses: Gauge,
/// Storage cache collisions
storage_collisions: Gauge,
/// Account cache hits
account_hits: Gauge,
/// Account cache misses
account_misses: Gauge,
/// Account cache collisions
account_collisions: Gauge,
}
impl FixedCacheMetrics {
/// Returns a new zeroed-out instance of [`FixedCacheMetrics`].
pub(crate) fn zeroed() -> Self {
let zeroed = Self::default();
zeroed.reset();
zeroed
}
/// Sets all values to zero.
pub(crate) fn reset(&self) {
self.code_hits.set(0);
self.code_misses.set(0);
self.code_collisions.set(0);
self.storage_hits.set(0);
self.storage_misses.set(0);
self.storage_collisions.set(0);
self.account_hits.set(0);
self.account_misses.set(0);
self.account_collisions.set(0);
}
}
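If the `Metrics` derive follows reth's usual convention, each field becomes a gauge named `scope.field`, e.g. `sync.caching.fixed_cache.code_hits`. A hand-rolled equivalent for a single field using the plain `metrics` crate; `register_code_hits` is a hypothetical helper showing roughly what the derive is assumed to generate:

use metrics::{gauge, Gauge};

/// Roughly the registration assumed to be produced for the `code_hits` field.
fn register_code_hits() -> Gauge {
    gauge!("sync.caching.fixed_cache.code_hits")
}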
/// A generic stats handler for fixed-cache that tracks hits, misses, and collisions.
#[derive(Debug)]
pub(crate) struct CacheStatsHandler {
hits: AtomicU64,
misses: AtomicU64,
collisions: AtomicU64,
}
impl CacheStatsHandler {
/// Creates a new stats handler with all counters initialized to zero.
pub(crate) const fn new() -> Self {
Self { hits: AtomicU64::new(0), misses: AtomicU64::new(0), collisions: AtomicU64::new(0) }
}
/// Returns the number of cache hits.
pub(crate) fn hits(&self) -> u64 {
self.hits.load(Ordering::Relaxed)
}
/// Returns the number of cache misses.
pub(crate) fn misses(&self) -> u64 {
self.misses.load(Ordering::Relaxed)
}
/// Returns the number of cache collisions.
pub(crate) fn collisions(&self) -> u64 {
self.collisions.load(Ordering::Relaxed)
}
/// Resets all counters to zero.
pub(crate) fn reset(&self) {
self.hits.store(0, Ordering::Relaxed);
self.misses.store(0, Ordering::Relaxed);
self.collisions.store(0, Ordering::Relaxed);
}
}
impl<K, V> StatsHandler<K, V> for CacheStatsHandler {
fn on_hit(&self, _key: &K, _value: &V) {
self.hits.fetch_add(1, Ordering::Relaxed);
}
fn on_miss(&self, _key: AnyRef<'_>) {
self.misses.fetch_add(1, Ordering::Relaxed);
}
fn on_collision(&self, _new_key: AnyRef<'_>, _existing_key: &K, _existing_value: &V) {
self.collisions.fetch_add(1, Ordering::Relaxed);
}
}
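Because the handler is nothing but relaxed atomic counters, it can be exercised without a cache at all. A hypothetical test, not part of the commit, with `u64` standing in for the real key and value types:

#[cfg(test)]
mod cache_stats_handler_sketch {
    use super::*;

    #[test]
    fn counts_and_resets() {
        let stats = CacheStatsHandler::new();
        // Each callback bumps exactly one counter.
        <CacheStatsHandler as StatsHandler<u64, u64>>::on_hit(&stats, &1, &2);
        <CacheStatsHandler as StatsHandler<u64, u64>>::on_hit(&stats, &1, &2);
        assert_eq!(stats.hits(), 2);
        assert_eq!(stats.misses(), 0);
        // `reset` zeroes everything, which is what `update_metrics` relies on below.
        stats.reset();
        assert_eq!(stats.hits(), 0);
    }
}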
impl<S: AccountReader> AccountReader for CachedStateProvider<S> {
fn basic_account(&self, address: &Address) -> ProviderResult<Option<Account>> {
if self.is_prewarm() {
@@ -333,6 +444,15 @@ pub(crate) struct ExecutionCache {
/// Cache for basic account information (nonce, balance, code hash).
account_cache: Arc<FixedCache<Address, Option<Account>, FbBuildHasher<20>>>,
/// Stats handler for the code cache.
code_stats: Arc<CacheStatsHandler>,
/// Stats handler for the storage cache.
storage_stats: Arc<CacheStatsHandler>,
/// Stats handler for the account cache.
account_stats: Arc<CacheStatsHandler>,
}
impl ExecutionCache {
@@ -475,6 +595,25 @@ impl ExecutionCache {
Ok(())
}
/// Updates the provided metrics with the current stats from the cache's stats handlers,
/// and resets the stats counters.
pub(crate) fn update_metrics(&self, metrics: &FixedCacheMetrics) {
metrics.code_hits.set(self.code_stats.hits() as f64);
metrics.code_misses.set(self.code_stats.misses() as f64);
metrics.code_collisions.set(self.code_stats.collisions() as f64);
self.code_stats.reset();
metrics.storage_hits.set(self.storage_stats.hits() as f64);
metrics.storage_misses.set(self.storage_stats.misses() as f64);
metrics.storage_collisions.set(self.storage_stats.collisions() as f64);
self.storage_stats.reset();
metrics.account_hits.set(self.account_stats.hits() as f64);
metrics.account_misses.set(self.account_stats.misses() as f64);
metrics.account_collisions.set(self.account_stats.collisions() as f64);
self.account_stats.reset();
}
}
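Since `update_metrics` publishes the raw counters and then resets them, the gauges report activity since the previous call rather than lifetime totals, so derived ratios such as hit rate are computed over the same interval. A hypothetical helper (`hit_rate`, not in the commit) showing the kind of derivation a consumer could do from one handler before the reset:

/// Hit rate of a single cache over the interval covered by its handler counters.
fn hit_rate(stats: &CacheStatsHandler) -> f64 {
    let hits = stats.hits() as f64;
    let total = hits + stats.misses() as f64;
    if total == 0.0 {
        0.0
    } else {
        hits / total
    }
}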
/// A builder for [`ExecutionCache`].
@@ -493,19 +632,26 @@ pub(crate) struct ExecutionCacheBuilder {
impl ExecutionCacheBuilder {
/// Build an [`ExecutionCache`] struct, so that execution caches can be easily cloned.
pub(crate) fn build_caches(self, _total_cache_size: u64) -> ExecutionCache {
let code_stats = Arc::new(CacheStatsHandler::new());
let storage_stats = Arc::new(CacheStatsHandler::new());
let account_stats = Arc::new(CacheStatsHandler::new());
ExecutionCache {
code_cache: Arc::new(FixedCache::new(
self.code_cache_entries,
FbBuildHasher::<32>::default(),
)),
storage_cache: Arc::new(FixedCache::new(
self.storage_cache_entries,
DefaultHashBuilder::default(),
)),
account_cache: Arc::new(FixedCache::new(
self.account_cache_entries,
FbBuildHasher::<20>::default(),
)),
code_cache: Arc::new(
FixedCache::new(self.code_cache_entries, FbBuildHasher::<32>::default())
.with_stats(Some(Stats::new(Arc::clone(&code_stats)))),
),
storage_cache: Arc::new(
FixedCache::new(self.storage_cache_entries, DefaultHashBuilder::default())
.with_stats(Some(Stats::new(Arc::clone(&storage_stats)))),
),
account_cache: Arc::new(
FixedCache::new(self.account_cache_entries, FbBuildHasher::<20>::default())
.with_stats(Some(Stats::new(Arc::clone(&account_stats)))),
),
code_stats,
storage_stats,
account_stats,
}
}
}
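Note that each stats handler is held twice: once inside the cache's `Stats` wrapper and once directly on `ExecutionCache`, both behind the same `Arc`, so clones of `ExecutionCache` feed one shared set of counters that `update_metrics` can read later. A small illustrative check of that sharing; `shared_counters_example` is hypothetical and reuses the handler defined above:

fn shared_counters_example() {
    use std::sync::Arc;

    let shared = Arc::new(CacheStatsHandler::new());
    let clone_a = Arc::clone(&shared);
    let clone_b = Arc::clone(&shared);
    // Hits recorded through either clone land in the same counters.
    <CacheStatsHandler as StatsHandler<u64, u64>>::on_hit(&clone_a, &1, &1);
    <CacheStatsHandler as StatsHandler<u64, u64>>::on_hit(&clone_b, &1, &1);
    assert_eq!(shared.hits(), 2);
}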
@@ -534,6 +680,9 @@ pub(crate) struct SavedCache {
/// Metrics for the cached state provider
metrics: CachedStateMetrics,
/// Metrics for fixed-cache internal stats
fixed_cache_metrics: FixedCacheMetrics,
/// A guard to track in-flight usage of this cache.
/// The cache is considered available if the strong count is 1.
usage_guard: Arc<()>,
@@ -541,8 +690,13 @@ pub(crate) struct SavedCache {
impl SavedCache {
/// Creates a new instance with the internals
pub(super) fn new(hash: B256, caches: ExecutionCache, metrics: CachedStateMetrics) -> Self {
Self { hash, caches, metrics, usage_guard: Arc::new(()) }
pub(super) fn new(
hash: B256,
caches: ExecutionCache,
metrics: CachedStateMetrics,
fixed_cache_metrics: FixedCacheMetrics,
) -> Self {
Self { hash, caches, metrics, fixed_cache_metrics, usage_guard: Arc::new(()) }
}
/// Returns the hash for this cache
@@ -551,8 +705,8 @@ impl SavedCache {
}
/// Splits the cache into its caches and metrics, consuming it.
pub(crate) fn split(self) -> (ExecutionCache, CachedStateMetrics) {
(self.caches, self.metrics)
pub(crate) fn split(self) -> (ExecutionCache, CachedStateMetrics, FixedCacheMetrics) {
(self.caches, self.metrics, self.fixed_cache_metrics)
}
/// Returns true if the cache is available for use (no other tasks are currently using it).
@@ -575,10 +729,9 @@ impl SavedCache {
&self.metrics
}
/// Updates the metrics for the [`ExecutionCache`].
pub(crate) const fn update_metrics(&self) {
// fixed-cache doesn't provide entry_count, so we can't track size accurately.
// We could track inserts manually if needed.
/// Updates the fixed-cache metrics from the stats handlers.
pub(crate) fn update_metrics(&self) {
self.caches.update_metrics(&self.fixed_cache_metrics);
}
}
@@ -663,7 +816,12 @@ mod tests {
#[test]
fn test_saved_cache_is_available() {
let execution_cache = ExecutionCacheBuilder::default().build_caches(1000);
let cache = SavedCache::new(B256::ZERO, execution_cache, CachedStateMetrics::zeroed());
let cache = SavedCache::new(
B256::ZERO,
execution_cache,
CachedStateMetrics::zeroed(),
FixedCacheMetrics::zeroed(),
);
assert!(cache.is_available(), "Cache should be available initially");
@@ -675,8 +833,12 @@ mod tests {
#[test]
fn test_saved_cache_multiple_references() {
let execution_cache = ExecutionCacheBuilder::default().build_caches(1000);
let cache =
SavedCache::new(B256::from([2u8; 32]), execution_cache, CachedStateMetrics::zeroed());
let cache = SavedCache::new(
B256::from([2u8; 32]),
execution_cache,
CachedStateMetrics::zeroed(),
FixedCacheMetrics::zeroed(),
);
let guard1 = cache.clone_guard_for_test();
let guard2 = cache.clone_guard_for_test();


@@ -4,7 +4,7 @@ use super::precompile_cache::PrecompileCacheMap;
use crate::tree::{
cached_state::{
CachedStateMetrics, CachedStateProvider, ExecutionCache as StateExecutionCache,
ExecutionCacheBuilder, SavedCache,
ExecutionCacheBuilder, FixedCacheMetrics, SavedCache,
},
payload_processor::{
prewarm::{PrewarmCacheTask, PrewarmContext, PrewarmMode, PrewarmTaskEvent},
@@ -476,7 +476,12 @@ where
} else {
debug!("creating new execution cache on cache miss");
let cache = ExecutionCacheBuilder::default().build_caches(self.cross_block_cache_size);
SavedCache::new(parent_hash, cache, CachedStateMetrics::zeroed())
SavedCache::new(
parent_hash,
cache,
CachedStateMetrics::zeroed(),
FixedCacheMetrics::zeroed(),
)
}
}
@@ -567,18 +572,22 @@ where
}
// Take existing cache (if any) or create fresh caches
let (caches, cache_metrics) = match cached.take() {
Some(existing) => {
existing.split()
}
let (caches, cache_metrics, fixed_cache_metrics) = match cached.take() {
Some(existing) => existing.split(),
None => (
ExecutionCacheBuilder::default().build_caches(self.cross_block_cache_size),
CachedStateMetrics::zeroed(),
FixedCacheMetrics::zeroed(),
),
};
// Insert the block's bundle state into cache
let new_cache = SavedCache::new(block_with_parent.block.hash, caches, cache_metrics);
let new_cache = SavedCache::new(
block_with_parent.block.hash,
caches,
cache_metrics,
fixed_cache_metrics,
);
if new_cache.cache().insert_state(bundle_state).is_err() {
*cached = None;
debug!(target: "engine::caching", "cleared execution cache on update error");
@@ -861,7 +870,7 @@ where
mod tests {
use super::ExecutionCache;
use crate::tree::{
cached_state::{CachedStateMetrics, ExecutionCacheBuilder, SavedCache},
cached_state::{CachedStateMetrics, ExecutionCacheBuilder, FixedCacheMetrics, SavedCache},
payload_processor::{
evm_state_to_hashed_post_state, executor::WorkloadExecutor, PayloadProcessor,
},
@@ -891,7 +900,12 @@ mod tests {
fn make_saved_cache(hash: B256) -> SavedCache {
let execution_cache = ExecutionCacheBuilder::default().build_caches(1_000);
SavedCache::new(hash, execution_cache, CachedStateMetrics::zeroed())
SavedCache::new(
hash,
execution_cache,
CachedStateMetrics::zeroed(),
FixedCacheMetrics::zeroed(),
)
}
#[test]


@@ -272,8 +272,8 @@ where
execution_cache.update_with_guard(|cached| {
// consumes the `SavedCache` held by the prewarming task, which releases its usage
// guard
let (caches, cache_metrics) = saved_cache.split();
let new_cache = SavedCache::new(hash, caches, cache_metrics);
let (caches, cache_metrics, fixed_cache_metrics) = saved_cache.split();
let new_cache = SavedCache::new(hash, caches, cache_metrics, fixed_cache_metrics);
// Insert state into cache while holding the lock
// Access the BundleState through the shared ExecutionOutcome