mirror of https://github.com/paradigmxyz/reth.git
feat(engine): cli argument to disable state cache (#20143)
@@ -91,6 +91,8 @@ pub struct TreeConfig {
     /// Whether to always compare trie updates from the state root task to the trie updates from
     /// the regular state root calculation.
     always_compare_trie_updates: bool,
+    /// Whether to disable state cache.
+    disable_state_cache: bool,
     /// Whether to disable parallel prewarming.
     disable_prewarming: bool,
     /// Whether to disable the parallel sparse trie state root algorithm.
@@ -145,6 +147,7 @@ impl Default for TreeConfig {
             max_execute_block_batch_size: DEFAULT_MAX_EXECUTE_BLOCK_BATCH_SIZE,
             legacy_state_root: false,
             always_compare_trie_updates: false,
+            disable_state_cache: false,
             disable_prewarming: false,
             disable_parallel_sparse_trie: false,
             state_provider_metrics: false,
@@ -175,6 +178,7 @@ impl TreeConfig {
         max_execute_block_batch_size: usize,
         legacy_state_root: bool,
         always_compare_trie_updates: bool,
+        disable_state_cache: bool,
         disable_prewarming: bool,
         disable_parallel_sparse_trie: bool,
         state_provider_metrics: bool,
@@ -199,6 +203,7 @@ impl TreeConfig {
             max_execute_block_batch_size,
             legacy_state_root,
             always_compare_trie_updates,
+            disable_state_cache,
             disable_prewarming,
             disable_parallel_sparse_trie,
             state_provider_metrics,
@@ -273,7 +278,12 @@ impl TreeConfig {
         self.disable_parallel_sparse_trie
     }
 
-    /// Returns whether or not parallel prewarming should be used.
+    /// Returns whether or not state cache is disabled.
+    pub const fn disable_state_cache(&self) -> bool {
+        self.disable_state_cache
+    }
+
+    /// Returns whether or not parallel prewarming is disabled.
     pub const fn disable_prewarming(&self) -> bool {
         self.disable_prewarming
     }
@@ -365,6 +375,12 @@ impl TreeConfig {
         self
     }
 
+    /// Setter for whether to disable state cache.
+    pub const fn without_state_cache(mut self, disable_state_cache: bool) -> Self {
+        self.disable_state_cache = disable_state_cache;
+        self
+    }
+
     /// Setter for whether to disable parallel prewarming.
     pub const fn without_prewarming(mut self, disable_prewarming: bool) -> Self {
         self.disable_prewarming = disable_prewarming;
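Downstream, this gives TreeConfig the usual builder-style surface for the new flag: a `without_state_cache` setter and a `disable_state_cache` getter. A minimal, runnable sketch of how a caller drives it, using a toy stand-in rather than the real `TreeConfig` (only the field added in this diff is modeled):

/// Toy stand-in for `TreeConfig`; models only the field added in this diff.
#[derive(Debug, Default)]
struct TreeConfigSketch {
    disable_state_cache: bool,
}

impl TreeConfigSketch {
    /// Mirrors the `without_state_cache` setter added above.
    fn without_state_cache(mut self, disable_state_cache: bool) -> Self {
        self.disable_state_cache = disable_state_cache;
        self
    }

    /// Mirrors the `disable_state_cache` getter added above.
    fn disable_state_cache(&self) -> bool {
        self.disable_state_cache
    }
}

fn main() {
    // Roughly what `EngineArgs::tree_config()` does further down in this diff.
    let config = TreeConfigSketch::default().without_state_cache(true);
    assert!(config.disable_state_cache());
}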
@@ -106,6 +106,8 @@ where
     cross_block_cache_size: u64,
     /// Whether transactions should not be executed on prewarming task.
     disable_transaction_prewarming: bool,
+    /// Whether state cache should be disable
+    disable_state_cache: bool,
     /// Determines how to configure the evm for execution.
     evm_config: Evm,
     /// Whether precompile cache should be disabled.
@@ -149,6 +151,7 @@ where
             cross_block_cache_size: config.cross_block_cache_size(),
             disable_transaction_prewarming: config.disable_prewarming(),
             evm_config,
+            disable_state_cache: config.disable_state_cache(),
             precompile_cache_disabled: config.precompile_cache_disabled(),
             precompile_cache_map,
             sparse_state_trie: Arc::default(),
@@ -382,9 +385,15 @@ where
             transactions = mpsc::channel().1;
         }
 
-        let saved_cache = self.cache_for(env.parent_hash);
-        let cache = saved_cache.cache().clone();
-        let cache_metrics = saved_cache.metrics().clone();
+        let (saved_cache, cache, cache_metrics) = if self.disable_state_cache {
+            (None, None, None)
+        } else {
+            let saved_cache = self.cache_for(env.parent_hash);
+            let cache = saved_cache.cache().clone();
+            let cache_metrics = saved_cache.metrics().clone();
+            (Some(saved_cache), Some(cache), Some(cache_metrics))
+        };
 
         // configure prewarming
         let prewarm_ctx = PrewarmContext {
             env,
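The core of the change is the `if`/`else` above: when `disable_state_cache` is set, no cache is constructed at all and `None` is threaded through instead. A self-contained sketch of the same selection pattern, with toy types standing in for the real cache and metrics:

struct CacheSketch;
struct MetricsSketch;

/// Stand-in for `self.cache_for(parent_hash)`: builds the saved cache plus its metrics.
fn cache_for() -> (CacheSketch, MetricsSketch) {
    (CacheSketch, MetricsSketch)
}

/// Mirrors the shape of the hunk above: either nothing, or all cache handles at once.
fn select_caches(disable_state_cache: bool) -> (Option<CacheSketch>, Option<MetricsSketch>) {
    if disable_state_cache {
        // Disabled: no cache is constructed at all.
        (None, None)
    } else {
        let (cache, metrics) = cache_for();
        (Some(cache), Some(metrics))
    }
}

fn main() {
    assert!(select_caches(true).0.is_none());
    assert!(select_caches(false).0.is_some());
}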
@@ -596,12 +605,12 @@ impl<Tx, Err> PayloadHandle<Tx, Err> {
     }
 
     /// Returns a clone of the caches used by prewarming
-    pub(super) fn caches(&self) -> StateExecutionCache {
+    pub(super) fn caches(&self) -> Option<StateExecutionCache> {
         self.prewarm_handle.cache.clone()
     }
 
     /// Returns a clone of the cache metrics used by prewarming
-    pub(super) fn cache_metrics(&self) -> CachedStateMetrics {
+    pub(super) fn cache_metrics(&self) -> Option<CachedStateMetrics> {
         self.prewarm_handle.cache_metrics.clone()
     }
 
@@ -631,9 +640,9 @@ impl<Tx, Err> PayloadHandle<Tx, Err> {
 #[derive(Debug)]
 pub(crate) struct CacheTaskHandle {
     /// The shared cache the task operates with.
-    cache: StateExecutionCache,
+    cache: Option<StateExecutionCache>,
     /// Metrics for the caches
-    cache_metrics: CachedStateMetrics,
+    cache_metrics: Option<CachedStateMetrics>,
     /// Channel to the spawned prewarm task if any
     to_prewarm_task: Option<std::sync::mpsc::Sender<PrewarmTaskEvent>>,
 }
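With the handle fields now optional, the accessors simply hand back clones of the `Option`s, and the call sites shown further down decide what to do when no cache exists. A tiny stand-alone sketch of that accessor shape (toy types, not the reth structs):

#[derive(Clone)]
struct CacheSketch;

struct HandleSketch {
    cache: Option<CacheSketch>,
}

impl HandleSketch {
    /// Like the amended `caches()`: hand the caller an owned clone of the optional cache.
    fn caches(&self) -> Option<CacheSketch> {
        self.cache.clone()
    }
}

fn main() {
    let handle = HandleSketch { cache: None };
    // With the state cache disabled the accessor simply yields `None`.
    assert!(handle.caches().is_none());
}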
@@ -29,7 +29,7 @@ use metrics::{Counter, Gauge, Histogram};
 use reth_evm::{execute::ExecutableTxFor, ConfigureEvm, Evm, EvmFor, SpecFor};
 use reth_metrics::Metrics;
 use reth_primitives_traits::NodePrimitives;
-use reth_provider::{BlockReader, StateProviderFactory, StateReader};
+use reth_provider::{BlockReader, StateProviderBox, StateProviderFactory, StateReader};
 use reth_revm::{database::StateProviderDatabase, db::BundleState, state::EvmState};
 use reth_trie::MultiProofTargets;
 use std::{
@@ -255,31 +255,35 @@ where
             self;
         let hash = env.hash;
 
-        debug!(target: "engine::caching", parent_hash=?hash, "Updating execution cache");
-        // Perform all cache operations atomically under the lock
-        execution_cache.update_with_guard(|cached| {
-            // consumes the `SavedCache` held by the prewarming task, which releases its usage guard
-            let (caches, cache_metrics) = saved_cache.split();
-            let new_cache = SavedCache::new(hash, caches, cache_metrics);
+        if let Some(saved_cache) = saved_cache {
+            debug!(target: "engine::caching", parent_hash=?hash, "Updating execution cache");
+            // Perform all cache operations atomically under the lock
+            execution_cache.update_with_guard(|cached| {
+                // consumes the `SavedCache` held by the prewarming task, which releases its usage
+                // guard
+                let (caches, cache_metrics) = saved_cache.split();
+                let new_cache = SavedCache::new(hash, caches, cache_metrics);
 
-            // Insert state into cache while holding the lock
-            if new_cache.cache().insert_state(&state).is_err() {
-                // Clear the cache on error to prevent having a polluted cache
-                *cached = None;
-                debug!(target: "engine::caching", "cleared execution cache on update error");
-                return;
-            }
+                // Insert state into cache while holding the lock
+                if new_cache.cache().insert_state(&state).is_err() {
+                    // Clear the cache on error to prevent having a polluted cache
+                    *cached = None;
+                    debug!(target: "engine::caching", "cleared execution cache on update error");
+                    return;
+                }
 
-            new_cache.update_metrics();
+                new_cache.update_metrics();
 
-            // Replace the shared cache with the new one; the previous cache (if any) is dropped.
-            *cached = Some(new_cache);
-        });
+                // Replace the shared cache with the new one; the previous cache (if any) is
+                // dropped.
+                *cached = Some(new_cache);
+            });
 
-        let elapsed = start.elapsed();
-        debug!(target: "engine::caching", parent_hash=?hash, elapsed=?elapsed, "Updated execution cache");
+            let elapsed = start.elapsed();
+            debug!(target: "engine::caching", parent_hash=?hash, elapsed=?elapsed, "Updated execution cache");
 
-        metrics.cache_saving_duration.set(elapsed.as_secs_f64());
+            metrics.cache_saving_duration.set(elapsed.as_secs_f64());
+        }
     }
 
     /// Executes the task.
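Since prewarming may now run without a cache, the whole save step is wrapped in `if let Some(saved_cache)` and silently does nothing when the cache was disabled. As a design note, the same behaviour could be written as an early return with `let ... else` (stable since Rust 1.65); a hedged sketch of that alternative shape, not what the diff actually uses:

fn save_cache_sketch(saved_cache: Option<String>) {
    // Early-return alternative to wrapping the whole body in `if let`.
    let Some(saved_cache) = saved_cache else {
        // State cache disabled: nothing was prewarmed into a cache, so nothing to save.
        return;
    };
    println!("saving cache for {saved_cache}");
}

fn main() {
    save_cache_sketch(None);
    save_cache_sketch(Some("parent-hash".to_string()));
}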
@@ -356,7 +360,7 @@ where
 {
     pub(super) env: ExecutionEnv<Evm>,
     pub(super) evm_config: Evm,
-    pub(super) saved_cache: SavedCache,
+    pub(super) saved_cache: Option<SavedCache>,
     /// Provider to obtain the state
     pub(super) provider: StateProviderBuilder<N, P>,
     pub(super) metrics: PrewarmMetrics,
@@ -400,10 +404,13 @@ where
         };
 
         // Use the caches to create a new provider with caching
-        let caches = saved_cache.cache().clone();
-        let cache_metrics = saved_cache.metrics().clone();
-        let state_provider =
-            CachedStateProvider::new_with_caches(state_provider, caches, cache_metrics);
+        let state_provider: StateProviderBox = if let Some(saved_cache) = saved_cache {
+            let caches = saved_cache.cache().clone();
+            let cache_metrics = saved_cache.metrics().clone();
+            Box::new(CachedStateProvider::new_with_caches(state_provider, caches, cache_metrics))
+        } else {
+            state_provider
+        };
 
         let state_provider = StateProviderDatabase::new(state_provider);
 
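The two branches above yield different concrete types, a `CachedStateProvider` wrapper versus the plain provider, so the cached branch is boxed and the binding is typed as `StateProviderBox` (hence the new import), letting both branches produce the same trait object. A minimal sketch of that unification with toy traits instead of the real provider types:

trait ProviderSketch {
    fn name(&self) -> &'static str;
}

struct PlainProvider;
struct CachedProvider<P>(P);

impl ProviderSketch for PlainProvider {
    fn name(&self) -> &'static str {
        "plain"
    }
}

impl<P: ProviderSketch> ProviderSketch for CachedProvider<P> {
    fn name(&self) -> &'static str {
        "cached"
    }
}

fn main() {
    let use_cache = false;
    // Boxing lets both branches return the same type, as the diff does with `StateProviderBox`.
    let provider: Box<dyn ProviderSketch> = if use_cache {
        Box::new(CachedProvider(PlainProvider))
    } else {
        Box::new(PlainProvider)
    };
    println!("{}", provider.name());
}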
@@ -369,7 +369,7 @@ where
             )
             .into())
         };
-        let state_provider = ensure_ok!(provider_builder.build());
+        let mut state_provider = ensure_ok!(provider_builder.build());
         drop(_enter);
 
         // fetch parent block
@@ -412,11 +412,13 @@ where
 
         // Use cached state provider before executing, used in execution after prewarming threads
         // complete
-        let state_provider = CachedStateProvider::new_with_caches(
-            state_provider,
-            handle.caches(),
-            handle.cache_metrics(),
-        );
+        if let Some((caches, cache_metrics)) = handle.caches().zip(handle.cache_metrics()) {
+            state_provider = Box::new(CachedStateProvider::new_with_caches(
+                state_provider,
+                caches,
+                cache_metrics,
+            ));
+        };
 
         // Execute the block and handle any execution errors
         let (output, senders) = match if self.config.state_provider_metrics() {
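`Option::zip` yields `Some((a, b))` only when both inputs are `Some`, so the cached wrapper is layered on only when prewarming actually produced a cache and its metrics; with the state cache disabled, the provider built earlier is used as-is. A quick illustration of that behaviour:

fn main() {
    let caches: Option<&str> = Some("caches");
    let metrics: Option<&str> = Some("metrics");
    let none_metrics: Option<&str> = None;

    // Both present: the pair is produced and the cached path would run.
    assert_eq!(caches.zip(metrics), Some(("caches", "metrics")));

    // Either side missing (state cache disabled): `zip` yields `None` and the plain
    // provider is used as-is.
    assert_eq!(caches.zip(none_metrics), None);
}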
@@ -20,6 +20,7 @@ pub struct DefaultEngineValues {
     persistence_threshold: u64,
     memory_block_buffer_target: u64,
     legacy_state_root_task_enabled: bool,
+    state_cache_disabled: bool,
     prewarming_disabled: bool,
     parallel_sparse_trie_disabled: bool,
     state_provider_metrics: bool,
@@ -66,6 +67,12 @@ impl DefaultEngineValues {
         self
     }
 
+    /// Set whether to disable state cache by default
+    pub const fn with_state_cache_disabled(mut self, v: bool) -> Self {
+        self.state_cache_disabled = v;
+        self
+    }
+
     /// Set whether to disable prewarming by default
     pub const fn with_prewarming_disabled(mut self, v: bool) -> Self {
         self.prewarming_disabled = v;
@@ -166,6 +173,7 @@ impl Default for DefaultEngineValues {
             persistence_threshold: DEFAULT_PERSISTENCE_THRESHOLD,
             memory_block_buffer_target: DEFAULT_MEMORY_BLOCK_BUFFER_TARGET,
             legacy_state_root_task_enabled: false,
+            state_cache_disabled: false,
             prewarming_disabled: false,
             parallel_sparse_trie_disabled: false,
             state_provider_metrics: false,
@@ -212,6 +220,10 @@ pub struct EngineArgs {
     #[deprecated]
     pub caching_and_prewarming_enabled: bool,
 
+    /// Disable state cache
+    #[arg(long = "engine.disable-state-cache", default_value_t = DefaultEngineValues::get_global().state_cache_disabled)]
+    pub state_cache_disabled: bool,
+
     /// Disable parallel prewarming
     #[arg(long = "engine.disable-prewarming", alias = "engine.disable-caching-and-prewarming", default_value_t = DefaultEngineValues::get_global().prewarming_disabled)]
     pub prewarming_disabled: bool,
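On the CLI side this is a plain boolean switch: with clap's derive API a `bool` field marked `#[arg(long = ...)]` is set by the flag's presence, and `default_value_t` lets the configurable `DefaultEngineValues` supply the default. A reduced sketch of the same wiring, assuming clap 4 with the `derive` feature and a hypothetical `EngineArgsSketch` struct (a constant default stands in for the `get_global()` lookup used in the diff):

use clap::Parser;

#[derive(Parser, Debug)]
struct EngineArgsSketch {
    /// Disable state cache
    #[arg(long = "engine.disable-state-cache", default_value_t = false)]
    state_cache_disabled: bool,
}

fn main() {
    // `--engine.disable-state-cache` flips the flag; omitting it keeps the default.
    let args = EngineArgsSketch::parse_from(["test", "--engine.disable-state-cache"]);
    assert!(args.state_cache_disabled);

    let args = EngineArgsSketch::parse_from(["test"]);
    assert!(!args.state_cache_disabled);
}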
@@ -305,6 +317,7 @@ impl Default for EngineArgs {
             persistence_threshold,
             memory_block_buffer_target,
             legacy_state_root_task_enabled,
+            state_cache_disabled,
             prewarming_disabled,
             parallel_sparse_trie_disabled,
             state_provider_metrics,
@@ -327,6 +340,7 @@ impl Default for EngineArgs {
             legacy_state_root_task_enabled,
             state_root_task_compare_updates,
             caching_and_prewarming_enabled: true,
+            state_cache_disabled,
             prewarming_disabled,
             parallel_sparse_trie_enabled: true,
             parallel_sparse_trie_disabled,
@@ -354,6 +368,7 @@ impl EngineArgs {
             .with_persistence_threshold(self.persistence_threshold)
             .with_memory_block_buffer_target(self.memory_block_buffer_target)
             .with_legacy_state_root(self.legacy_state_root_task_enabled)
+            .without_state_cache(self.state_cache_disabled)
             .without_prewarming(self.prewarming_disabled)
             .with_disable_parallel_sparse_trie(self.parallel_sparse_trie_disabled)
             .with_state_provider_metrics(self.state_provider_metrics)
@@ -408,6 +423,7 @@ mod tests {
             memory_block_buffer_target: 50,
             legacy_state_root_task_enabled: true,
             caching_and_prewarming_enabled: true,
+            state_cache_disabled: true,
             prewarming_disabled: true,
             parallel_sparse_trie_enabled: true,
             parallel_sparse_trie_disabled: true,
@@ -434,6 +450,7 @@ mod tests {
             "--engine.memory-block-buffer-target",
             "50",
             "--engine.legacy-state-root",
+            "--engine.disable-state-cache",
             "--engine.disable-prewarming",
             "--engine.disable-parallel-sparse-trie",
             "--engine.state-provider-metrics",
@@ -897,6 +897,9 @@ Engine:
       --engine.legacy-state-root
           Enable legacy state root
 
+      --engine.disable-state-cache
+          Disable state cache
+
       --engine.disable-prewarming
           Disable parallel prewarming
 
@@ -897,6 +897,9 @@ Engine:
      --engine.legacy-state-root
          Enable legacy state root
 
+      --engine.disable-state-cache
+          Disable state cache
+
      --engine.disable-prewarming
          Disable parallel prewarming
 