Mirror of https://github.com/paradigmxyz/reth.git (synced 2026-02-19 03:04:27 -05:00)
perf: use separate pool for save_blocks (#21764)
@@ -1,7 +1,7 @@
 //! Test case definitions

 use crate::result::{CaseResult, Error};
-use rayon::iter::{IntoParallelRefIterator, ParallelIterator};
+use rayon::prelude::*;
 use std::{
     fmt::Debug,
     path::{Path, PathBuf},
@@ -10,7 +10,7 @@ use std::{
 /// A single test case, capable of loading a JSON description of itself and running it.
 ///
 /// See <https://ethereum-tests.readthedocs.io/> for test specs.
-pub trait Case: Debug + Sync + Sized {
+pub trait Case: Debug + Send + Sync + Sized + 'static {
     /// A description of the test.
     fn description(&self) -> String {
         "no description".to_string()
@@ -22,7 +22,7 @@ pub trait Case: Debug + Sync + Sized {
     fn load(path: &Path) -> Result<Self, Error>;

     /// Run the test.
-    fn run(&self) -> Result<(), Error>;
+    fn run(self) -> Result<(), Error>;
 }

 /// A container for multiple test cases.
@@ -34,10 +34,10 @@ pub struct Cases<T> {

 impl<T: Case> Cases<T> {
     /// Run the contained test cases.
-    pub fn run(&self) -> Vec<CaseResult> {
+    pub fn run(self) -> Vec<CaseResult> {
         self.test_cases
-            .par_iter()
-            .map(|(path, case)| CaseResult::new(path, case, case.run()))
+            .into_par_iter()
+            .map(|(path, case)| CaseResult::new(&path, case.description(), case.run()))
             .collect()
     }
 }
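The `Send + Sync + Sized + 'static` bounds and the by-value `run(self)` signatures above exist so rayon's owning parallel iterator can move each test case onto a worker thread instead of borrowing it. A minimal, self-contained sketch of that pattern, using a hypothetical `DemoCase` type rather than this crate's `Case` trait:

```rust
use rayon::prelude::*;

// Hypothetical stand-in for a test case; `Send + 'static` lets rayon move it
// across threads, and `run(self)` lets each worker consume its case.
#[derive(Debug)]
struct DemoCase {
    name: String,
}

impl DemoCase {
    fn run(self) -> Result<(), String> {
        // Consume the owned case; no shared borrow has to outlive this call.
        if self.name.is_empty() { Err("empty name".to_string()) } else { Ok(()) }
    }
}

fn main() {
    let cases = vec![
        DemoCase { name: "a".into() },
        DemoCase { name: "b".into() },
    ];

    // `into_par_iter()` hands ownership of each case to a rayon worker,
    // mirroring the `Cases::run(self)` change above.
    let results: Vec<Result<(), String>> =
        cases.into_par_iter().map(|case| case.run()).collect();

    assert_eq!(results.len(), 2);
}
```

Because each worker owns its case, anything still needed afterwards (such as the description) has to be captured before the case is consumed, which is exactly what `CaseResult::new(&path, case.description(), case.run())` does.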
@@ -5,7 +5,7 @@ use crate::{
     Case, Error, Suite,
 };
 use alloy_rlp::{Decodable, Encodable};
-use rayon::iter::ParallelIterator;
+use rayon::iter::{IndexedParallelIterator, ParallelIterator};
 use reth_chainspec::ChainSpec;
 use reth_consensus::{Consensus, HeaderValidator};
 use reth_db_common::init::{insert_genesis_hashes, insert_genesis_history, insert_genesis_state};
@@ -172,7 +172,7 @@ impl Case for BlockchainTestCase {
     ///
     /// # Errors
     /// Returns an error if the test is flagged for skipping or encounters issues during execution.
-    fn run(&self) -> Result<(), Error> {
+    fn run(self) -> Result<(), Error> {
         // If the test is marked for skipping, return a Skipped error immediately.
         if self.skip {
             return Err(Error::Skipped);
@@ -180,12 +180,11 @@ impl Case for BlockchainTestCase {

         // Iterate through test cases, filtering by the network type to exclude specific forks.
         self.tests
-            .iter()
+            .into_iter()
             .filter(|(_, case)| !Self::excluded_fork(case.network))
             .par_bridge_buffered()
-            .try_for_each(|(name, case)| Self::run_single_case(name, case).map(|_| ()))?;
-
-        Ok(())
+            .with_min_len(64)
+            .try_for_each(|(name, case)| Self::run_single_case(&name, &case).map(|_| ()))
     }
 }

@@ -207,7 +206,7 @@ fn run_case(
     case: &BlockchainTest,
 ) -> Result<Vec<(RecoveredBlock<Block>, ExecutionWitness)>, Error> {
     // Create a new test database and initialize a provider for the test case.
-    let chain_spec: Arc<ChainSpec> = Arc::new(case.network.into());
+    let chain_spec = case.network.to_chain_spec();
     let factory = create_test_provider_factory_with_chain_spec(chain_spec.clone());
     let provider = factory.database_provider_rw().unwrap();

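`with_min_len(64)` comes from rayon's `IndexedParallelIterator` trait, which is why the import above gains it; it stops rayon from splitting the workload into pieces smaller than 64 cases, so very cheap tests do not pay more in task scheduling than in execution. A standalone sketch of the same knob on a plain vector (stock rayon only, not this crate's `par_bridge_buffered` helper):

```rust
use rayon::prelude::*;

fn main() {
    let inputs: Vec<u64> = (0..10_000).collect();

    // `with_min_len(64)` is available on indexed parallel iterators and bounds
    // how finely rayon may split the range: each task gets at least 64 items.
    let sum: u64 = inputs
        .par_iter()
        .with_min_len(64)
        .map(|x| x * 2)
        .sum();

    assert_eq!(sum, 2 * (0..10_000u64).sum::<u64>());
}
```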
@@ -8,8 +8,13 @@ use alloy_primitives::{keccak256, Address, Bloom, Bytes, B256, B64, U256};
 use reth_chainspec::{ChainSpec, ChainSpecBuilder, EthereumHardfork, ForkCondition};
 use reth_db_api::{cursor::DbDupCursorRO, tables, transaction::DbTx};
 use reth_primitives_traits::SealedHeader;
+use revm::primitives::HashMap;
 use serde::Deserialize;
-use std::{collections::BTreeMap, ops::Deref};
+use std::{
+    collections::BTreeMap,
+    ops::Deref,
+    sync::{Arc, OnceLock, RwLock},
+};

 /// The definition of a blockchain test.
 #[derive(Debug, PartialEq, Eq, Deserialize)]
@@ -321,59 +326,72 @@ pub enum ForkSpec {
     Osaka,
 }

-impl From<ForkSpec> for ChainSpec {
-    fn from(fork_spec: ForkSpec) -> Self {
+impl ForkSpec {
+    /// Converts this EF fork spec to a Reth [`ChainSpec`].
+    pub fn to_chain_spec(self) -> Arc<ChainSpec> {
+        static MAP: OnceLock<RwLock<HashMap<ForkSpec, Arc<ChainSpec>>>> = OnceLock::new();
+        let map = MAP.get_or_init(Default::default);
+        if let Some(r) = map.read().unwrap().get(&self) {
+            return r.clone();
+        }
+        map.write()
+            .unwrap()
+            .entry(self)
+            .or_insert_with(|| Arc::new(self.to_chain_spec_inner()))
+            .clone()
+    }
+
+    fn to_chain_spec_inner(self) -> ChainSpec {
         let spec_builder = ChainSpecBuilder::mainnet().reset();

-        match fork_spec {
-            ForkSpec::Frontier => spec_builder.frontier_activated(),
-            ForkSpec::FrontierToHomesteadAt5 => spec_builder
+        match self {
+            Self::Frontier => spec_builder.frontier_activated(),
+            Self::FrontierToHomesteadAt5 => spec_builder
                 .frontier_activated()
                 .with_fork(EthereumHardfork::Homestead, ForkCondition::Block(5)),
-            ForkSpec::Homestead => spec_builder.homestead_activated(),
-            ForkSpec::HomesteadToDaoAt5 => spec_builder
+            Self::Homestead => spec_builder.homestead_activated(),
+            Self::HomesteadToDaoAt5 => spec_builder
                 .homestead_activated()
                 .with_fork(EthereumHardfork::Dao, ForkCondition::Block(5)),
-            ForkSpec::HomesteadToEIP150At5 => spec_builder
+            Self::HomesteadToEIP150At5 => spec_builder
                 .homestead_activated()
                 .with_fork(EthereumHardfork::Tangerine, ForkCondition::Block(5)),
-            ForkSpec::EIP150 => spec_builder.tangerine_whistle_activated(),
-            ForkSpec::EIP158 => spec_builder.spurious_dragon_activated(),
-            ForkSpec::EIP158ToByzantiumAt5 => spec_builder
+            Self::EIP150 => spec_builder.tangerine_whistle_activated(),
+            Self::EIP158 => spec_builder.spurious_dragon_activated(),
+            Self::EIP158ToByzantiumAt5 => spec_builder
                 .spurious_dragon_activated()
                 .with_fork(EthereumHardfork::Byzantium, ForkCondition::Block(5)),
-            ForkSpec::Byzantium => spec_builder.byzantium_activated(),
-            ForkSpec::ByzantiumToConstantinopleAt5 => spec_builder
+            Self::Byzantium => spec_builder.byzantium_activated(),
+            Self::ByzantiumToConstantinopleAt5 => spec_builder
                 .byzantium_activated()
                 .with_fork(EthereumHardfork::Constantinople, ForkCondition::Block(5)),
-            ForkSpec::ByzantiumToConstantinopleFixAt5 => spec_builder
+            Self::ByzantiumToConstantinopleFixAt5 => spec_builder
                 .byzantium_activated()
                 .with_fork(EthereumHardfork::Petersburg, ForkCondition::Block(5)),
-            ForkSpec::Constantinople => spec_builder.constantinople_activated(),
-            ForkSpec::ConstantinopleFix => spec_builder.petersburg_activated(),
-            ForkSpec::Istanbul => spec_builder.istanbul_activated(),
-            ForkSpec::Berlin => spec_builder.berlin_activated(),
-            ForkSpec::BerlinToLondonAt5 => spec_builder
+            Self::Constantinople => spec_builder.constantinople_activated(),
+            Self::ConstantinopleFix => spec_builder.petersburg_activated(),
+            Self::Istanbul => spec_builder.istanbul_activated(),
+            Self::Berlin => spec_builder.berlin_activated(),
+            Self::BerlinToLondonAt5 => spec_builder
                 .berlin_activated()
                 .with_fork(EthereumHardfork::London, ForkCondition::Block(5)),
-            ForkSpec::London => spec_builder.london_activated(),
-            ForkSpec::Merge |
-            ForkSpec::MergeEOF |
-            ForkSpec::MergeMeterInitCode |
-            ForkSpec::MergePush0 => spec_builder.paris_activated(),
-            ForkSpec::ParisToShanghaiAtTime15k => spec_builder
+            Self::London => spec_builder.london_activated(),
+            Self::Merge | Self::MergeEOF | Self::MergeMeterInitCode | Self::MergePush0 => {
+                spec_builder.paris_activated()
+            }
+            Self::ParisToShanghaiAtTime15k => spec_builder
                 .paris_activated()
                 .with_fork(EthereumHardfork::Shanghai, ForkCondition::Timestamp(15_000)),
-            ForkSpec::Shanghai => spec_builder.shanghai_activated(),
-            ForkSpec::ShanghaiToCancunAtTime15k => spec_builder
+            Self::Shanghai => spec_builder.shanghai_activated(),
+            Self::ShanghaiToCancunAtTime15k => spec_builder
                 .shanghai_activated()
                 .with_fork(EthereumHardfork::Cancun, ForkCondition::Timestamp(15_000)),
-            ForkSpec::Cancun => spec_builder.cancun_activated(),
-            ForkSpec::CancunToPragueAtTime15k => spec_builder
+            Self::Cancun => spec_builder.cancun_activated(),
+            Self::CancunToPragueAtTime15k => spec_builder
                 .cancun_activated()
                 .with_fork(EthereumHardfork::Prague, ForkCondition::Timestamp(15_000)),
-            ForkSpec::Prague => spec_builder.prague_activated(),
-            ForkSpec::Osaka => spec_builder.osaka_activated(),
+            Self::Prague => spec_builder.prague_activated(),
+            Self::Osaka => spec_builder.osaka_activated(),
         }
         .build()
     }
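`ForkSpec::to_chain_spec` above memoizes the built chain specs behind a `OnceLock<RwLock<HashMap<..>>>`: lookups take the cheap read lock, and only a miss upgrades to the write lock and builds the spec. A small generic sketch of that read-mostly caching pattern, with a made-up `expensive_build` function standing in for `to_chain_spec_inner`:

```rust
use std::{
    collections::HashMap,
    sync::{Arc, OnceLock, RwLock},
};

// Stand-in for an expensive-to-build, cheap-to-clone value such as Arc<ChainSpec>.
fn expensive_build(key: u8) -> Arc<String> {
    Arc::new(format!("spec for fork {key}"))
}

fn cached_build(key: u8) -> Arc<String> {
    static CACHE: OnceLock<RwLock<HashMap<u8, Arc<String>>>> = OnceLock::new();
    let cache = CACHE.get_or_init(Default::default);

    // Fast path: most calls only take the read lock.
    if let Some(v) = cache.read().unwrap().get(&key) {
        return v.clone();
    }

    // Slow path: build under the write lock; `entry(..).or_insert_with(..)`
    // ensures a key is built at most once even if two threads miss at the same time.
    cache
        .write()
        .unwrap()
        .entry(key)
        .or_insert_with(|| expensive_build(key))
        .clone()
}

fn main() {
    let a = cached_build(1);
    let b = cached_build(1);
    assert!(Arc::ptr_eq(&a, &b)); // the second call hits the cache
}
```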
@@ -1,6 +1,5 @@
 //! Test results and errors

-use crate::Case;
 use reth_db::DatabaseError;
 use reth_ethereum_primitives::Block;
 use reth_primitives_traits::RecoveredBlock;
@@ -93,8 +92,8 @@ pub struct CaseResult {

 impl CaseResult {
     /// Create a new test result.
-    pub fn new(path: &Path, case: &impl Case, result: Result<(), Error>) -> Self {
-        Self { desc: case.description(), path: path.into(), result }
+    pub fn new(path: &Path, desc: String, result: Result<(), Error>) -> Self {
+        Self { desc, path: path.into(), result }
     }
 }

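With the new signature, `CaseResult::new` takes a precomputed description instead of borrowing `&impl Case`, which is what lets the caller consume the case via `run(self)` in the same expression. A tiny self-contained mirror of the struct and constructor from this hunk, for illustration only (the real type lives in this crate's result module):

```rust
use std::path::Path;

// Minimal mirror of the struct in the diff above, for illustration only.
#[derive(Debug)]
struct CaseResult {
    desc: String,
    path: std::path::PathBuf,
    result: Result<(), String>,
}

impl CaseResult {
    // Matches the new shape: the description arrives by value instead of via
    // a `&impl Case` borrow, so the case itself may already have been consumed.
    fn new(path: &Path, desc: String, result: Result<(), String>) -> Self {
        Self { desc, path: path.into(), result }
    }
}

fn main() {
    let res = CaseResult::new(Path::new("tests/example.json"), "demo".to_string(), Ok(()));
    assert_eq!(res.desc, "demo");
    assert!(res.result.is_ok());
    assert!(res.path.ends_with("example.json"));
}
```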
@@ -108,5 +108,9 @@ fn eest_fixtures() {
         .join("execution-spec-tests")
         .join("blockchain_tests");

+    if !suite_path.exists() {
+        return;
+    }
+
     BlockchainTests::new(suite_path).run();
 }