refactor: remove validate_header_with_total_difficulty (#15903)

Harrish Bansal
2025-04-25 19:08:29 +05:30
committed by GitHub
parent 0253bad654
commit 82d6505948
10 changed files with 76 additions and 168 deletions

View File

@@ -13,7 +13,7 @@ extern crate alloc;
use alloc::{fmt::Debug, string::String, vec::Vec};
use alloy_consensus::Header;
-use alloy_primitives::{BlockHash, BlockNumber, Bloom, B256, U256};
+use alloy_primitives::{BlockHash, BlockNumber, Bloom, B256};
use reth_execution_types::BlockExecutionResult;
use reth_primitives_traits::{
constants::{MAXIMUM_GAS_LIMIT_BLOCK, MINIMUM_GAS_LIMIT},
@@ -121,18 +121,6 @@ pub trait HeaderValidator<H = Header>: Debug + Send + Sync {
}
Ok(())
}
-/// Validate if the header is correct and follows the consensus specification, including
-/// computed properties (like total difficulty).
-///
-/// Some consensus engines may want to do additional checks here.
-///
-/// Note: validating headers with TD does not include other HeaderValidator validation.
-fn validate_header_with_total_difficulty(
-&self,
-header: &H,
-total_difficulty: U256,
-) -> Result<(), ConsensusError>;
}
/// Consensus Errors
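With the method gone from the trait, a downstream consensus type only provides the two remaining header hooks and folds any merge-era checks into validate_header itself. Below is a minimal sketch of an implementor after this change, assuming validate_header and validate_header_against_parent are the only required methods left and that both take SealedHeader arguments (the signatures are only shown indirectly in this diff); PermissiveConsensus is a made-up example type.

use reth_consensus::{ConsensusError, HeaderValidator};
use reth_primitives_traits::SealedHeader;

#[derive(Debug)]
struct PermissiveConsensus;

impl<H> HeaderValidator<H> for PermissiveConsensus {
    fn validate_header(&self, _header: &SealedHeader<H>) -> Result<(), ConsensusError> {
        // Standalone checks (gas, base fee, merge fields, timestamp drift, ...) all
        // live here now; there is no separate total-difficulty pass to implement.
        Ok(())
    }

    fn validate_header_against_parent(
        &self,
        _header: &SealedHeader<H>,
        _parent: &SealedHeader<H>,
    ) -> Result<(), ConsensusError> {
        Ok(())
    }
}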

View File

@@ -1,6 +1,5 @@
use crate::{Consensus, ConsensusError, FullConsensus, HeaderValidator};
use alloc::sync::Arc;
-use alloy_primitives::U256;
use reth_execution_types::BlockExecutionResult;
use reth_primitives_traits::{Block, NodePrimitives, RecoveredBlock, SealedBlock, SealedHeader};
@@ -28,14 +27,6 @@ impl<H> HeaderValidator<H> for NoopConsensus {
) -> Result<(), ConsensusError> {
Ok(())
}
-fn validate_header_with_total_difficulty(
-&self,
-_header: &H,
-_total_difficulty: U256,
-) -> Result<(), ConsensusError> {
-Ok(())
-}
}
impl<B: Block> Consensus<B> for NoopConsensus {

View File

@@ -1,5 +1,4 @@
use crate::{Consensus, ConsensusError, FullConsensus, HeaderValidator};
-use alloy_primitives::U256;
use core::sync::atomic::{AtomicBool, Ordering};
use reth_execution_types::BlockExecutionResult;
use reth_primitives_traits::{Block, NodePrimitives, RecoveredBlock, SealedBlock, SealedHeader};
@@ -105,16 +104,4 @@ impl<H> HeaderValidator<H> for TestConsensus {
Ok(())
}
}
-fn validate_header_with_total_difficulty(
-&self,
-_header: &H,
-_total_difficulty: U256,
-) -> Result<(), ConsensusError> {
-if self.fail_validation() {
-Err(ConsensusError::BaseFeeMissing)
-} else {
-Ok(())
-}
-}
}

View File

@@ -11,7 +11,7 @@ use alloy_consensus::BlockHeader;
use alloy_eips::{merge::EPOCH_SLOTS, BlockNumHash, NumHash};
use alloy_primitives::{
map::{HashMap, HashSet},
-BlockNumber, B256, U256,
+BlockNumber, B256,
};
use alloy_rpc_types_engine::{
ForkchoiceState, PayloadStatus, PayloadStatusEnum, PayloadValidationError,
@@ -1906,18 +1906,6 @@ where
/// Validate if block is correct and satisfies all the consensus rules that concern the header
/// and block body itself.
fn validate_block(&self, block: &RecoveredBlock<N::Block>) -> Result<(), ConsensusError> {
-if let Err(e) =
-self.consensus.validate_header_with_total_difficulty(block.header(), U256::MAX)
-{
-error!(
-target: "engine::tree",
-?block,
-"Failed to validate total difficulty for block {}: {e}",
-block.hash()
-);
-return Err(e)
-}
if let Err(e) = self.consensus.validate_header(block.sealed_header()) {
error!(target: "engine::tree", ?block, "Failed to validate header {}: {e}", block.hash());
return Err(e)
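Note that the engine-tree, RPC validation, and debug build-block callers removed in this commit all passed U256::MAX for the total difficulty, and the implementations ignored the argument (it is _total_difficulty in both the Ethereum and OP impls further down), so dropping the pass loses no information. What remains of validate_block, condensed from this hunk; the body checks after the header check are unchanged and not shown here.

fn validate_block(&self, block: &RecoveredBlock<N::Block>) -> Result<(), ConsensusError> {
    // The merge-field checks that used to run in the removed total-difficulty pass
    // are now part of validate_header itself.
    if let Err(e) = self.consensus.validate_header(block.sealed_header()) {
        error!(target: "engine::tree", ?block, "Failed to validate header {}: {e}", block.hash());
        return Err(e)
    }
    // ...remaining body validation continues unchanged below this point.
    Ok(())
}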

View File

@@ -4,7 +4,7 @@ use alloy_eips::{
eip2718::Encodable2718,
eip4844::{env_settings::EnvKzgSettings, BlobTransactionSidecar},
};
-use alloy_primitives::{Address, Bytes, B256, U256};
+use alloy_primitives::{Address, Bytes, B256};
use alloy_rlp::Decodable;
use alloy_rpc_types::engine::{BlobsBundleV1, PayloadAttributes};
use clap::Parser;
@@ -218,7 +218,6 @@ impl<C: ChainSpecParser<ChainSpec = ChainSpec>> Command<C> {
let block = payload.block();
debug!(target: "reth::cli", ?block, "Built new payload");
-consensus.validate_header_with_total_difficulty(block, U256::MAX)?;
consensus.validate_header(block.sealed_header())?;
consensus.validate_block_pre_execution(block)?;

View File

@@ -14,7 +14,6 @@ extern crate alloc;
use alloc::{fmt::Debug, sync::Arc};
use alloy_consensus::EMPTY_OMMER_ROOT_HASH;
use alloy_eips::eip7840::BlobParams;
-use alloy_primitives::U256;
use reth_chainspec::{EthChainSpec, EthereumHardforks};
use reth_consensus::{Consensus, ConsensusError, FullConsensus, HeaderValidator};
use reth_consensus_common::validation::{
@@ -137,8 +136,42 @@ where
H: BlockHeader,
{
fn validate_header(&self, header: &SealedHeader<H>) -> Result<(), ConsensusError> {
-validate_header_gas(header.header())?;
-validate_header_base_fee(header.header(), &self.chain_spec)?;
+let header = header.header();
+let is_post_merge = self.chain_spec.is_paris_active_at_block(header.number());
+if is_post_merge {
+if !header.difficulty().is_zero() {
+return Err(ConsensusError::TheMergeDifficultyIsNotZero);
+}
+if !header.nonce().is_some_and(|nonce| nonce.is_zero()) {
+return Err(ConsensusError::TheMergeNonceIsNotZero);
+}
+if header.ommers_hash() != EMPTY_OMMER_ROOT_HASH {
+return Err(ConsensusError::TheMergeOmmerRootIsNotEmpty);
+}
+} else {
+#[cfg(feature = "std")]
+{
+let present_timestamp = std::time::SystemTime::now()
+.duration_since(std::time::SystemTime::UNIX_EPOCH)
+.unwrap()
+.as_secs();
+if header.timestamp() >
+present_timestamp + alloy_eips::merge::ALLOWED_FUTURE_BLOCK_TIME_SECONDS
+{
+return Err(ConsensusError::TimestampIsInFuture {
+timestamp: header.timestamp(),
+present_timestamp,
+});
+}
+}
+}
+validate_header_extra_data(header)?;
+validate_header_gas(header)?;
+validate_header_base_fee(header, &self.chain_spec)?;
// EIP-4895: Beacon chain push withdrawals as operations
if self.chain_spec.is_shanghai_active_at_timestamp(header.timestamp()) &&
@@ -154,7 +187,7 @@ where
// Ensures that EIP-4844 fields are valid once cancun is active.
if self.chain_spec.is_cancun_active_at_timestamp(header.timestamp()) {
validate_4844_header_standalone(
-header.header(),
+header,
self.chain_spec
.blob_params_at_timestamp(header.timestamp())
.unwrap_or_else(BlobParams::cancun),
@@ -204,67 +237,6 @@ where
Ok(())
}
-fn validate_header_with_total_difficulty(
-&self,
-header: &H,
-_total_difficulty: U256,
-) -> Result<(), ConsensusError> {
-let is_post_merge = self.chain_spec.is_paris_active_at_block(header.number());
-if is_post_merge {
-if !header.difficulty().is_zero() {
-return Err(ConsensusError::TheMergeDifficultyIsNotZero)
-}
-if !header.nonce().is_some_and(|nonce| nonce.is_zero()) {
-return Err(ConsensusError::TheMergeNonceIsNotZero)
-}
-if header.ommers_hash() != EMPTY_OMMER_ROOT_HASH {
-return Err(ConsensusError::TheMergeOmmerRootIsNotEmpty)
-}
-// Post-merge, the consensus layer is expected to perform checks such that the block
-// timestamp is a function of the slot. This is different from pre-merge, where blocks
-// are only allowed to be in the future (compared to the system's clock) by a certain
-// threshold.
-//
-// Block validation with respect to the parent should ensure that the block timestamp
-// is greater than its parent timestamp.
-// validate header extra data for all networks post merge
-validate_header_extra_data(header)?;
-// mixHash is used instead of difficulty inside EVM
-// https://eips.ethereum.org/EIPS/eip-4399#using-mixhash-field-instead-of-difficulty
-} else {
-// Note: This does not perform any pre merge checks for difficulty, mix_hash & nonce
-// because those are deprecated and the expectation is that a reth node syncs in reverse
-// order, making those checks obsolete.
-// Check if timestamp is in the future. Clock can drift but this can be consensus issue.
-#[cfg(feature = "std")]
-{
-let present_timestamp = std::time::SystemTime::now()
-.duration_since(std::time::SystemTime::UNIX_EPOCH)
-.unwrap()
-.as_secs();
-if header.timestamp() >
-present_timestamp + alloy_eips::merge::ALLOWED_FUTURE_BLOCK_TIME_SECONDS
-{
-return Err(ConsensusError::TimestampIsInFuture {
-timestamp: header.timestamp(),
-present_timestamp,
-})
-}
-}
-validate_header_extra_data(header)?;
-}
-Ok(())
-}
}
#[cfg(test)]
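Because the merge-field rules now run inside validate_header, a header that previously only failed the total-difficulty pass fails the ordinary header check. Below is a hedged test-style sketch, assuming EthBeaconConsensus is in scope, that its constructor accepts the Arc'd reth_chainspec::MAINNET spec (it is constructed the same way in the pre_execution_checks hunk later in this diff), and that a SealedHeader::new(header, hash) constructor exists; the hash is irrelevant to these particular rules, and the merge checks run before the gas and base-fee checks per the hunk above.

#[test]
fn post_merge_nonzero_difficulty_is_rejected_by_validate_header() {
    use alloy_consensus::Header;
    use alloy_primitives::{B256, U256};
    use reth_chainspec::MAINNET;
    use reth_consensus::{ConsensusError, HeaderValidator};
    use reth_primitives_traits::SealedHeader;

    let consensus = EthBeaconConsensus::new(MAINNET.clone());
    // A block number well past the mainnet merge, but with a non-zero difficulty.
    let header = Header { number: 20_000_000, difficulty: U256::from(1), ..Default::default() };
    // The merge checks run first, so this is the first error hit.
    assert!(matches!(
        consensus.validate_header(&SealedHeader::new(header, B256::ZERO)),
        Err(ConsensusError::TheMergeDifficultyIsNotZero)
    ));
}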

View File

@@ -13,7 +13,7 @@ extern crate alloc;
use alloc::{format, sync::Arc};
use alloy_consensus::{BlockHeader as _, EMPTY_OMMER_ROOT_HASH};
-use alloy_primitives::{B64, U256};
+use alloy_primitives::B64;
use core::fmt::Debug;
use reth_chainspec::{EthChainSpec, EthereumHardforks};
use reth_consensus::{Consensus, ConsensusError, FullConsensus, HeaderValidator};
@@ -134,8 +134,33 @@ impl<ChainSpec: EthChainSpec + OpHardforks, H: BlockHeader> HeaderValidator<H>
for OpBeaconConsensus<ChainSpec>
{
fn validate_header(&self, header: &SealedHeader<H>) -> Result<(), ConsensusError> {
-validate_header_gas(header.header())?;
-validate_header_base_fee(header.header(), &self.chain_spec)
+let header = header.header();
+// with OP-stack Bedrock activation number determines when TTD (eth Merge) has been reached.
+debug_assert!(
+self.chain_spec.is_bedrock_active_at_block(header.number()),
+"manually import OVM blocks"
+);
+if header.nonce() != Some(B64::ZERO) {
+return Err(ConsensusError::TheMergeNonceIsNotZero)
+}
+if header.ommers_hash() != EMPTY_OMMER_ROOT_HASH {
+return Err(ConsensusError::TheMergeOmmerRootIsNotEmpty)
+}
+// Post-merge, the consensus layer is expected to perform checks such that the block
+// timestamp is a function of the slot. This is different from pre-merge, where blocks
+// are only allowed to be in the future (compared to the system's clock) by a certain
+// threshold.
+//
+// Block validation with respect to the parent should ensure that the block timestamp
+// is greater than its parent timestamp.
+// validate header extra data for all networks post merge
+validate_header_extra_data(header)?;
+validate_header_gas(header)?;
+validate_header_base_fee(header, &self.chain_spec)
}
fn validate_header_against_parent(
@@ -180,40 +205,4 @@ impl<ChainSpec: EthChainSpec + OpHardforks, H: BlockHeader> HeaderValidator<H>
Ok(())
}
-fn validate_header_with_total_difficulty(
-&self,
-header: &H,
-_total_difficulty: U256,
-) -> Result<(), ConsensusError> {
-// with OP-stack Bedrock activation number determines when TTD (eth Merge) has been reached.
-debug_assert!(
-self.chain_spec.is_bedrock_active_at_block(header.number()),
-"manually import OVM blocks"
-);
-if header.nonce() != Some(B64::ZERO) {
-return Err(ConsensusError::TheMergeNonceIsNotZero)
-}
-if header.ommers_hash() != EMPTY_OMMER_ROOT_HASH {
-return Err(ConsensusError::TheMergeOmmerRootIsNotEmpty)
-}
-// Post-merge, the consensus layer is expected to perform checks such that the block
-// timestamp is a function of the slot. This is different from pre-merge, where blocks
-// are only allowed to be in the future (compared to the system's clock) by a certain
-// threshold.
-//
-// Block validation with respect to the parent should ensure that the block timestamp
-// is greater than its parent timestamp.
-// validate header extra data for all networks post merge
-validate_header_extra_data(header)?;
-// mixHash is used instead of difficulty inside EVM
-// https://eips.ethereum.org/EIPS/eip-4399#using-mixhash-field-instead-of-difficulty
-Ok(())
-}
}
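The OP-stack version follows the same pattern, with the Bedrock activation number standing in for the merge TTD (hence the debug_assert). Below is a parallel sketch for the nonce rule, assuming an OpBeaconConsensus::new constructor that accepts the Arc'd reth_optimism_chainspec::OP_MAINNET spec; neither appears in this diff, so treat both names as assumptions.

#[test]
fn post_bedrock_nonzero_nonce_is_rejected_by_validate_header() {
    use alloy_consensus::Header;
    use alloy_primitives::{B256, B64};
    use reth_consensus::{ConsensusError, HeaderValidator};
    use reth_optimism_chainspec::OP_MAINNET;
    use reth_primitives_traits::SealedHeader;

    let consensus = OpBeaconConsensus::new(OP_MAINNET.clone());
    // Well past Bedrock (so the debug_assert holds), but with a non-zero nonce.
    let header = Header {
        number: 110_000_000,
        nonce: B64::new([0, 0, 0, 0, 0, 0, 0, 1]),
        ..Default::default()
    };
    assert!(matches!(
        consensus.validate_header(&SealedHeader::new(header, B256::ZERO)),
        Err(ConsensusError::TheMergeNonceIsNotZero)
    ));
}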

View File

@@ -118,7 +118,6 @@ where
) -> Result<(), ValidationApiError> {
self.validate_message_against_header(block.sealed_header(), &message)?;
-self.consensus.validate_header_with_total_difficulty(block.sealed_header(), U256::MAX)?;
self.consensus.validate_header(block.sealed_header())?;
self.consensus.validate_block_pre_execution(block.sealed_block())?;

View File

@@ -136,7 +136,7 @@ where
.map_err(|err| StageError::Fatal(Box::new(err)))?
.into();
-let (header, header_hash) = sealed_header.split();
+let (header, header_hash) = sealed_header.split_ref();
if header.number() == 0 {
continue
}
@@ -145,19 +145,16 @@ where
// Increase total difficulty
td += header.difficulty();
// Header validation
-self.consensus.validate_header_with_total_difficulty(&header, td).map_err(|error| {
-StageError::Block {
-block: Box::new(BlockWithParent::new(
-header.parent_hash(),
-NumHash::new(header.number(), header_hash),
-)),
-error: BlockErrorKind::Validation(error),
-}
+self.consensus.validate_header(&sealed_header).map_err(|error| StageError::Block {
+block: Box::new(BlockWithParent::new(
+sealed_header.parent_hash(),
+NumHash::new(sealed_header.number(), sealed_header.hash()),
+)),
+error: BlockErrorKind::Validation(error),
})?;
// Append to Headers segment
-writer.append_header(&header, td, &header_hash)?;
+writer.append_header(header, td, header_hash)?;
}
info!(target: "sync::stages::headers", total = total_headers, "Writing headers hash index");

View File

@@ -315,7 +315,6 @@ fn pre_execution_checks(
let consensus: EthBeaconConsensus<ChainSpec> = EthBeaconConsensus::new(chain_spec);
let sealed_header = block.sealed_header();
-let header = block.header();
<EthBeaconConsensus<ChainSpec> as Consensus<Block>>::validate_body_against_header(
&consensus,
@@ -323,7 +322,6 @@ fn pre_execution_checks(
sealed_header,
)?;
consensus.validate_header_against_parent(sealed_header, parent.sealed_header())?;
-consensus.validate_header_with_total_difficulty(header, block.difficulty)?;
consensus.validate_header(sealed_header)?;
consensus.validate_block_pre_execution(block)?;