mirror of https://github.com/paradigmxyz/reth.git (synced 2026-01-22 21:58:05 -05:00)

Merge branch 'paradigmxyz:main' into bal
.gitignore (vendored, +4)

@@ -68,3 +68,7 @@ links-report.json
 __pycache__/
 *.py[cod]
 *$py.class
+
+# direnv
+.envrc
+.direnv/

@@ -65,17 +65,15 @@ impl BundleStateSorted
             .clone()
             .into_iter()
             .map(|(address, account)| {
-                {
-                    (
-                        address,
-                        BundleAccountSorted {
-                            info: account.info,
-                            original_info: account.original_info,
-                            status: account.status,
-                            storage: BTreeMap::from_iter(account.storage),
-                        },
-                    )
-                }
+                (
+                    address,
+                    BundleAccountSorted {
+                        info: account.info,
+                        original_info: account.original_info,
+                        status: account.status,
+                        storage: BTreeMap::from_iter(account.storage),
+                    },
+                )
             })
             .collect();

@@ -125,7 +125,7 @@ pub trait RpcConvert: Send + Sync + Unpin + Clone + Debug + 'static {
     fn fill(
         &self,
         tx: Recovered<TxTy<Self::Primitives>>,
-        tx_inf: TransactionInfo,
+        tx_info: TransactionInfo,
     ) -> Result<RpcTransaction<Self::Network>, Self::Error>;

     /// Builds a fake transaction from a transaction request for inclusion into block built in

@@ -111,7 +111,7 @@ impl ExecInput {
             // body.
             let end_block_body = provider
                 .block_body_indices(end_block_number)?
-                .ok_or(ProviderError::BlockBodyIndicesNotFound(target_block))?;
+                .ok_or(ProviderError::BlockBodyIndicesNotFound(end_block_number))?;
             (end_block_number, false, end_block_body.next_tx_num())
         };

@@ -12,15 +12,16 @@ use alloc::{
     vec::Vec,
 };
 use alloy_consensus::{BlockHeader, Header};
-use alloy_primitives::B256;
-use alloy_rlp::Decodable;
+use alloy_primitives::{keccak256, B256};
 use reth_chainspec::{EthChainSpec, EthereumHardforks};
 use reth_consensus::{Consensus, HeaderValidator};
 use reth_errors::ConsensusError;
 use reth_ethereum_consensus::{validate_block_post_execution, EthBeaconConsensus};
 use reth_ethereum_primitives::{Block, EthPrimitives};
 use reth_evm::{execute::Executor, ConfigureEvm};
-use reth_primitives_traits::{block::error::BlockRecoveryError, Block as _, RecoveredBlock};
+use reth_primitives_traits::{
+    block::error::BlockRecoveryError, Block as _, RecoveredBlock, SealedHeader,
+};
 use reth_trie_common::{HashedPostState, KeccakKeyHasher};

 /// Errors that can occur during stateless validation.

@@ -167,12 +168,13 @@ where
         .try_into_recovered()
         .map_err(|err| StatelessValidationError::SignerRecovery(Box::new(err)))?;

-    let mut ancestor_headers: Vec<Header> = witness
+    let mut ancestor_headers: Vec<_> = witness
         .headers
         .iter()
-        .map(|serialized_header| {
-            let bytes = serialized_header.as_ref();
-            Header::decode(&mut &bytes[..])
+        .map(|bytes| {
+            let hash = keccak256(bytes);
+            alloy_rlp::decode_exact::<Header>(bytes)
+                .map(|h| SealedHeader::new(h, hash))
                 .map_err(|_| StatelessValidationError::HeaderDeserializationFailed)
         })
         .collect::<Result<_, _>>()?;

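For context, the new decoding path reads as follows when assembled from this hunk (a sketch built only from identifiers that appear above; the explicit Vec<SealedHeader> annotation spells out what Vec<_> infers, and the element type yielded by witness.headers is assumed to deref to the raw RLP bytes):

    // Hash the raw RLP bytes once and reuse that hash as the sealed header's block hash,
    // instead of decoding first and re-hashing the header later.
    let mut ancestor_headers: Vec<SealedHeader> = witness
        .headers
        .iter()
        .map(|bytes| {
            let hash = keccak256(bytes);
            alloy_rlp::decode_exact::<Header>(bytes)
                .map(|h| SealedHeader::new(h, hash))
                .map_err(|_| StatelessValidationError::HeaderDeserializationFailed)
        })
        .collect::<Result<_, _>>()?;
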
@@ -180,25 +182,22 @@ where
     // ascending order.
     ancestor_headers.sort_by_key(|header| header.number());

-    // Validate block against pre-execution consensus rules
-    validate_block_consensus(chain_spec.clone(), &current_block)?;
-
     // Check that the ancestor headers form a contiguous chain and are not just random headers.
     let ancestor_hashes = compute_ancestor_hashes(&current_block, &ancestor_headers)?;

     // Get the last ancestor header and retrieve its state root.
     //
-    // There should be at least one ancestor header, this is because we need the parent header to
-    // retrieve the previous state root.
+    // There should be at least one ancestor header.
     // The edge case here would be the genesis block, but we do not create proofs for the genesis
     // block.
-    let pre_state_root = match ancestor_headers.last() {
-        Some(prev_header) => prev_header.state_root,
+    let parent = match ancestor_headers.last() {
+        Some(prev_header) => prev_header,
         None => return Err(StatelessValidationError::MissingAncestorHeader),
     };

+    // Validate block against pre-execution consensus rules
+    validate_block_consensus(chain_spec.clone(), &current_block, parent)?;
+
     // First verify that the pre-state reads are correct
-    let (mut trie, bytecode) = T::new(&witness, pre_state_root)?;
+    let (mut trie, bytecode) = T::new(&witness, parent.state_root)?;

     // Create an in-memory database that will use the reads to validate the block
     let db = WitnessDatabase::new(&trie, bytecode, ancestor_hashes);

@@ -231,17 +230,14 @@ where
 ///
 /// This function validates a block against Ethereum consensus rules by:
 ///
-/// 1. **Difficulty Validation:** Validates the header with total difficulty to verify proof-of-work
-///    (pre-merge) or to enforce post-merge requirements.
-///
-/// 2. **Header Validation:** Validates the sealed header against protocol specifications,
+/// 1. **Header Validation:** Validates the sealed header against protocol specifications,
 ///    including:
 ///    - Gas limit checks
 ///    - Base fee validation for EIP-1559
 ///    - Withdrawals root validation for Shanghai fork
 ///    - Blob-related fields validation for Cancun fork
 ///
-/// 3. **Pre-Execution Validation:** Validates block structure, transaction format, signature
+/// 2. **Pre-Execution Validation:** Validates block structure, transaction format, signature
 ///    validity, and other pre-execution requirements.
 ///
 /// This function acts as a preliminary validation before executing and validating the state

@@ -249,6 +245,7 @@ where
 fn validate_block_consensus<ChainSpec>(
     chain_spec: Arc<ChainSpec>,
     block: &RecoveredBlock<Block>,
+    parent: &SealedHeader<Header>,
 ) -> Result<(), StatelessValidationError>
 where
     ChainSpec: Send + Sync + EthChainSpec<Header = Header> + EthereumHardforks + Debug,

@@ -256,6 +253,7 @@ where
     let consensus = EthBeaconConsensus::new(chain_spec);

     consensus.validate_header(block.sealed_header())?;
+    consensus.validate_header_against_parent(block.sealed_header(), parent)?;

     consensus.validate_block_pre_execution(block)?;

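Taken together with the documentation hunk above, validate_block_consensus now reads roughly as follows (a sketch assembled from these hunks; the trailing Ok(()) is assumed, since the hunks end before the function body does):

    fn validate_block_consensus<ChainSpec>(
        chain_spec: Arc<ChainSpec>,
        block: &RecoveredBlock<Block>,
        parent: &SealedHeader<Header>,
    ) -> Result<(), StatelessValidationError>
    where
        ChainSpec: Send + Sync + EthChainSpec<Header = Header> + EthereumHardforks + Debug,
    {
        let consensus = EthBeaconConsensus::new(chain_spec);

        // 1. Header validation against protocol specifications and, with this change,
        //    also against the supplied parent header.
        consensus.validate_header(block.sealed_header())?;
        consensus.validate_header_against_parent(block.sealed_header(), parent)?;

        // 2. Pre-execution validation of block structure, transaction format and signatures.
        consensus.validate_block_pre_execution(block)?;

        Ok(())
    }
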
@@ -277,18 +275,18 @@ where
 /// ancestor header to its corresponding block hash.
 fn compute_ancestor_hashes(
     current_block: &RecoveredBlock<Block>,
-    ancestor_headers: &[Header],
+    ancestor_headers: &[SealedHeader],
 ) -> Result<BTreeMap<u64, B256>, StatelessValidationError> {
     let mut ancestor_hashes = BTreeMap::new();

-    let mut child_header = current_block.header();
+    let mut child_header = current_block.sealed_header();

     // Next verify that headers supplied are contiguous
     for parent_header in ancestor_headers.iter().rev() {
         let parent_hash = child_header.parent_hash();
         ancestor_hashes.insert(parent_header.number, parent_hash);

-        if parent_hash != parent_header.hash_slow() {
+        if parent_hash != parent_header.hash() {
             return Err(StatelessValidationError::InvalidAncestorChain); // Blocks must be contiguous
         }

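For reference, compute_ancestor_hashes after this change, assembled from the hunk above (a sketch; the loop tail that advances child_header and the final return are assumed, as the hunk is truncated):

    fn compute_ancestor_hashes(
        current_block: &RecoveredBlock<Block>,
        ancestor_headers: &[SealedHeader],
    ) -> Result<BTreeMap<u64, B256>, StatelessValidationError> {
        let mut ancestor_hashes = BTreeMap::new();

        let mut child_header = current_block.sealed_header();

        // Walk the ancestors newest-first; each supplied header must be the parent of the
        // previous one. SealedHeader::hash() returns the cached hash, so switching away
        // from hash_slow() avoids re-hashing every ancestor header here.
        for parent_header in ancestor_headers.iter().rev() {
            let parent_hash = child_header.parent_hash();
            ancestor_hashes.insert(parent_header.number, parent_hash);

            if parent_hash != parent_header.hash() {
                return Err(StatelessValidationError::InvalidAncestorChain); // Blocks must be contiguous
            }

            // Assumed continuation: step back one block for the next iteration.
            child_header = parent_header;
        }

        Ok(ancestor_hashes)
    }
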
flake.lock (generated, new file, 116 lines)

@@ -0,0 +1,116 @@
{
  "nodes": {
    "crane": {
      "locked": {
        "lastModified": 1754269165,
        "narHash": "sha256-0tcS8FHd4QjbCVoxN9jI+PjHgA4vc/IjkUSp+N3zy0U=",
        "owner": "ipetkov",
        "repo": "crane",
        "rev": "444e81206df3f7d92780680e45858e31d2f07a08",
        "type": "github"
      },
      "original": {
        "owner": "ipetkov",
        "repo": "crane",
        "type": "github"
      }
    },
    "fenix": {
      "inputs": {
        "nixpkgs": [
          "nixpkgs"
        ],
        "rust-analyzer-src": "rust-analyzer-src"
      },
      "locked": {
        "lastModified": 1754549159,
        "narHash": "sha256-47e1Ar09kZlv2HvZilaNRFzRybIiJYNQ2MSvofbiw5o=",
        "owner": "nix-community",
        "repo": "fenix",
        "rev": "5fe110751342a023d8c7ddce7fbf8311dca9f58d",
        "type": "github"
      },
      "original": {
        "owner": "nix-community",
        "repo": "fenix",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1731603435,
        "narHash": "sha256-CqCX4JG7UiHvkrBTpYC3wcEurvbtTADLbo3Ns2CEoL8=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "8b27c1239e5c421a2bbc2c65d52e4a6fbf2ff296",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "ref": "24.11",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "root": {
      "inputs": {
        "crane": "crane",
        "fenix": "fenix",
        "nixpkgs": "nixpkgs",
        "utils": "utils"
      }
    },
    "rust-analyzer-src": {
      "flake": false,
      "locked": {
        "lastModified": 1754496778,
        "narHash": "sha256-fPDLP3z9XaYQBfSCemEdloEONz/uPyr35RHPRy9Vx8M=",
        "owner": "rust-lang",
        "repo": "rust-analyzer",
        "rev": "529d3b935d68bdf9120fe4d7f8eded7b56271697",
        "type": "github"
      },
      "original": {
        "owner": "rust-lang",
        "ref": "nightly",
        "repo": "rust-analyzer",
        "type": "github"
      }
    },
    "systems": {
      "locked": {
        "lastModified": 1681028828,
        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
        "owner": "nix-systems",
        "repo": "default",
        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
        "type": "github"
      },
      "original": {
        "owner": "nix-systems",
        "repo": "default",
        "type": "github"
      }
    },
    "utils": {
      "inputs": {
        "systems": "systems"
      },
      "locked": {
        "lastModified": 1731533236,
        "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
        "owner": "numtide",
        "repo": "flake-utils",
        "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "flake-utils",
        "type": "github"
      }
    }
  },
  "root": "root",
  "version": 7
}

flake.nix (new file, 127 lines)

@@ -0,0 +1,127 @@
{
  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/24.11";
    utils.url = "github:numtide/flake-utils";
    crane.url = "github:ipetkov/crane";

    fenix = {
      url = "github:nix-community/fenix";
      inputs.nixpkgs.follows = "nixpkgs";
    };
  };

  outputs =
    {
      nixpkgs,
      utils,
      crane,
      fenix,
      ...
    }:
    utils.lib.eachDefaultSystem (
      system:
      let
        pkgs = import nixpkgs { inherit system; };

        # A useful helper for folding a list of `prevSet -> newSet` functions
        # into an attribute set.
        composeAttrOverrides = defaultAttrs: overrides: builtins.foldl'
          (acc: f: acc // (f acc)) defaultAttrs overrides;
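        # For illustration: composeAttrOverrides { a = 1; } [ (prev: { b = prev.a + 1; }) ]
        # folds to { a = 1; b = 2; }, since each override function sees the attribute set
        # accumulated so far and its result is merged back in with //.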

        cargoTarget = pkgs.stdenv.hostPlatform.rust.rustcTargetSpec;
        cargoTargetEnvVar = builtins.replaceStrings ["-"] ["_"]
          (pkgs.lib.toUpper cargoTarget);

        cargoTOML = builtins.fromTOML (builtins.readFile ./Cargo.toml);
        packageVersion = cargoTOML.workspace.package.version;
        rustVersion = cargoTOML.workspace.package."rust-version";

        rustStable = fenix.packages.${system}.stable.withComponents [
          "cargo" "rustc" "rust-src"
        ];

        rustNightly = fenix.packages.${system}.latest;

        craneLib = (crane.mkLib pkgs).overrideToolchain rustStable;

        nativeBuildInputs = [
          pkgs.pkg-config
          pkgs.libgit2
          pkgs.perl
        ];

        withClang = prev: {
          buildInputs = prev.buildInputs or [] ++ [
            pkgs.clang
          ];
          LIBCLANG_PATH = "${pkgs.libclang.lib}/lib";
        };

        withMaxPerf = prev: {
          cargoBuildCommand = "cargo build --profile=maxperf";
          cargoExtraArgs = prev.cargoExtraArgs or "" + " --features=jemalloc,asm-keccak";
          RUSTFLAGS = prev.RUSTFLAGS or [] ++ [
            "-Ctarget-cpu=native"
          ];
        };

        withMold = prev: {
          buildInputs = prev.buildInputs or [] ++ [
            pkgs.mold
          ];
          "CARGO_TARGET_${cargoTargetEnvVar}_LINKER" = "${pkgs.llvmPackages.clangUseLLVM}/bin/clang";
          RUSTFLAGS = prev.RUSTFLAGS or [] ++ [
            "-Clink-arg=-fuse-ld=${pkgs.mold}/bin/mold"
          ];
        };

        withOp = prev: {
          cargoExtraArgs = prev.cargoExtraArgs or "" + " -p op-reth --bin=op-reth";
        };

        mkReth = overrides: craneLib.buildPackage (composeAttrOverrides {
          pname = "reth";
          version = packageVersion;
          src = ./.;
          inherit nativeBuildInputs;
          doCheck = false;
        } overrides);

      in
      {
        packages = rec {

          reth = mkReth ([
            withClang
            withMaxPerf
          ] ++ pkgs.lib.optionals pkgs.stdenv.isLinux [
            withMold
          ]);

          op-reth = mkReth ([
            withClang
            withMaxPerf
            withOp
          ] ++ pkgs.lib.optionals pkgs.stdenv.isLinux [
            withMold
          ]);

          default = reth;
        };

        devShell = let
          overrides = [
            withClang
          ] ++ pkgs.lib.optionals pkgs.stdenv.isLinux [
            withMold
          ];
        in craneLib.devShell (composeAttrOverrides {
          packages = nativeBuildInputs ++ [
            rustNightly.rust-analyzer
            rustNightly.clippy
            rustNightly.rustfmt
          ];
        } overrides);
      }
    );
}