feat: add reth init and reth import (#877)

Co-authored-by: Dan Cline <6798349+Rjected@users.noreply.github.com>
This commit is contained in:
Bjerg
2023-02-09 21:08:10 +01:00
committed by GitHub
parent 0f2d345970
commit 440718288d
19 changed files with 515 additions and 116 deletions

View File

@@ -67,6 +67,7 @@ where
..Default::default()
},
);
self.evm.env.cfg.chain_id = U256::from(self.chain_spec.chain().id());
self.evm.env.cfg.spec_id = spec_id;
self.evm.env.cfg.perf_all_precompiles_have_balance = false;

View File

@@ -32,6 +32,7 @@ reth-rlp = { path = "../../rlp", optional = true }
tokio-util = { version = "0.7", features = ["codec"], optional = true }
bytes = { version = "1", optional = true }
tempfile = { version = "3.3", optional = true }
itertools = { version = "0.10", optional = true }
[dev-dependencies]
reth-db = { path = "../../storage/db", features = ["test-utils"] }
@@ -42,10 +43,11 @@ assert_matches = "1.5.0"
tokio = { version = "1", features = ["macros", "rt-multi-thread"] }
tokio-util = { version = "0.7", features = ["codec"] }
reth-rlp = { path = "../../rlp" }
itertools = "0.10"
bytes = "1"
thiserror = "1"
tempfile = "3.3"
[features]
test-utils = ["dep:reth-rlp", "dep:thiserror", "dep:tokio-util", "dep:tempfile", "dep:bytes"]
test-utils = ["dep:reth-rlp", "dep:thiserror", "dep:tokio-util", "dep:tempfile", "dep:bytes", "dep:itertools"]

View File

@@ -1,6 +1,5 @@
use crate::metrics::DownloaderMetrics;
use super::queue::BodiesRequestQueue;
use crate::{bodies::task::TaskDownloader, metrics::DownloaderMetrics};
use futures::Stream;
use futures_util::StreamExt;
use reth_db::{cursor::DbCursorRO, database::Database, tables, transaction::DbTx};
@@ -186,7 +185,7 @@ where
fn is_terminated(&self) -> bool {
// There is nothing to request if the range is empty
let nothing_to_request = self.download_range.is_empty() ||
// or all blocks have already been requested.
// or all blocks have already been requested.
self.in_progress_queue
.last_requested_block_number
.map(|last| last + 1 == self.download_range.end)
@@ -241,6 +240,19 @@ where
}
}
// Task-spawning conversion for the bodies downloader.
impl<B, DB> BodiesDownloader<B, DB>
where
    B: BodiesClient + 'static,
    DB: Database,
    // `Self: BodyDownloader` is required so the spawned task wrapper can poll
    // this downloader through the `BodyDownloader` interface.
    Self: BodyDownloader + 'static,
{
    /// Convert the downloader into a [`TaskDownloader`](super::task::TaskDownloader) by spawning
    /// it.
    ///
    /// NOTE(review): this method consumes `self`; by the Rust API naming
    /// guidelines a consuming conversion would be called `into_task`. Renaming
    /// would break existing callers, so only flagging it here.
    pub fn as_task(self) -> TaskDownloader {
        TaskDownloader::spawn(self)
    }
}
impl<B, DB> BodyDownloader for BodiesDownloader<B, DB>
where
B: BodiesClient + 'static,

View File

@@ -1,5 +1,6 @@
//! A headers downloader that can handle multiple requests concurrently.
use super::task::TaskDownloader;
use crate::metrics::DownloaderMetrics;
use futures::{stream::Stream, FutureExt};
use futures_util::{stream::FuturesUnordered, StreamExt};
@@ -357,6 +358,8 @@ where
// validate the response
let highest = &headers[0];
trace!(target: "downloaders::headers", requested_block_number, highest=?highest.number, "Validating non-empty headers response");
if highest.number != requested_block_number {
return Err(HeadersResponseError {
request,
@@ -462,6 +465,7 @@ where
/// Starts a request future
fn submit_request(&mut self, request: HeadersRequest, priority: Priority) {
trace!(target: "downloaders::headers", ?request, "Submitting headers request");
self.in_progress_queue.push(self.request_fut(request, priority));
}
@@ -499,6 +503,18 @@ where
}
}
// Task-spawning conversion for the reverse headers downloader.
impl<H> ReverseHeadersDownloader<H>
where
    H: HeadersClient,
    // `Self: HeaderDownloader` is required so the spawned task wrapper can
    // poll this downloader through the `HeaderDownloader` interface.
    Self: HeaderDownloader + 'static,
{
    /// Convert the downloader into a [`TaskDownloader`](super::task::TaskDownloader) by spawning
    /// it.
    ///
    /// NOTE(review): this method consumes `self`; by the Rust API naming
    /// guidelines a consuming conversion would be called `into_task`. Renaming
    /// would break existing callers, so only flagging it here.
    pub fn as_task(self) -> TaskDownloader {
        TaskDownloader::spawn(self)
    }
}
impl<H> HeaderDownloader for ReverseHeadersDownloader<H>
where
H: HeadersClient + 'static,

View File

@@ -1,4 +1,5 @@
use super::file_codec::BlockFileCodec;
use itertools::Either;
use reth_eth_wire::{BlockBody, RawBlockBody};
use reth_interfaces::{
p2p::{
@@ -30,7 +31,7 @@ use tokio::{
};
use tokio_stream::StreamExt;
use tokio_util::codec::FramedRead;
use tracing::warn;
use tracing::{trace, warn};
/// Front-end API for fetching chain data from a file.
///
@@ -94,26 +95,29 @@ impl FileClient {
// use with_capacity to make sure the internal buffer contains the entire file
let mut stream = FramedRead::with_capacity(&reader[..], BlockFileCodec, file_len as usize);
let mut block_num = 0;
while let Some(block_res) = stream.next().await {
let block = block_res?;
let block_hash = block.header.hash_slow();
// add to the internal maps
headers.insert(block_num, block.header.clone());
hash_to_number.insert(block_hash, block_num);
headers.insert(block.header.number, block.header.clone());
hash_to_number.insert(block_hash, block.header.number);
bodies.insert(
block_hash,
BlockBody { transactions: block.transactions, ommers: block.ommers },
);
// update block num
block_num += 1;
}
trace!(blocks = headers.len(), "Initialized file client");
Ok(Self { headers, hash_to_number, bodies, is_syncing: Arc::new(Default::default()) })
}
/// Get the tip hash of the chain.
///
/// Returns `None` if no headers have been loaded.
pub fn tip(&self) -> Option<H256> {
    // Headers are keyed by block number, so the tip is the entry with the
    // highest key. Using `keys().max()` avoids the `len() as u64 - 1`
    // underflow on an empty map, and stays correct when block numbers do not
    // start at zero or are not contiguous (they come straight from
    // `block.header.number`).
    self.headers.keys().max().and_then(|number| self.headers.get(number)).map(|h| h.hash_slow())
}
/// Use the provided bodies as the file client's block body buffer.
pub(crate) fn with_bodies(mut self, bodies: HashMap<BlockHash, BlockBody>) -> Self {
self.bodies = bodies;
@@ -140,24 +144,39 @@ impl HeadersClient for FileClient {
) -> Self::Output {
// this just searches the buffer, and fails if it can't find the header
let mut headers = Vec::new();
trace!(target : "downloaders::file", request=?request, "Getting headers");
let start_num = match request.start {
BlockHashOrNumber::Hash(hash) => match self.hash_to_number.get(&hash) {
Some(num) => *num,
None => return Box::pin(async move { Err(RequestError::BadResponse) }),
None => {
warn!(%hash, "Could not find starting block number for requested header hash");
return Box::pin(async move { Err(RequestError::BadResponse) })
}
},
BlockHashOrNumber::Number(num) => num,
};
let range = match request.direction {
HeadersDirection::Rising => start_num..=start_num + 1 - request.limit,
HeadersDirection::Falling => start_num + 1 - request.limit..=start_num,
let range = if request.limit == 1 {
Either::Left(start_num..start_num + 1)
} else {
match request.direction {
HeadersDirection::Rising => Either::Left(start_num..start_num + request.limit),
HeadersDirection::Falling => {
Either::Right((start_num - request.limit + 1..=start_num).rev())
}
}
};
trace!(target : "downloaders::file", range=?range, "Getting headers with range");
for block_number in range {
match self.headers.get(&block_number).cloned() {
Some(header) => headers.push(header),
None => return Box::pin(async move { Err(RequestError::BadResponse) }),
None => {
warn!(number=%block_number, "Could not find header");
return Box::pin(async move { Err(RequestError::BadResponse) })
}
}
}

View File

@@ -7,7 +7,9 @@ use std::{fmt, str::FromStr};
// The chain spec module.
mod spec;
pub use spec::{ChainSpec, ChainSpecBuilder, ForkCondition, GOERLI, MAINNET, SEPOLIA};
pub use spec::{
AllGenesisFormats, ChainSpec, ChainSpecBuilder, ForkCondition, GOERLI, MAINNET, SEPOLIA,
};
// The chain info module.
mod info;

View File

@@ -260,6 +260,37 @@ impl From<EthersGenesis> for ChainSpec {
}
}
/// A helper type for compatibility with geth's config
///
/// Deserialization is `untagged`: serde attempts each variant in declaration
/// order, so the geth format is tried first and the reth [`ChainSpec`] format
/// second.
#[derive(Debug, Clone, Deserialize)]
#[serde(untagged)]
pub enum AllGenesisFormats {
    /// The geth genesis format
    Geth(EthersGenesis),
    /// The reth genesis format
    Reth(ChainSpec),
}
impl From<EthersGenesis> for AllGenesisFormats {
fn from(genesis: EthersGenesis) -> Self {
Self::Geth(genesis)
}
}
impl From<ChainSpec> for AllGenesisFormats {
fn from(genesis: ChainSpec) -> Self {
Self::Reth(genesis)
}
}
impl From<AllGenesisFormats> for ChainSpec {
    /// Unwraps either genesis representation into a [`ChainSpec`], converting
    /// the geth format where necessary.
    fn from(format: AllGenesisFormats) -> Self {
        match format {
            // The geth format goes through its own `From<EthersGenesis>` conversion.
            AllGenesisFormats::Geth(geth) => geth.into(),
            // The reth format already is a `ChainSpec`.
            AllGenesisFormats::Reth(spec) => spec,
        }
    }
}
/// A helper to build custom chain specs
#[derive(Debug, Default)]
pub struct ChainSpecBuilder {

View File

@@ -38,7 +38,8 @@ pub use bits::H512;
pub use block::{Block, BlockHashOrNumber, SealedBlock};
pub use bloom::Bloom;
pub use chain::{
Chain, ChainInfo, ChainSpec, ChainSpecBuilder, ForkCondition, GOERLI, MAINNET, SEPOLIA,
AllGenesisFormats, Chain, ChainInfo, ChainSpec, ChainSpecBuilder, ForkCondition, GOERLI,
MAINNET, SEPOLIA,
};
pub use constants::{
EMPTY_OMMER_ROOT, GOERLI_GENESIS, KECCAK_EMPTY, MAINNET_GENESIS, SEPOLIA_GENESIS,

View File

@@ -19,6 +19,7 @@ reth-db = {path = "../../crates/storage/db", features = ["mdbx", "test-utils"] }
reth-discv4 = { path = "../../crates/net/discv4" }
reth-network-api = { path = "../../crates/net/network-api" }
reth-network = { path = "../../crates/net/network", features = ["serde"] }
reth-downloaders = { path = "../../crates/net/downloaders" }
reth-primitives = { path = "../../crates/primitives" }
reth-provider = { path = "../../crates/storage/provider", features = ["test-utils"] }
reth-net-nat = { path = "../../crates/net/nat" }

View File

@@ -3,6 +3,10 @@ use std::{path::PathBuf, sync::Arc};
use reth_db::database::Database;
use reth_discv4::Discv4Config;
use reth_downloaders::{
bodies::bodies::BodiesDownloaderBuilder,
headers::reverse_headers::ReverseHeadersDownloaderBuilder,
};
use reth_network::{
config::{mainnet_nodes, rng_secret_key},
NetworkConfig, NetworkConfigBuilder, PeersConfig,
@@ -66,24 +70,30 @@ pub struct StageConfig {
}
/// Header stage configuration.
#[derive(Debug, Clone, Deserialize, PartialEq, Serialize)]
#[derive(Debug, Clone, Copy, Deserialize, PartialEq, Serialize)]
pub struct HeadersConfig {
/// The maximum number of headers to download before committing progress to the database.
pub commit_threshold: u64,
/// The maximum number of headers to request from a peer at a time.
pub downloader_batch_size: u64,
/// The number of times to retry downloading a set of headers.
pub downloader_retries: usize,
}
impl Default for HeadersConfig {
fn default() -> Self {
Self { commit_threshold: 10_000, downloader_batch_size: 1000, downloader_retries: 5 }
Self { commit_threshold: 10_000, downloader_batch_size: 1000 }
}
}
impl From<HeadersConfig> for ReverseHeadersDownloaderBuilder {
fn from(config: HeadersConfig) -> Self {
ReverseHeadersDownloaderBuilder::default()
.request_limit(config.downloader_batch_size)
.stream_batch_size(config.commit_threshold as usize)
}
}
/// Total difficulty stage configuration
#[derive(Debug, Clone, Deserialize, PartialEq, Serialize)]
#[derive(Debug, Clone, Copy, Deserialize, PartialEq, Serialize)]
pub struct TotalDifficultyConfig {
/// The maximum number of total difficulty entries to sum up before committing progress to the
/// database.
@@ -97,7 +107,7 @@ impl Default for TotalDifficultyConfig {
}
/// Body stage configuration.
#[derive(Debug, Clone, Deserialize, PartialEq, Serialize)]
#[derive(Debug, Clone, Copy, Deserialize, PartialEq, Serialize)]
pub struct BodiesConfig {
/// The batch size of non-empty blocks per one request
pub downloader_request_limit: u64,
@@ -124,8 +134,21 @@ impl Default for BodiesConfig {
}
}
impl From<BodiesConfig> for BodiesDownloaderBuilder {
fn from(config: BodiesConfig) -> Self {
BodiesDownloaderBuilder::default()
.with_stream_batch_size(config.downloader_stream_batch_size)
.with_request_limit(config.downloader_request_limit)
.with_max_buffered_responses(config.downloader_max_buffered_responses)
.with_concurrent_requests_range(
config.downloader_min_concurrent_requests..=
config.downloader_max_concurrent_requests,
)
}
}
/// Sender recovery stage configuration.
#[derive(Debug, Clone, Deserialize, PartialEq, Eq, Serialize)]
#[derive(Debug, Clone, Copy, Deserialize, PartialEq, Eq, Serialize)]
pub struct SenderRecoveryConfig {
/// The maximum number of blocks to process before committing progress to the database.
pub commit_threshold: u64,
@@ -140,7 +163,7 @@ impl Default for SenderRecoveryConfig {
}
/// Execution stage configuration.
#[derive(Debug, Clone, Deserialize, PartialEq, Serialize)]
#[derive(Debug, Clone, Copy, Deserialize, PartialEq, Serialize)]
pub struct ExecutionConfig {
/// The maximum number of blocks to execution before committing progress to the database.
pub commit_threshold: u64,

View File

@@ -1,4 +1,4 @@
use reth_primitives::{ChainSpec, GOERLI, MAINNET, SEPOLIA};
use reth_primitives::{AllGenesisFormats, ChainSpec, GOERLI, MAINNET, SEPOLIA};
use std::path::PathBuf;
/// Clap value parser for [ChainSpec]s that takes either a built-in chainspec or the path
@@ -14,3 +14,18 @@ pub fn chain_spec_value_parser(s: &str) -> Result<ChainSpec, eyre::Error> {
}
})
}
/// Clap value parser for [ChainSpec]s that takes either a built-in genesis format or the path
/// to a custom one.
pub fn genesis_value_parser(s: &str) -> Result<ChainSpec, eyre::Error> {
    match s {
        // Built-in networks resolve to their bundled chain specs.
        "mainnet" => Ok(MAINNET.clone()),
        "goerli" => Ok(GOERLI.clone()),
        "sepolia" => Ok(SEPOLIA.clone()),
        // Anything else is treated as a (shell-expandable) path to a genesis
        // file in either the geth or the reth format.
        path => {
            let expanded = shellexpand::full(path)?.into_owned();
            let contents = std::fs::read_to_string(PathBuf::from(expanded))?;
            let genesis: AllGenesisFormats = serde_json::from_str(&contents)?;
            Ok(genesis.into())
        }
    }
}