mirror of
https://github.com/akula-bft/akula.git
synced 2026-04-19 03:00:13 -04:00
VerifyStage: headers validation using preverified hashes. (#27)
The preverified hashes are a list of known precomputed hashes of every 192nd block in the chain: hash(0), hash(192), hash(384), hash(576), ... The preverified hashes are copied from: https://github.com/ledgerwatch/erigon/blob/devel/turbo/stages/headerdownload/preverified_hashes_mainnet.go https://github.com/ledgerwatch/erigon/blob/devel/turbo/stages/headerdownload/preverified_hashes_ropsten.go When we have a HeaderSlice and need to verify it, the algorithm verifies that the top of the slice matches one of the preverified hashes, and that all blocks down to the root of the slice are properly connected by the parent_hash field. For example, if we have a HeaderSlice[192...384] (with block headers from 192 to 384 inclusive), it verifies that: hash(slice[384]) == preverified hash(384) hash(slice[383]) == slice[384].parent_hash hash(slice[382]) == slice[383].parent_hash ... hash(slice[192]) == slice[193].parent_hash Thus verifying hashes of all the headers.
This commit is contained in:
@@ -2,8 +2,9 @@ use crate::downloader::{
|
||||
chain_config::{ChainConfig, ChainsConfig},
|
||||
headers::{
|
||||
fetch_receive_stage::FetchReceiveStage, fetch_request_stage::FetchRequestStage,
|
||||
header_slices::HeaderSlices, refill_stage::RefillStage, retry_stage::RetryStage,
|
||||
save_stage::SaveStage, verify_stage::VerifyStage,
|
||||
header_slices::HeaderSlices, preverified_hashes_config::PreverifiedHashesConfig,
|
||||
refill_stage::RefillStage, retry_stage::RetryStage, save_stage::SaveStage,
|
||||
verify_stage::VerifyStage,
|
||||
},
|
||||
opts::Opts,
|
||||
sentry_client,
|
||||
@@ -79,7 +80,10 @@ impl Downloader {
|
||||
|
||||
let retry_stage = RetryStage::new(Arc::clone(&header_slices));
|
||||
|
||||
let verify_stage = VerifyStage::new(Arc::clone(&header_slices));
|
||||
let verify_stage = VerifyStage::new(
|
||||
Arc::clone(&header_slices),
|
||||
PreverifiedHashesConfig::new(&self.opts.chain_name)?,
|
||||
);
|
||||
|
||||
let save_stage = SaveStage::new(Arc::clone(&header_slices));
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@ pub mod header_slices;
|
||||
|
||||
pub mod fetch_receive_stage;
|
||||
pub mod fetch_request_stage;
|
||||
pub mod preverified_hashes_config;
|
||||
pub mod refill_stage;
|
||||
pub mod retry_stage;
|
||||
pub mod save_stage;
|
||||
@@ -14,5 +15,6 @@ pub use ui_crossterm::HeaderSlicesView;
|
||||
|
||||
#[cfg(not(feature = "crossterm"))]
|
||||
pub mod ui_tracing;
|
||||
|
||||
#[cfg(not(feature = "crossterm"))]
|
||||
pub use ui_tracing::HeaderSlicesView;
|
||||
|
||||
56
src/downloader/headers/preverified_hashes_config.rs
Normal file
56
src/downloader/headers/preverified_hashes_config.rs
Normal file
@@ -0,0 +1,56 @@
|
||||
use serde::{de, Deserialize};
|
||||
use std::str::FromStr;
|
||||
|
||||
/// The preverified hashes is a list of known precomputed hashes of every 192-th block in the chain:
|
||||
///
|
||||
/// hash(0), hash(192), hash(384), hash(576), ...
|
||||
///
|
||||
/// The preverified hashes are copied from:
|
||||
/// https://github.com/ledgerwatch/erigon/blob/devel/turbo/stages/headerdownload/preverified_hashes_mainnet.go
|
||||
/// https://github.com/ledgerwatch/erigon/blob/devel/turbo/stages/headerdownload/preverified_hashes_ropsten.go
|
||||
pub struct PreverifiedHashesConfig {
|
||||
pub hashes: Vec<ethereum_types::H256>,
|
||||
}
|
||||
|
||||
struct UnprefixedHexH256(pub ethereum_types::H256);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct PreverifiedHashesConfigUnprefixedHex {
|
||||
pub hashes: Vec<UnprefixedHexH256>,
|
||||
}
|
||||
|
||||
impl PreverifiedHashesConfig {
|
||||
pub fn new(chain_name: &str) -> anyhow::Result<Self> {
|
||||
let config_text = match chain_name {
|
||||
"mainnet" => include_str!("preverified_hashes_mainnet.toml"),
|
||||
"ropsten" => include_str!("preverified_hashes_ropsten.toml"),
|
||||
_ => anyhow::bail!("unsupported chain"),
|
||||
};
|
||||
let config: PreverifiedHashesConfigUnprefixedHex = toml::from_str(config_text)?;
|
||||
Ok(PreverifiedHashesConfig {
|
||||
hashes: config.hashes.iter().map(|hash| hash.0).collect(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for UnprefixedHexH256 {
|
||||
type Err = hex::FromHexError;
|
||||
|
||||
fn from_str(hash_str: &str) -> Result<Self, Self::Err> {
|
||||
let mut hash_bytes = [0u8; 32];
|
||||
hex::decode_to_slice(hash_str, &mut hash_bytes)?;
|
||||
let hash = ethereum_types::H256::from(hash_bytes);
|
||||
|
||||
Ok(UnprefixedHexH256(hash))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for UnprefixedHexH256 {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: de::Deserializer<'de>,
|
||||
{
|
||||
let hash_str = String::deserialize(deserializer)?;
|
||||
FromStr::from_str(&hash_str).map_err(de::Error::custom)
|
||||
}
|
||||
}
|
||||
66098
src/downloader/headers/preverified_hashes_mainnet.toml
Normal file
66098
src/downloader/headers/preverified_hashes_mainnet.toml
Normal file
File diff suppressed because it is too large
Load Diff
54695
src/downloader/headers/preverified_hashes_ropsten.toml
Normal file
54695
src/downloader/headers/preverified_hashes_ropsten.toml
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,4 +1,8 @@
|
||||
use crate::downloader::headers::header_slices::{HeaderSlice, HeaderSliceStatus, HeaderSlices};
|
||||
use crate::downloader::headers::{
|
||||
header_slices,
|
||||
header_slices::{HeaderSlice, HeaderSliceStatus, HeaderSlices},
|
||||
preverified_hashes_config::PreverifiedHashesConfig,
|
||||
};
|
||||
use parking_lot::lock_api::RwLockUpgradableReadGuard;
|
||||
use std::{cell::RefCell, ops::DerefMut, sync::Arc};
|
||||
use tokio::sync::watch;
|
||||
@@ -7,15 +11,20 @@ use tracing::*;
|
||||
/// Checks that block hashes are matching the expected ones and sets Verified status.
|
||||
pub struct VerifyStage {
|
||||
header_slices: Arc<HeaderSlices>,
|
||||
preverified_hashes: PreverifiedHashesConfig,
|
||||
pending_watch: RefCell<watch::Receiver<usize>>,
|
||||
}
|
||||
|
||||
impl VerifyStage {
|
||||
pub fn new(header_slices: Arc<HeaderSlices>) -> Self {
|
||||
pub fn new(
|
||||
header_slices: Arc<HeaderSlices>,
|
||||
preverified_hashes: PreverifiedHashesConfig,
|
||||
) -> Self {
|
||||
let pending_watch = header_slices.watch_status_changes(HeaderSliceStatus::Downloaded);
|
||||
|
||||
Self {
|
||||
header_slices,
|
||||
preverified_hashes,
|
||||
pending_watch: RefCell::new(pending_watch),
|
||||
}
|
||||
}
|
||||
@@ -63,8 +72,62 @@ impl VerifyStage {
|
||||
})
|
||||
}
|
||||
|
||||
fn verify_slice(&self, _slice: &HeaderSlice) -> bool {
|
||||
// TODO: verify hashes properly
|
||||
rand::random::<u8>() < 224
|
||||
/// The algorithm verifies that the top of the slice matches one of the preverified hashes,
|
||||
/// and that all blocks down to the root of the slice are connected by the parent_hash field.
|
||||
///
|
||||
/// For example, if we have a HeaderSlice[192...384]
|
||||
/// (with block headers from 192 to 384 inclusive), it verifies that:
|
||||
///
|
||||
/// hash(slice[384]) == preverified hash(384)
|
||||
/// hash(slice[383]) == slice[384].parent_hash
|
||||
/// hash(slice[382]) == slice[383].parent_hash
|
||||
/// ...
|
||||
/// hash(slice[192]) == slice[193].parent_hash
|
||||
///
|
||||
/// Thus verifying hashes of all the headers.
|
||||
fn verify_slice(&self, slice: &HeaderSlice) -> bool {
|
||||
if slice.headers.is_none() {
|
||||
return false;
|
||||
}
|
||||
let headers = slice.headers.as_ref().unwrap();
|
||||
|
||||
if headers.is_empty() {
|
||||
return true;
|
||||
}
|
||||
|
||||
let last = headers.last().unwrap();
|
||||
let last_hash = last.hash();
|
||||
let expected_last_hash =
|
||||
self.preverified_hash(slice.start_block_num + headers.len() as u64 - 1);
|
||||
if expected_last_hash.is_none() {
|
||||
return false;
|
||||
}
|
||||
if last_hash != *expected_last_hash.unwrap() {
|
||||
return false;
|
||||
}
|
||||
|
||||
for child_index in (1..headers.len()).rev() {
|
||||
let parent_index = child_index - 1;
|
||||
|
||||
let child = &headers[child_index];
|
||||
let parent = &headers[parent_index];
|
||||
|
||||
let parent_hash = parent.hash();
|
||||
let expected_parent_hash = child.parent_hash;
|
||||
if parent_hash != expected_parent_hash {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
fn preverified_hash(&self, block_num: u64) -> Option<ðereum_types::H256> {
|
||||
let preverified_step_size = header_slices::HEADER_SLICE_SIZE as u64;
|
||||
if block_num % preverified_step_size != 0 {
|
||||
return None;
|
||||
}
|
||||
let index = block_num / preverified_step_size;
|
||||
self.preverified_hashes.hashes.get(index as usize)
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user