diff --git a/bin/darkfid/src/task/sync.rs b/bin/darkfid/src/task/sync.rs index 0c76b9294..2236b6757 100644 --- a/bin/darkfid/src/task/sync.rs +++ b/bin/darkfid/src/task/sync.rs @@ -42,15 +42,19 @@ use crate::{ pub async fn sync_task(node: &Darkfid, checkpoint: Option<(u32, HeaderHash)>) -> Result<()> { info!(target: "darkfid::task::sync_task", "Starting blockchain sync..."); - // Generate a new fork to be able to extend - info!(target: "darkfid::task::sync_task", "Generating new empty fork..."); - node.validator.consensus.generate_empty_fork().await?; - // Grab blocks subscriber let block_sub = node.subscribers.get("blocks").unwrap(); // Grab last known block header, including existing pending sync ones let mut last = node.validator.blockchain.last()?; + + // If checkpoint is not reached, purge headers and start syncing from scratch + if let Some(checkpoint) = checkpoint { + if checkpoint.0 > last.0 { + node.validator.blockchain.headers.remove_all_sync()?; + } + } + // Check sync headers first record is the next one if let Some(next) = node.validator.blockchain.headers.get_first_sync()? { if next.height == last.0 + 1 { @@ -77,10 +81,8 @@ pub async fn sync_task(node: &Darkfid, checkpoint: Option<(u32, HeaderHash)>) -> // We use the next height, in order to also retrieve the checkpoint header. retrieve_headers(node, &common_tip_peers, last.0, checkpoint.0 + 1).await?; - // TODO: Create a more minimal verification so checkpoint blocks can be - // applied directly, skipping the full formal block checks. 
// Retrieve all the blocks for those headers and apply them to canonical - last = retrieve_blocks(node, &common_tip_peers, last, block_sub).await?; + last = retrieve_blocks(node, &common_tip_peers, last, block_sub, true).await?; info!(target: "darkfid::task::sync_task", "Last received block: {} - {}", last.0, last.1); // Grab synced peers most common tip again @@ -88,6 +90,10 @@ pub async fn sync_task(node: &Darkfid, checkpoint: Option<(u32, HeaderHash)>) -> } } + // Generate a new fork to be able to extend + info!(target: "darkfid::task::sync_task", "Generating new empty fork..."); + node.validator.consensus.generate_empty_fork().await?; + // Sync headers and blocks loop { // Retrieve all the headers backwards until our last known one and verify them. @@ -95,7 +101,8 @@ pub async fn sync_task(node: &Darkfid, checkpoint: Option<(u32, HeaderHash)>) -> retrieve_headers(node, &common_tip_peers, last.0, common_tip_height + 1).await?; // Retrieve all the blocks for those headers and apply them to canonical - let last_received = retrieve_blocks(node, &common_tip_peers, last, block_sub).await?; + let last_received = + retrieve_blocks(node, &common_tip_peers, last, block_sub, false).await?; info!(target: "darkfid::task::sync_task", "Last received block: {} - {}", last_received.0, last_received.1); if last == last_received { @@ -340,6 +347,7 @@ async fn retrieve_blocks( peers: &[ChannelPtr], last_known: (u32, HeaderHash), block_sub: &JsonSubscriber, + checkpoint_blocks: bool, ) -> Result<(u32, HeaderHash)> { info!(target: "darkfid::task::sync::retrieve_blocks", "Retrieving missing blocks from peers..."); let mut last_received = last_known; @@ -358,9 +366,15 @@ async fn retrieve_blocks( if headers.is_empty() { break 'blocks_loop } + let mut headers_hashes = Vec::with_capacity(headers.len()); + let mut synced_headers = Vec::with_capacity(headers.len()); + for header in &headers { + headers_hashes.push(header.hash()); + synced_headers.push(header.height); + } // Node creates a 
`SyncRequest` and sends it - let request = SyncRequest { headers: headers.iter().map(|h| h.hash()).collect() }; + let request = SyncRequest { headers: headers_hashes.clone() }; peer.send(&request).await?; // Node waits for response @@ -371,26 +385,41 @@ async fn retrieve_blocks( // Verify and store retrieved blocks debug!(target: "darkfid::task::sync::retrieve_blocks", "Processing received blocks"); received_blocks += response.blocks.len(); - let mut synced_headers = Vec::with_capacity(response.blocks.len()); - for block in &response.blocks { - node.validator.append_proposal(&Proposal::new(block.clone())).await?; - synced_headers.push(block.header.height); - last_received = (block.header.height, block.hash()); + if checkpoint_blocks { + node.validator.add_checkpoint_blocks(&response.blocks, &headers_hashes).await?; + } else { + for block in &response.blocks { + node.validator.append_proposal(&Proposal::new(block.clone())).await?; + } } + last_received = (*synced_headers.last().unwrap(), *headers_hashes.last().unwrap()); // Remove synced headers node.validator.blockchain.headers.remove_sync(&synced_headers)?; - // Perform finalization for received blocks - let finalized = node.validator.finalization().await?; - if !finalized.is_empty() { + if checkpoint_blocks { // Notify subscriber - let mut notif_blocks = Vec::with_capacity(finalized.len()); - for block in finalized { + let mut notif_blocks = Vec::with_capacity(response.blocks.len()); + info!(target: "darkfid::task::sync::retrieve_blocks", "Blocks added:"); + for (index, block) in response.blocks.iter().enumerate() { + info!(target: "darkfid::task::sync::retrieve_blocks", "\t{} - {}", headers_hashes[index], headers[index].height); notif_blocks - .push(JsonValue::String(base64::encode(&serialize_async(&block).await))); + .push(JsonValue::String(base64::encode(&serialize_async(block).await))); } block_sub.notify(JsonValue::Array(notif_blocks)).await; + } else { + // Perform finalization for received blocks + let 
finalized = node.validator.finalization().await?; + if !finalized.is_empty() { + // Notify subscriber + let mut notif_blocks = Vec::with_capacity(finalized.len()); + for block in finalized { + notif_blocks.push(JsonValue::String(base64::encode( + &serialize_async(&block).await, + ))); + } + block_sub.notify(JsonValue::Array(notif_blocks)).await; + } } info!(target: "darkfid::task::sync::retrieve_blocks", "Blocks received: {}/{}", received_blocks, total); diff --git a/src/validator/mod.rs b/src/validator/mod.rs index dd5760ebf..0129fb94a 100644 --- a/src/validator/mod.rs +++ b/src/validator/mod.rs @@ -26,7 +26,7 @@ use smol::lock::RwLock; use crate::{ blockchain::{ block_store::{BlockDifficulty, BlockInfo, BlockRanks}, - Blockchain, BlockchainOverlay, + Blockchain, BlockchainOverlay, HeaderHash, }, error::TxVerifyFailed, tx::Transaction, @@ -44,7 +44,8 @@ use pow::PoWModule; /// Verification functions pub mod verification; use verification::{ - verify_block, verify_genesis_block, verify_producer_transaction, verify_transactions, + verify_block, verify_checkpoint_block, verify_genesis_block, verify_producer_transaction, + verify_transactions, }; /// Fee calculation helpers @@ -365,11 +366,101 @@ impl Validator { Ok(finalized_blocks) } + /// Apply provided set of [`BlockInfo`] without doing formal verification. + /// A set of [`HeaderHash`] is also provided, to verify that the provided + /// block hash matches the expected header one. + /// Note: this function should only be used for blocks received using a + /// checkpoint, since in that case we enforce the node to follow the sequence, + /// assuming all its blocks are valid. 
+ pub async fn add_checkpoint_blocks( + &self, + blocks: &[BlockInfo], + headers: &[HeaderHash], + ) -> Result<()> { + // Check provided sequences are the same length + if blocks.len() != headers.len() { + return Err(Error::InvalidInputLengths) + } + + debug!(target: "validator::add_checkpoint_blocks", "Instantiating BlockchainOverlay"); + let overlay = BlockchainOverlay::new(&self.blockchain)?; + + // Retrieve last block difficulty to access current ranks + let last_difficulty = self.blockchain.last_block_difficulty()?; + let mut current_targets_rank = last_difficulty.ranks.targets_rank; + let mut current_hashes_rank = last_difficulty.ranks.hashes_rank; + + // Grab current PoW module to validate each block + let mut module = self.consensus.module.read().await.clone(); + + // Keep track of all blocks transactions to remove them from pending txs store + let mut removed_txs = vec![]; + + // Validate and insert each block + for (index, block) in blocks.iter().enumerate() { + // Verify block + match verify_checkpoint_block(&overlay, block, &headers[index]).await { + Ok(()) => { /* Do nothing */ } + // Skip already existing block + Err(Error::BlockAlreadyExists(_)) => continue, + Err(e) => { + error!(target: "validator::add_checkpoint_blocks", "Erroneous block found in set: {}", e); + overlay.lock().unwrap().overlay.lock().unwrap().purge_new_trees()?; + return Err(Error::BlockIsInvalid(block.hash().as_string())) + } + }; + + // Grab next mine target and difficulty + let (next_target, next_difficulty) = module.next_mine_target_and_difficulty()?; + + // Calculate block rank + let (target_distance_sq, hash_distance_sq) = block_rank(block, &next_target); + + // Update current ranks + current_targets_rank += target_distance_sq.clone(); + current_hashes_rank += hash_distance_sq.clone(); + + // Generate block difficulty and update PoW module + let cummulative_difficulty = + module.cummulative_difficulty.clone() + next_difficulty.clone(); + let ranks = BlockRanks::new( + 
target_distance_sq, + current_targets_rank.clone(), + hash_distance_sq, + current_hashes_rank.clone(), + ); + let block_difficulty = BlockDifficulty::new( + block.header.height, + block.header.timestamp, + next_difficulty, + cummulative_difficulty, + ranks, + ); + module.append_difficulty(&overlay, block_difficulty)?; + + // Store block transactions + for tx in &block.txs { + removed_txs.push(tx.clone()); + } + } + + debug!(target: "validator::add_checkpoint_blocks", "Applying overlay changes"); + overlay.lock().unwrap().overlay.lock().unwrap().apply()?; + + // Remove blocks transactions from pending txs store + self.blockchain.remove_pending_txs(&removed_txs)?; + + // Update PoW module + *self.consensus.module.write().await = module; + + Ok(()) + } + /// Validate a set of [`BlockInfo`] in sequence and apply them if all are valid. /// Note: this function should only be used in tests when we don't want to /// perform consensus logic. pub async fn add_test_blocks(&self, blocks: &[BlockInfo]) -> Result<()> { - debug!(target: "validator::add_blocks", "Instantiating BlockchainOverlay"); + debug!(target: "validator::add_test_blocks", "Instantiating BlockchainOverlay"); let overlay = BlockchainOverlay::new(&self.blockchain)?; // Retrieve last block @@ -388,17 +479,19 @@ impl Validator { // Validate and insert each block for block in blocks { - // Skip already existing block - if overlay.lock().unwrap().has_block(block)? 
{ - previous = block; - continue; - } - // Verify block - if verify_block(&overlay, &module, block, previous).await.is_err() { - error!(target: "validator::add_blocks", "Erroneous block found in set"); - overlay.lock().unwrap().overlay.lock().unwrap().purge_new_trees()?; - return Err(Error::BlockIsInvalid(block.hash().as_string())) + match verify_block(&overlay, &module, block, previous).await { + Ok(()) => { /* Do nothing */ } + // Skip already existing block + Err(Error::BlockAlreadyExists(_)) => { + previous = block; + continue + } + Err(e) => { + error!(target: "validator::add_test_blocks", "Erroneous block found in set: {}", e); + overlay.lock().unwrap().overlay.lock().unwrap().purge_new_trees()?; + return Err(Error::BlockIsInvalid(block.hash().as_string())) + } }; // Grab next mine target and difficulty @@ -438,7 +531,7 @@ impl Validator { previous = block; } - debug!(target: "validator::add_blocks", "Applying overlay changes"); + debug!(target: "validator::add_test_blocks", "Applying overlay changes"); overlay.lock().unwrap().overlay.lock().unwrap().apply()?; // Purge pending erroneous txs since canonical state has been changed diff --git a/src/validator/verification.rs b/src/validator/verification.rs index 77bd077a9..b512c2785 100644 --- a/src/validator/verification.rs +++ b/src/validator/verification.rs @@ -36,6 +36,7 @@ use smol::io::Cursor; use crate::{ blockchain::{ block_store::append_tx_to_merkle_tree, BlockInfo, Blockchain, BlockchainOverlayPtr, + HeaderHash, }, error::TxVerifyFailed, runtime::vm_runtime::Runtime, @@ -49,7 +50,7 @@ use crate::{ Error, Result, }; -/// Verify given genesis [`BlockInfo`], and apply it to the provided overlay +/// Verify given genesis [`BlockInfo`], and apply it to the provided overlay. 
pub async fn verify_genesis_block(overlay: &BlockchainOverlayPtr, block: &BlockInfo) -> Result<()> { let block_hash = block.hash().as_string(); debug!(target: "validator::verification::verify_genesis_block", "Validating genesis block {}", block_hash); @@ -164,7 +165,7 @@ pub fn validate_blockchain( Ok(()) } -/// Verify given [`BlockInfo`], and apply it to the provided overlay +/// Verify given [`BlockInfo`], and apply it to the provided overlay. pub async fn verify_block( overlay: &BlockchainOverlayPtr, module: &PoWModule, @@ -225,7 +226,70 @@ pub async fn verify_block( Ok(()) } -/// Verify block proposer signature, using the proposal transaction signature as signing key +/// Verify given checkpoint [`BlockInfo`], and apply it to the provided overlay. +pub async fn verify_checkpoint_block( + overlay: &BlockchainOverlayPtr, + block: &BlockInfo, + header: &HeaderHash, +) -> Result<()> { + let block_hash = block.hash(); + debug!(target: "validator::verification::verify_checkpoint_block", "Validating block {}", block_hash); + + // Check if block already exists + if overlay.lock().unwrap().has_block(block)? 
{ + return Err(Error::BlockAlreadyExists(block_hash.as_string())) + } + + // Check if block hash matches the expected (provided) one + if block_hash != *header { + error!(target: "validator::verification::verify_checkpoint_block", "Block hash doesn't match the expected one"); + return Err(Error::BlockIsInvalid(block_hash.as_string())) + } + + // Verify transactions vector contains at least one (producer) transaction + if block.txs.is_empty() { + return Err(Error::BlockContainsNoTransactions(block_hash.as_string())) + } + + // Apply transactions, excluding producer (last) one + let mut tree = MerkleTree::new(1); + let txs = &block.txs[..block.txs.len() - 1]; + let e = apply_transactions(overlay, block.header.height, txs, &mut tree).await; + if let Err(e) = e { + warn!( + target: "validator::verification::verify_checkpoint_block", + "[VALIDATOR] Erroneous transactions found in set", + ); + overlay.lock().unwrap().overlay.lock().unwrap().purge_new_trees()?; + return Err(e) + } + + // Apply producer transaction + let public_key = apply_producer_transaction( + overlay, + block.header.height, + block.txs.last().unwrap(), + &mut tree, + ) + .await?; + + // Verify transactions merkle tree root matches header one + if tree.root(0).unwrap() != block.header.root { + error!(target: "validator::verification::verify_checkpoint_block", "Block Merkle tree root is invalid"); + return Err(Error::BlockIsInvalid(block_hash.as_string())) + } + + // Verify producer signature + verify_producer_signature(block, &public_key)?; + + // Insert block + overlay.lock().unwrap().add_block(block)?; + + debug!(target: "validator::verification::verify_checkpoint_block", "Block {} verified successfully", block_hash); + Ok(()) +} + +/// Verify block proposer signature, using the producer transaction signature as signing key /// over blocks header hash. 
pub fn verify_producer_signature(block: &BlockInfo, public_key: &PublicKey) -> Result<()> { if !public_key.verify(block.header.hash().inner(), &block.signature) { @@ -246,7 +310,7 @@ pub async fn verify_producer_transaction( tree: &mut MerkleTree, ) -> Result { let tx_hash = tx.hash(); - debug!(target: "validator::verification::verify_producer_transaction", "Validating proposal transaction {}", tx_hash); + debug!(target: "validator::verification::verify_producer_transaction", "Validating producer transaction {}", tx_hash); // Producer transactions must contain a single, non-empty call if tx.calls.len() != 1 || tx.calls[0].data.data.is_empty() { @@ -303,7 +367,7 @@ pub async fn verify_producer_transaction( // Check that only one ZK proof and signature public key exist if zkp_pub.len() != 1 || sig_pub.len() != 1 { - error!(target: "validator::verification::verify_producer_transaction", "Proposal contains multiple ZK proofs or signature public keys"); + error!(target: "validator::verification::verify_producer_transaction", "Producer transaction contains multiple ZK proofs or signature public keys"); return Err(TxVerifyFailed::ErroneousTxs(vec![tx.clone()]).into()) } @@ -359,7 +423,7 @@ pub async fn verify_producer_transaction( debug!(target: "validator::verification::verify_producer_transaction", "Verifying ZK proofs for transaction {}", tx_hash); if let Err(e) = tx.verify_zkps(&verifying_keys, zkp_table).await { - error!(target: "validator::verification::verify_proposal_transaction", "ZK proof verification for tx {} failed: {}", tx_hash, e); + error!(target: "validator::verification::verify_producer_transaction", "ZK proof verification for tx {} failed: {}", tx_hash, e); return Err(TxVerifyFailed::InvalidZkProof.into()) } debug!(target: "validator::verification::verify_producer_transaction", "ZK proof verification successful"); @@ -367,7 +431,80 @@ pub async fn verify_producer_transaction( // Append hash to merkle tree append_tx_to_merkle_tree(tree, tx); - 
debug!(target: "validator::verification::verify_producer_transaction", "Proposal transaction {} verified successfully", tx_hash); + debug!(target: "validator::verification::verify_producer_transaction", "Producer transaction {} verified successfully", tx_hash); + + Ok(signature_public_key) +} + +/// Apply given producer [`Transaction`] to the provided overlay, without formal verification. +/// Returns transaction signature public key. Additionally, append its hash to the provided Merkle tree. +async fn apply_producer_transaction( + overlay: &BlockchainOverlayPtr, + verifying_block_height: u32, + tx: &Transaction, + tree: &mut MerkleTree, +) -> Result { + let tx_hash = tx.hash(); + debug!(target: "validator::verification::apply_producer_transaction", "Applying producer transaction {}", tx_hash); + + // Producer transactions must contain a single, non-empty call + if tx.calls.len() != 1 || tx.calls[0].data.data.is_empty() { + return Err(TxVerifyFailed::ErroneousTxs(vec![tx.clone()]).into()) + } + + debug!(target: "validator::verification::apply_producer_transaction", "Executing contract call"); + + // Write the actual payload data + let mut payload = vec![]; + tx.calls.encode_async(&mut payload).await?; // Actual call data + + debug!(target: "validator::verification::apply_producer_transaction", "Instantiating WASM runtime"); + let call = &tx.calls[0]; + let wasm = overlay.lock().unwrap().contracts.get(call.data.contract_id)?; + + let mut runtime = Runtime::new( + &wasm, + overlay.clone(), + call.data.contract_id, + verifying_block_height, + tx_hash, + // Call index in producer tx is 0 + 0, + )?; + + debug!(target: "validator::verification::apply_producer_transaction", "Executing \"metadata\" call"); + let metadata = runtime.metadata(&payload)?; + + // Decode the metadata retrieved from the execution + let mut decoder = Cursor::new(&metadata); + + // The tuple is (zkas_ns, public_inputs) + let _: Vec<(String, Vec)> = AsyncDecodable::decode_async(&mut decoder).await?; 
+ let sig_pub: Vec = AsyncDecodable::decode_async(&mut decoder).await?; + + // Check that only one signature public key exists + if sig_pub.len() != 1 { + error!(target: "validator::verification::apply_producer_transaction", "Producer transaction contains multiple signature public keys"); + return Err(TxVerifyFailed::ErroneousTxs(vec![tx.clone()]).into()) + } + + let signature_public_key = *sig_pub.last().unwrap(); + + // After getting the metadata, we run the "exec" function with the same runtime + // and the same payload. + debug!(target: "validator::verification::apply_producer_transaction", "Executing \"exec\" call"); + let state_update = runtime.exec(&payload)?; + debug!(target: "validator::verification::apply_producer_transaction", "Successfully executed \"exec\" call"); + + // If that was successful, we apply the state update in the ephemeral overlay. + debug!(target: "validator::verification::apply_producer_transaction", "Executing \"apply\" call"); + runtime.apply(&state_update)?; + debug!(target: "validator::verification::apply_producer_transaction", "Successfully executed \"apply\" call"); + + // Append hash to merkle tree + append_tx_to_merkle_tree(tree, tx); + + debug!(target: "validator::verification::apply_producer_transaction", "Producer transaction {} executed successfully", tx_hash); Ok(signature_public_key) } @@ -616,6 +753,77 @@ pub async fn verify_transaction( Ok(gas_used) } +/// Apply given [`Transaction`] to the provided overlay. +/// Additionally, append its hash to the provided Merkle tree. 
+async fn apply_transaction( + overlay: &BlockchainOverlayPtr, + verifying_block_height: u32, + tx: &Transaction, + tree: &mut MerkleTree, +) -> Result<()> { + let tx_hash = tx.hash(); + debug!(target: "validator::verification::apply_transaction", "Applying transaction {}", tx_hash); + + // Iterate over all calls to get the metadata + for (idx, call) in tx.calls.iter().enumerate() { + debug!(target: "validator::verification::apply_transaction", "Executing contract call {}", idx); + + // Write the actual payload data + let mut payload = vec![]; + tx.calls.encode_async(&mut payload).await?; + + debug!(target: "validator::verification::apply_transaction", "Instantiating WASM runtime"); + let wasm = overlay.lock().unwrap().contracts.get(call.data.contract_id)?; + let mut runtime = Runtime::new( + &wasm, + overlay.clone(), + call.data.contract_id, + verifying_block_height, + tx_hash, + idx as u8, + )?; + + // Run the "exec" function + debug!(target: "validator::verification::apply_transaction", "Executing \"exec\" call"); + let state_update = runtime.exec(&payload)?; + debug!(target: "validator::verification::apply_transaction", "Successfully executed \"exec\" call"); + + // If that was successful, we apply the state update in the ephemeral overlay + debug!(target: "validator::verification::apply_transaction", "Executing \"apply\" call"); + runtime.apply(&state_update)?; + debug!(target: "validator::verification::apply_transaction", "Successfully executed \"apply\" call"); + + // If this call is supposed to deploy a new contract, we have to instantiate + // a new `Runtime` and run its deploy function. 
+ if call.data.contract_id == *DEPLOYOOOR_CONTRACT_ID && call.data.data[0] == 0x00 + /* DeployV1 */ + { + debug!(target: "validator::verification::apply_transaction", "Deploying new contract"); + // Deserialize the deployment parameters + let deploy_params: DeployParamsV1 = deserialize_async(&call.data.data[1..]).await?; + let deploy_cid = ContractId::derive_public(deploy_params.public_key); + + // Instantiate the new deployment runtime + let mut deploy_runtime = Runtime::new( + &deploy_params.wasm_bincode, + overlay.clone(), + deploy_cid, + verifying_block_height, + tx_hash, + idx as u8, + )?; + + deploy_runtime.deploy(&deploy_params.ix)?; + } + } + + // Append hash to merkle tree + append_tx_to_merkle_tree(tree, tx); + + debug!(target: "validator::verification::apply_transaction", "Transaction {} applied successfully", tx_hash); + Ok(()) +} + /// Verify a set of [`Transaction`] in sequence and apply them if all are valid. /// In case any of the transactions fail, they will be returned to the caller as an error. /// If all transactions are valid, the function will return the accumulated gas used from @@ -663,11 +871,45 @@ pub async fn verify_transactions( } } - if erroneous_txs.is_empty() { - Ok(gas_used) - } else { - Err(TxVerifyFailed::ErroneousTxs(erroneous_txs).into()) + if !erroneous_txs.is_empty() { + return Err(TxVerifyFailed::ErroneousTxs(erroneous_txs).into()) } + + Ok(gas_used) +} + +/// Apply given set of [`Transaction`] in sequence, without formal verification. +/// In case any of the transactions fail, they will be returned to the caller as an error. +/// Additionally, their hash is appended to the provided Merkle tree. 
+async fn apply_transactions( + overlay: &BlockchainOverlayPtr, + verifying_block_height: u32, + txs: &[Transaction], + tree: &mut MerkleTree, +) -> Result<()> { + debug!(target: "validator::verification::apply_transactions", "Applying {} transactions", txs.len()); + if txs.is_empty() { + return Ok(()) + } + + // Tracker for failed txs + let mut erroneous_txs = vec![]; + + // Iterate over transactions and attempt to apply them + for tx in txs { + overlay.lock().unwrap().checkpoint(); + if let Err(e) = apply_transaction(overlay, verifying_block_height, tx, tree).await { + warn!(target: "validator::verification::apply_transactions", "Transaction apply failed: {}", e); + erroneous_txs.push(tx.clone()); + overlay.lock().unwrap().revert_to_checkpoint()?; + }; + } + + if !erroneous_txs.is_empty() { + return Err(TxVerifyFailed::ErroneousTxs(erroneous_txs).into()) + } + + Ok(()) } /// Verify given [`Proposal`] against provided consensus state,