validator: decoupled verification functions so we can chain them using the same overlay

aggstam
2023-06-28 20:52:29 +03:00
parent 97445fca25
commit c77ed67df1
15 changed files with 279 additions and 279 deletions
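As a rough illustration of the decoupling described above, the sketch below chains the new standalone helpers over a single overlay. It is not code from this commit: it assumes the paths shown in the diff (crate `darkfi`, module `validator::verification`), a hypothetical helper name `verify_chained`, and a caller that already holds a `Blockchain`, a `TimeKeeper`, a block and its transactions.

use darkfi::{
    blockchain::{BlockInfo, Blockchain, BlockchainOverlay},
    tx::Transaction,
    util::time::TimeKeeper,
    validator::verification::{verify_block, verify_transactions},
    Result,
};

// Verify a block and a batch of transactions against the same ephemeral overlay,
// writing to the database only if every transaction checks out.
async fn verify_chained(
    blockchain: &Blockchain,
    time_keeper: &TimeKeeper,
    block: &BlockInfo,
    previous: &Option<BlockInfo>,
    txs: &[Transaction],
) -> Result<Vec<Transaction>> {
    // One overlay shared by all verification steps
    let overlay = BlockchainOverlay::new(blockchain)?;

    // Block-level checks insert the block into the overlay
    verify_block(overlay.clone(), block, previous)?;

    // Transaction verification then runs on top of that same overlay state
    let erroneous_txs = verify_transactions(overlay.clone(), time_keeper, txs).await?;

    // Flush the accumulated changes to the actual chain db only on a clean run
    if erroneous_txs.is_empty() {
        overlay.lock().unwrap().overlay.lock().unwrap().apply()?;
    }

    Ok(erroneous_txs)
}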

View File

@@ -115,12 +115,7 @@ async fn integration_test() -> Result<()> {
tx.signatures = vec![sigs];
let timer = Instant::now();
dao_th
.alice_validator
.read()
.await
.verify_transactions(&[tx.clone()], current_slot, true)
.await?;
dao_th.alice_validator.read().await.add_transactions(&[tx.clone()], current_slot, true).await?;
mint_verify_times.push(timer.elapsed());
// TODO: Witness and add to wallet merkle tree?
@@ -191,12 +186,7 @@ async fn integration_test() -> Result<()> {
let sigs = tx.create_sigs(&mut OsRng, &vec![dao_th.faucet_kp.secret])?;
tx.signatures = vec![sigs];
dao_th
.alice_validator
.read()
.await
.verify_transactions(&[tx.clone()], current_slot, true)
.await?;
dao_th.alice_validator.read().await.add_transactions(&[tx.clone()], current_slot, true).await?;
// Wallet stuff
@@ -310,7 +300,7 @@ async fn integration_test() -> Result<()> {
.alice_validator
.read()
.await
.verify_transactions(&[tx1.clone(), tx2.clone(), tx3.clone()], current_slot, true)
.add_transactions(&[tx1.clone(), tx2.clone(), tx3.clone()], current_slot, true)
.await?;
// Wallet
@@ -440,12 +430,7 @@ async fn integration_test() -> Result<()> {
tx.signatures = vec![sigs];
let timer = Instant::now();
dao_th
.alice_validator
.read()
.await
.verify_transactions(&[tx.clone()], current_slot, true)
.await?;
dao_th.alice_validator.read().await.add_transactions(&[tx.clone()], current_slot, true).await?;
propose_verify_times.push(timer.elapsed());
//// Wallet
@@ -550,12 +535,7 @@ async fn integration_test() -> Result<()> {
tx.signatures = vec![sigs];
let timer = Instant::now();
dao_th
.alice_validator
.read()
.await
.verify_transactions(&[tx.clone()], current_slot, true)
.await?;
dao_th.alice_validator.read().await.add_transactions(&[tx.clone()], current_slot, true).await?;
vote_verify_times.push(timer.elapsed());
// Secret vote info. Needs to be revealed at some point.
@@ -620,12 +600,7 @@ async fn integration_test() -> Result<()> {
tx.signatures = vec![sigs];
let timer = Instant::now();
dao_th
.alice_validator
.read()
.await
.verify_transactions(&[tx.clone()], current_slot, true)
.await?;
dao_th.alice_validator.read().await.add_transactions(&[tx.clone()], current_slot, true).await?;
vote_verify_times.push(timer.elapsed());
let vote_note_2 = {
@@ -687,12 +662,7 @@ async fn integration_test() -> Result<()> {
tx.signatures = vec![sigs];
let timer = Instant::now();
dao_th
.alice_validator
.read()
.await
.verify_transactions(&[tx.clone()], current_slot, true)
.await?;
dao_th.alice_validator.read().await.add_transactions(&[tx.clone()], current_slot, true).await?;
vote_verify_times.push(timer.elapsed());
// Secret vote info. Needs to be revealed at some point.
@@ -874,12 +844,7 @@ async fn integration_test() -> Result<()> {
tx.signatures = vec![xfer_sigs, exec_sigs];
let timer = Instant::now();
dao_th
.alice_validator
.read()
.await
.verify_transactions(&[tx.clone()], current_slot, true)
.await?;
dao_th.alice_validator.read().await.add_transactions(&[tx.clone()], current_slot, true).await?;
exec_verify_times.push(timer.elapsed());
// Statistics

View File

@@ -92,7 +92,7 @@ impl TestHarness {
self.tx_action_benchmarks.get_mut(&TxAction::ConsensusGenesisStake).unwrap();
let timer = Instant::now();
wallet.validator.read().await.verify_transactions(&[tx.clone()], slot, true).await?;
wallet.validator.read().await.add_transactions(&[tx.clone()], slot, true).await?;
wallet.consensus_staked_merkle_tree.append(MerkleNode::from(params.output.coin.inner()));
tx_action_benchmark.verify_times.push(timer.elapsed());
@@ -115,7 +115,7 @@ impl TestHarness {
.validator
.read()
.await
.verify_transactions(txs, slot, false)
.add_transactions(txs, slot, false)
.await
.err()
.unwrap()

View File

@@ -49,7 +49,7 @@ impl TestHarness {
let timer = Instant::now();
// Proposals always extend genesis block
let fork_hash = wallet.validator.read().await.consensus.genesis_block;
let fork_hash = self.genesis_block;
// Building Consensus::Propose params
let proposal_call_debris = ConsensusProposalCallBuilder {
@@ -104,7 +104,7 @@ impl TestHarness {
self.tx_action_benchmarks.get_mut(&TxAction::ConsensusProposal).unwrap();
let timer = Instant::now();
wallet.validator.read().await.verify_transactions(&[tx.clone()], slot, true).await?;
wallet.validator.read().await.add_transactions(&[tx.clone()], slot, true).await?;
wallet.consensus_staked_merkle_tree.append(MerkleNode::from(params.output.coin.inner()));
tx_action_benchmark.verify_times.push(timer.elapsed());
@@ -127,7 +127,7 @@ impl TestHarness {
.validator
.read()
.await
.verify_transactions(txs, slot, false)
.add_transactions(txs, slot, false)
.await
.err()
.unwrap()

View File

@@ -128,7 +128,7 @@ impl TestHarness {
self.tx_action_benchmarks.get_mut(&TxAction::ConsensusStake).unwrap();
let timer = Instant::now();
wallet.validator.read().await.verify_transactions(&[tx.clone()], slot, true).await?;
wallet.validator.read().await.add_transactions(&[tx.clone()], slot, true).await?;
wallet.consensus_staked_merkle_tree.append(MerkleNode::from(params.output.coin.inner()));
tx_action_benchmark.verify_times.push(timer.elapsed());

View File

@@ -125,7 +125,7 @@ impl TestHarness {
self.tx_action_benchmarks.get_mut(&TxAction::ConsensusUnstake).unwrap();
let timer = Instant::now();
wallet.validator.read().await.verify_transactions(&[tx.clone()], slot, true).await?;
wallet.validator.read().await.add_transactions(&[tx.clone()], slot, true).await?;
wallet.money_merkle_tree.append(MerkleNode::from(params.output.coin.inner()));
tx_action_benchmark.verify_times.push(timer.elapsed());

View File

@@ -116,7 +116,7 @@ impl TestHarness {
self.tx_action_benchmarks.get_mut(&TxAction::ConsensusUnstakeRequest).unwrap();
let timer = Instant::now();
wallet.validator.read().await.verify_transactions(&[tx.clone()], slot, true).await?;
wallet.validator.read().await.add_transactions(&[tx.clone()], slot, true).await?;
wallet.consensus_unstaked_merkle_tree.append(MerkleNode::from(params.output.coin.inner()));
tx_action_benchmark.verify_times.push(timer.elapsed());
@@ -139,7 +139,7 @@ impl TestHarness {
.validator
.read()
.await
.verify_transactions(txs, slot, false)
.add_transactions(txs, slot, false)
.await
.err()
.unwrap()

View File

@@ -173,6 +173,7 @@ pub struct TestHarness {
pub holders: HashMap<Holder, Wallet>,
pub proving_keys: HashMap<&'static str, (ProvingKey, ZkBinary)>,
pub tx_action_benchmarks: HashMap<TxAction, TxActionBenchmarks>,
pub genesis_block: blake3::Hash,
}
impl TestHarness {
@@ -273,7 +274,12 @@ impl TestHarness {
// Alice jumps down the rabbit hole
holders.insert(Holder::Alice, alice);
Ok(Self { holders, proving_keys, tx_action_benchmarks })
Ok(Self {
holders,
proving_keys,
tx_action_benchmarks,
genesis_block: genesis_block.blockhash(),
})
}
pub fn gather_owncoin(
@@ -429,8 +435,7 @@ impl TestHarness {
pub async fn generate_slot(&self, id: u64) -> Result<Slot> {
// We grab the genesis slot to generate slot
// using same consensus parameters
let faucet = self.holders.get(&Holder::Faucet).unwrap();
let genesis_block = faucet.validator.read().await.consensus.genesis_block;
let genesis_block = self.genesis_block;
let fork_hashes = vec![genesis_block];
let fork_previous_hashes = vec![genesis_block];
let genesis_slot = self.get_slot_by_slot(0).await?;

View File

@@ -101,7 +101,7 @@ impl TestHarness {
self.tx_action_benchmarks.get_mut(&TxAction::MoneyAirdrop).unwrap();
let timer = Instant::now();
wallet.validator.read().await.verify_transactions(&[tx.clone()], slot, true).await?;
wallet.validator.read().await.add_transactions(&[tx.clone()], slot, true).await?;
wallet.money_merkle_tree.append(MerkleNode::from(params.outputs[0].coin.inner()));
tx_action_benchmark.verify_times.push(timer.elapsed());

View File

@@ -92,7 +92,7 @@ impl TestHarness {
self.tx_action_benchmarks.get_mut(&TxAction::MoneyGenesisMint).unwrap();
let timer = Instant::now();
wallet.validator.read().await.verify_transactions(&[tx.clone()], slot, true).await?;
wallet.validator.read().await.add_transactions(&[tx.clone()], slot, true).await?;
wallet.money_merkle_tree.append(MerkleNode::from(params.output.coin.inner()));
tx_action_benchmark.verify_times.push(timer.elapsed());
@@ -115,7 +115,7 @@ impl TestHarness {
.validator
.read()
.await
.verify_transactions(txs, slot, false)
.add_transactions(txs, slot, false)
.await
.err()
.unwrap()

View File

@@ -165,7 +165,7 @@ impl TestHarness {
self.tx_action_benchmarks.get_mut(&TxAction::MoneyOtcSwap).unwrap();
let timer = Instant::now();
wallet.validator.read().await.verify_transactions(&[tx.clone()], slot, true).await?;
wallet.validator.read().await.add_transactions(&[tx.clone()], slot, true).await?;
if append {
for output in &params.outputs {
wallet.money_merkle_tree.append(MerkleNode::from(output.coin.inner()));

View File

@@ -93,7 +93,7 @@ impl TestHarness {
self.tx_action_benchmarks.get_mut(&TxAction::MoneyTokenMint).unwrap();
let timer = Instant::now();
wallet.validator.read().await.verify_transactions(&[tx.clone()], slot, true).await?;
wallet.validator.read().await.add_transactions(&[tx.clone()], slot, true).await?;
wallet.money_merkle_tree.append(MerkleNode::from(params.output.coin.inner()));
tx_action_benchmark.verify_times.push(timer.elapsed());
@@ -151,7 +151,7 @@ impl TestHarness {
self.tx_action_benchmarks.get_mut(&TxAction::MoneyTokenFreeze).unwrap();
let timer = Instant::now();
wallet.validator.read().await.verify_transactions(&[tx.clone()], slot, true).await?;
wallet.validator.read().await.add_transactions(&[tx.clone()], slot, true).await?;
tx_action_benchmark.verify_times.push(timer.elapsed());
Ok(())

View File

@@ -116,7 +116,7 @@ impl TestHarness {
self.tx_action_benchmarks.get_mut(&TxAction::MoneyTransfer).unwrap();
let timer = Instant::now();
wallet.validator.read().await.verify_transactions(&[tx.clone()], slot, true).await?;
wallet.validator.read().await.add_transactions(&[tx.clone()], slot, true).await?;
if append {
for output in &params.outputs {
wallet.money_merkle_tree.append(MerkleNode::from(output.coin.inner()));
@@ -140,7 +140,7 @@ impl TestHarness {
self.tx_action_benchmarks.get_mut(&TxAction::MoneyTransfer).unwrap();
let timer = Instant::now();
wallet.validator.read().await.verify_transactions(txs, slot, true).await?;
wallet.validator.read().await.add_transactions(txs, slot, true).await?;
if append {
for params in txs_params {
for output in &params.outputs {
@@ -164,7 +164,7 @@ impl TestHarness {
self.tx_action_benchmarks.get_mut(&TxAction::MoneyTransfer).unwrap();
let timer = Instant::now();
wallet.validator.read().await.verify_transactions(&[tx.clone()], slot, false).await?;
wallet.validator.read().await.add_transactions(&[tx.clone()], slot, false).await?;
tx_action_benchmark.verify_times.push(timer.elapsed());
Ok(())
@@ -186,7 +186,7 @@ impl TestHarness {
.validator
.read()
.await
.verify_transactions(txs, slot, false)
.add_transactions(txs, slot, false)
.await
.err()
.unwrap()

View File

@@ -16,10 +16,7 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use crate::{
blockchain::{BlockInfo, Blockchain},
util::time::TimeKeeper,
};
use crate::{blockchain::Blockchain, util::time::TimeKeeper};
/// This struct represents the information required by the consensus algorithm
pub struct Consensus {
@@ -27,16 +24,11 @@ pub struct Consensus {
pub blockchain: Blockchain,
/// Helper structure to calculate time related operations
pub time_keeper: TimeKeeper,
/// Genesis block hash
pub genesis_block: blake3::Hash,
}
impl Consensus {
/// Generate a new Consensus state. On init, genesis block
/// hash is the BlockInfo::default one, so caller must
/// set the correct one, if different.
/// Generate a new Consensus state.
pub fn new(blockchain: Blockchain, time_keeper: TimeKeeper) -> Self {
let genesis_block = BlockInfo::default().blockhash();
Self { blockchain, time_keeper, genesis_block }
Self { blockchain, time_keeper }
}
}

View File

@@ -16,20 +16,15 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::{collections::HashMap, io::Cursor};
use async_std::sync::{Arc, RwLock};
use darkfi_sdk::{blockchain::Slot, crypto::PublicKey, pasta::pallas};
use darkfi_serial::{Decodable, Encodable, WriteExt};
use log::{debug, error, info, warn};
use darkfi_sdk::{blockchain::Slot, crypto::PublicKey};
use log::{debug, info, warn};
use crate::{
blockchain::{BlockInfo, Blockchain, BlockchainOverlay, BlockchainOverlayPtr},
blockchain::{BlockInfo, Blockchain, BlockchainOverlay},
error::TxVerifyFailed,
runtime::vm_runtime::Runtime,
tx::Transaction,
util::time::TimeKeeper,
zk::VerifyingKey,
Error, Result,
};
@@ -37,6 +32,10 @@ use crate::{
pub mod consensus;
use consensus::Consensus;
/// Verification functions
pub mod verification;
use verification::{verify_block, verify_transactions};
/// Helper utilities
pub mod utils;
use utils::deploy_native_contracts;
@@ -79,31 +78,14 @@ impl Validator {
info!(target: "validator", "Initializing Blockchain");
let blockchain = Blockchain::new(db)?;
info!(target: "validator", "Initializing Consensus");
let consensus = Consensus::new(blockchain.clone(), config.time_keeper.clone());
// Create the actual state
let mut state = Self { blockchain: blockchain.clone(), consensus };
// Create an overlay over whole blockchain so we can write stuff
let blockchain_overlay = BlockchainOverlay::new(&blockchain)?;
// Add genesis block if blockchain is empty
let genesis_block = match blockchain.genesis() {
Ok((_, hash)) => hash,
Err(_) => {
info!(target: "validator", "Appending genesis block");
state
.add_blocks(
blockchain_overlay.clone(),
&config.time_keeper,
&[config.genesis_block.clone()],
)
.await?;
config.genesis_block.blockhash()
}
if blockchain.genesis().is_err() {
info!(target: "validator", "Appending genesis block");
verify_block(blockchain_overlay.clone(), &config.genesis_block, &None)?;
};
state.consensus.genesis_block = genesis_block;
// Deploy native wasm contracts
deploy_native_contracts(
@@ -115,9 +97,14 @@ impl Validator {
// Write the changes to the actual chain db
blockchain_overlay.lock().unwrap().overlay.lock().unwrap().apply()?;
info!(target: "validator", "Initializing Consensus");
let consensus = Consensus::new(blockchain.clone(), config.time_keeper);
// Create the actual state
let state = Arc::new(RwLock::new(Self { blockchain, consensus }));
info!(target: "validator", "Finished initializing validator");
Ok(Arc::new(RwLock::new(state)))
Ok(state)
}
// ==========================
@@ -132,159 +119,28 @@ impl Validator {
// 2) When a transaction is being broadcasted to us
// ==========================
/// Append provided blocks to the provided overlay. Block sequence must be valid,
/// meaning that each block and its transactions are valid, in order.
pub async fn add_blocks(
&self,
overlay: BlockchainOverlayPtr,
_time_keeper: &TimeKeeper,
blocks: &[BlockInfo],
) -> Result<()> {
/// Validate a set of [`BlockInfo`] in sequence and apply them if all are valid.
pub async fn add_blocks(&self, blocks: &[BlockInfo]) -> Result<()> {
debug!(target: "validator", "Instantiating BlockchainOverlay");
let overlay = BlockchainOverlay::new(&self.blockchain)?;
// Retrieve last block
let lock = overlay.lock().unwrap();
let mut previous = if !lock.is_empty()? { Some(lock.last_block()?) } else { None };
// Validate and insert each block
for block in blocks {
// Check if block already exists
if lock.has_block(block)? {
return Err(Error::BlockAlreadyExists(block.blockhash().to_string()))
}
// This will be true for every insert, apart from genesis
if let Some(p) = previous {
block.validate(&p)?;
}
// TODO: Add rest block verifications here
/*
let current_slot = self.consensus.time_keeper.current_slot();
if slot.id > current_slot {
return Err(Error::FutureSlotReceived(slot.id))
}
*/
// Insert block
lock.add_block(block)?;
if verify_block(overlay.clone(), block, &previous).is_err() {
warn!(target: "validator", "Erroneous block found in set");
overlay.lock().unwrap().overlay.lock().unwrap().purge_new_trees()?;
return Err(Error::BlockIsInvalid(block.blockhash().to_string()))
};
// Use last inserted block as next iteration previous
previous = Some(block.clone());
}
Ok(())
}
/// Validate WASM execution, signatures, and ZK proofs for a given [`Transaction`].
async fn verify_transaction(
&self,
blockchain_overlay: BlockchainOverlayPtr,
tx: &Transaction,
time_keeper: &TimeKeeper,
verifying_keys: &mut HashMap<[u8; 32], HashMap<String, VerifyingKey>>,
) -> Result<()> {
let tx_hash = tx.hash();
debug!(target: "validator", "Validating transaction {}", tx_hash);
// Table of public inputs used for ZK proof verification
let mut zkp_table = vec![];
// Table of public keys used for signature verification
let mut sig_table = vec![];
// Iterate over all calls to get the metadata
for (idx, call) in tx.calls.iter().enumerate() {
debug!(target: "validator", "Executing contract call {}", idx);
// Write the actual payload data
let mut payload = vec![];
payload.write_u32(idx as u32)?; // Call index
tx.calls.encode(&mut payload)?; // Actual call data
debug!(target: "validator", "Instantiating WASM runtime");
let wasm = blockchain_overlay.lock().unwrap().wasm_bincode.get(call.contract_id)?;
let mut runtime = Runtime::new(
&wasm,
blockchain_overlay.clone(),
call.contract_id,
time_keeper.clone(),
)?;
debug!(target: "validator", "Executing \"metadata\" call");
let metadata = runtime.metadata(&payload)?;
// Decode the metadata retrieved from the execution
let mut decoder = Cursor::new(&metadata);
// The tuple is (zkas_ns, public_inputs)
let zkp_pub: Vec<(String, Vec<pallas::Base>)> = Decodable::decode(&mut decoder)?;
let sig_pub: Vec<PublicKey> = Decodable::decode(&mut decoder)?;
// TODO: Make sure we've read all the bytes above.
debug!(target: "validator", "Successfully executed \"metadata\" call");
// Here we'll look up verifying keys and insert them into the per-contract map.
debug!(target: "validator", "Performing VerifyingKey lookups from the sled db");
for (zkas_ns, _) in &zkp_pub {
let inner_vk_map = verifying_keys.get_mut(&call.contract_id.to_bytes()).unwrap();
// TODO: This will be a problem in case of ::deploy, unless we force a different
// namespace and disable updating existing circuit. Might be a smart idea to do
// so in order to have to care less about being able to verify historical txs.
if inner_vk_map.contains_key(zkas_ns.as_str()) {
continue
}
let (_, vk) = blockchain_overlay
.lock()
.unwrap()
.contracts
.get_zkas(&call.contract_id, zkas_ns)?;
inner_vk_map.insert(zkas_ns.to_string(), vk);
}
zkp_table.push(zkp_pub);
sig_table.push(sig_pub);
// After getting the metadata, we run the "exec" function with the same runtime
// and the same payload.
debug!(target: "validator", "Executing \"exec\" call");
let state_update = runtime.exec(&payload)?;
debug!(target: "validator", "Successfully executed \"exec\" call");
// If that was successful, we apply the state update in the ephemeral overlay.
debug!(target: "validator", "Executing \"apply\" call");
runtime.apply(&state_update)?;
debug!(target: "validator", "Successfully executed \"apply\" call");
// At this point we're done with the call and move on to the next one.
}
// When we're done looping and executing over the tx's contract calls, we now
// move on with verification. First we verify the signatures as that's cheaper,
// and then finally we verify the ZK proofs.
debug!(target: "validator", "Verifying signatures for transaction {}", tx_hash);
if sig_table.len() != tx.signatures.len() {
error!(target: "validator", "Incorrect number of signatures in tx {}", tx_hash);
return Err(TxVerifyFailed::MissingSignatures.into())
}
// TODO: Go through the ZK circuits that have to be verified and account for the opcodes.
if let Err(e) = tx.verify_sigs(sig_table) {
error!(target: "validator", "Signature verification for tx {} failed: {}", tx_hash, e);
return Err(TxVerifyFailed::InvalidSignature.into())
}
debug!(target: "validator", "Signature verification successful");
debug!(target: "validator", "Verifying ZK proofs for transaction {}", tx_hash);
if let Err(e) = tx.verify_zkps(verifying_keys, zkp_table).await {
error!(target: "consensus::validator", "ZK proof verification for tx {} failed: {}", tx_hash, e);
return Err(TxVerifyFailed::InvalidZkProof.into())
}
debug!(target: "validator", "ZK proof verification successful");
debug!(target: "validator", "Transaction {} verified successfully", tx_hash);
debug!(target: "validator", "Applying overlay changes");
overlay.lock().unwrap().overlay.lock().unwrap().apply()?;
Ok(())
}
@@ -292,29 +148,14 @@ impl Validator {
/// In case any of the transactions fail, they will be returned to the caller.
/// The function takes a boolean called `write` which tells it to actually write
/// the state transitions to the database.
pub async fn verify_transactions(
pub async fn add_transactions(
&self,
txs: &[Transaction],
verifying_slot: u64,
write: bool,
) -> Result<()> {
debug!(target: "validator", "Verifying {} transactions", txs.len());
debug!(target: "validator", "Instantiating BlockchainOverlay");
let blockchain_overlay = BlockchainOverlay::new(&self.blockchain)?;
// Tracker for failed txs
let mut erroneous_txs = vec![];
// Map of ZK proof verifying keys for the current transaction batch
let mut vks: HashMap<[u8; 32], HashMap<String, VerifyingKey>> = HashMap::new();
// Initialize the map
for tx in txs {
for call in &tx.calls {
vks.insert(call.contract_id.to_bytes(), HashMap::new());
}
}
let overlay = BlockchainOverlay::new(&self.blockchain)?;
// Generate a time keeper using transaction verifying slot
let time_keeper = TimeKeeper::new(
@@ -324,21 +165,10 @@ impl Validator {
verifying_slot,
);
// Iterate over transactions and attempt to verify them
for tx in txs {
blockchain_overlay.lock().unwrap().checkpoint();
if let Err(e) = self
.verify_transaction(blockchain_overlay.clone(), tx, &time_keeper, &mut vks)
.await
{
warn!(target: "validator", "Transaction verification failed: {}", e);
erroneous_txs.push(tx.clone());
// TODO: verify this works as expected
blockchain_overlay.lock().unwrap().revert_to_checkpoint()?;
}
}
// Verify all transactions and get erroneous ones
let erroneous_txs = verify_transactions(overlay.clone(), &time_keeper, txs).await?;
let lock = blockchain_overlay.lock().unwrap();
let lock = overlay.lock().unwrap();
let mut overlay = lock.overlay.lock().unwrap();
if !erroneous_txs.is_empty() {
warn!(target: "validator", "Erroneous transactions found in set");

View File

@@ -0,0 +1,208 @@
/* This file is part of DarkFi (https://dark.fi)
*
* Copyright (C) 2020-2023 Dyne.org foundation
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::{collections::HashMap, io::Cursor};
use darkfi_sdk::{crypto::PublicKey, pasta::pallas};
use darkfi_serial::{Decodable, Encodable, WriteExt};
use log::{debug, error, warn};
use crate::{
blockchain::{BlockInfo, BlockchainOverlayPtr},
error::TxVerifyFailed,
runtime::vm_runtime::Runtime,
tx::Transaction,
util::time::TimeKeeper,
zk::VerifyingKey,
Error, Result,
};
/// Validate given [`BlockInfo`], and apply it to the provided overlay
pub fn verify_block(
overlay: BlockchainOverlayPtr,
block: &BlockInfo,
previous: &Option<BlockInfo>,
) -> Result<()> {
let block_hash = block.blockhash();
debug!(target: "validator", "Validating block {}", block_hash);
let lock = overlay.lock().unwrap();
// Check if block already exists
if lock.has_block(block)? {
return Err(Error::BlockAlreadyExists(block.blockhash().to_string()))
}
// This will be true for every block, apart from genesis
if let Some(p) = previous {
block.validate(p)?;
}
// TODO: Add rest block verifications here
// Insert block
lock.add_block(block)?;
debug!(target: "validator", "Block {} verified successfully", block_hash);
Ok(())
}
/// Validate WASM execution, signatures, and ZK proofs for a given [`Transaction`],
/// and apply it to the provided overlay.
pub async fn verify_transaction(
overlay: BlockchainOverlayPtr,
time_keeper: &TimeKeeper,
tx: &Transaction,
verifying_keys: &mut HashMap<[u8; 32], HashMap<String, VerifyingKey>>,
) -> Result<()> {
let tx_hash = tx.hash();
debug!(target: "validator", "Validating transaction {}", tx_hash);
// Table of public inputs used for ZK proof verification
let mut zkp_table = vec![];
// Table of public keys used for signature verification
let mut sig_table = vec![];
// Iterate over all calls to get the metadata
for (idx, call) in tx.calls.iter().enumerate() {
debug!(target: "validator", "Executing contract call {}", idx);
// Write the actual payload data
let mut payload = vec![];
payload.write_u32(idx as u32)?; // Call index
tx.calls.encode(&mut payload)?; // Actual call data
debug!(target: "validator", "Instantiating WASM runtime");
let wasm = overlay.lock().unwrap().wasm_bincode.get(call.contract_id)?;
let mut runtime =
Runtime::new(&wasm, overlay.clone(), call.contract_id, time_keeper.clone())?;
debug!(target: "validator", "Executing \"metadata\" call");
let metadata = runtime.metadata(&payload)?;
// Decode the metadata retrieved from the execution
let mut decoder = Cursor::new(&metadata);
// The tuple is (zkas_ns, public_inputs)
let zkp_pub: Vec<(String, Vec<pallas::Base>)> = Decodable::decode(&mut decoder)?;
let sig_pub: Vec<PublicKey> = Decodable::decode(&mut decoder)?;
// TODO: Make sure we've read all the bytes above.
debug!(target: "validator", "Successfully executed \"metadata\" call");
// Here we'll look up verifying keys and insert them into the per-contract map.
debug!(target: "validator", "Performing VerifyingKey lookups from the sled db");
for (zkas_ns, _) in &zkp_pub {
let inner_vk_map = verifying_keys.get_mut(&call.contract_id.to_bytes()).unwrap();
// TODO: This will be a problem in case of ::deploy, unless we force a different
// namespace and disable updating existing circuit. Might be a smart idea to do
// so in order to have to care less about being able to verify historical txs.
if inner_vk_map.contains_key(zkas_ns.as_str()) {
continue
}
let (_, vk) = overlay.lock().unwrap().contracts.get_zkas(&call.contract_id, zkas_ns)?;
inner_vk_map.insert(zkas_ns.to_string(), vk);
}
zkp_table.push(zkp_pub);
sig_table.push(sig_pub);
// After getting the metadata, we run the "exec" function with the same runtime
// and the same payload.
debug!(target: "validator", "Executing \"exec\" call");
let state_update = runtime.exec(&payload)?;
debug!(target: "validator", "Successfully executed \"exec\" call");
// If that was successful, we apply the state update in the ephemeral overlay.
debug!(target: "validator", "Executing \"apply\" call");
runtime.apply(&state_update)?;
debug!(target: "validator", "Successfully executed \"apply\" call");
// At this point we're done with the call and move on to the next one.
}
// When we're done looping and executing over the tx's contract calls, we now
// move on with verification. First we verify the signatures as that's cheaper,
// and then finally we verify the ZK proofs.
debug!(target: "validator", "Verifying signatures for transaction {}", tx_hash);
if sig_table.len() != tx.signatures.len() {
error!(target: "validator", "Incorrect number of signatures in tx {}", tx_hash);
return Err(TxVerifyFailed::MissingSignatures.into())
}
// TODO: Go through the ZK circuits that have to be verified and account for the opcodes.
if let Err(e) = tx.verify_sigs(sig_table) {
error!(target: "validator", "Signature verification for tx {} failed: {}", tx_hash, e);
return Err(TxVerifyFailed::InvalidSignature.into())
}
debug!(target: "validator", "Signature verification successful");
debug!(target: "validator", "Verifying ZK proofs for transaction {}", tx_hash);
if let Err(e) = tx.verify_zkps(verifying_keys, zkp_table).await {
error!(target: "consensus::validator", "ZK proof verification for tx {} failed: {}", tx_hash, e);
return Err(TxVerifyFailed::InvalidZkProof.into())
}
debug!(target: "validator", "ZK proof verification successful");
debug!(target: "validator", "Transaction {} verified successfully", tx_hash);
Ok(())
}
/// Validate a set of [`Transaction`] in sequence and apply them if all are valid.
/// In case any of the transactions fail, they will be returned to the caller.
/// Erroneous transactions are reverted in the overlay before being returned.
pub async fn verify_transactions(
overlay: BlockchainOverlayPtr,
time_keeper: &TimeKeeper,
txs: &[Transaction],
) -> Result<Vec<Transaction>> {
debug!(target: "validator", "Verifying {} transactions", txs.len());
// Tracker for failed txs
let mut erroneous_txs = vec![];
// Map of ZK proof verifying keys for the current transaction batch
let mut vks: HashMap<[u8; 32], HashMap<String, VerifyingKey>> = HashMap::new();
// Initialize the map
for tx in txs {
for call in &tx.calls {
vks.insert(call.contract_id.to_bytes(), HashMap::new());
}
}
// Iterate over transactions and attempt to verify them
for tx in txs {
overlay.lock().unwrap().checkpoint();
if let Err(e) = verify_transaction(overlay.clone(), time_keeper, tx, &mut vks).await {
warn!(target: "validator", "Transaction verification failed: {}", e);
erroneous_txs.push(tx.clone());
// TODO: verify this works as expected
overlay.lock().unwrap().revert_to_checkpoint()?;
}
}
Ok(erroneous_txs)
}