From 95a8319007875855d14c314643d1693acc4bd1b8 Mon Sep 17 00:00:00 2001 From: x Date: Thu, 13 Nov 2025 13:51:53 +0000 Subject: [PATCH] darkfid: Monero Merge Mining --- Cargo.lock | 12 +- bin/darkfid/Cargo.toml | 4 + bin/darkfid/src/lib.rs | 60 ++- bin/darkfid/src/rpc.rs | 2 + bin/darkfid/src/rpc_xmr.rs | 439 +++++++++++++++++- bin/darkfid/src/task/miner.rs | 42 +- bin/darkfid/src/task/mod.rs | 2 +- bin/darkfid/src/tests/harness.rs | 2 +- src/blockchain/monero/fixed_array.rs | 150 +++++- .../monero}/merkle_tree_parameters.rs | 0 src/blockchain/monero/mod.rs | 272 ++++++++++- src/blockchain/monero/utils.rs | 64 ++- src/validator/mod.rs | 3 - src/validator/pow.rs | 6 +- src/validator/xmr/helpers.rs | 239 ---------- src/validator/xmr/mod.rs | 74 --- 16 files changed, 977 insertions(+), 394 deletions(-) rename src/{validator/xmr => blockchain/monero}/merkle_tree_parameters.rs (100%) delete mode 100644 src/validator/xmr/helpers.rs delete mode 100644 src/validator/xmr/mod.rs diff --git a/Cargo.lock b/Cargo.lock index 87c2b8fd1..885b44e03 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1833,7 +1833,6 @@ dependencies = [ "semver", "serde", "sha2", - "simplelog", "sled-overlay", "smol", "socket2", @@ -2030,6 +2029,8 @@ dependencies = [ "darkfi-serial", "darkfi_money_contract", "easy-parallel", + "hex", + "monero", "num-bigint", "rand 0.8.5", "serde", @@ -4635,15 +4636,6 @@ dependencies = [ "syn 2.0.104", ] -[[package]] -name = "num_threads" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9" -dependencies = [ - "libc", -] - [[package]] name = "objc2-core-foundation" version = "0.3.1" diff --git a/bin/darkfid/Cargo.toml b/bin/darkfid/Cargo.toml index 248c6e6f7..be2f26702 100644 --- a/bin/darkfid/Cargo.toml +++ b/bin/darkfid/Cargo.toml @@ -30,6 +30,10 @@ async-trait = "0.1.88" tinyjson = "2.5.1" url = "2.5.4" +# Monero +monero = "0.21.0" +hex = "0.4.3" + # Daemon easy-parallel = "3.3.1" signal-hook-async-std = "0.3.0" diff --git a/bin/darkfid/src/lib.rs b/bin/darkfid/src/lib.rs index 173b25ae3..de2e27933 100644 --- a/bin/darkfid/src/lib.rs +++ b/bin/darkfid/src/lib.rs @@ -26,6 +26,7 @@ use tracing::{debug, error, info, warn}; use url::Url; use darkfi::{ + blockchain::{BlockInfo, HeaderHash}, net::settings::Settings, rpc::{ jsonrpc::JsonSubscriber, @@ -33,9 +34,15 @@ use darkfi::{ settings::RpcSettings, }, system::{ExecutorPtr, StoppableTask, StoppableTaskPtr}, - validator::{Validator, ValidatorConfig, ValidatorPtr}, + validator::{ + consensus::Fork, utils::best_fork_index, Validator, ValidatorConfig, ValidatorPtr, + }, + zk::{empty_witnesses, ProvingKey, ZkCircuit}, + zkas::ZkBinary, Error, Result, }; +use darkfi_money_contract::MONEY_CONTRACT_ZKAS_MINT_NS_V1; +use darkfi_sdk::crypto::{keypair::SecretKey, MONEY_CONTRACT_ID}; #[cfg(test)] mod tests; @@ -77,6 +84,10 @@ pub struct DarkfiNode { rpc_client: Option>, /// HTTP JSON-RPC connection tracker mm_rpc_connections: Mutex>, + /// Merge mining block templates + mm_blocktemplates: Mutex>, + /// PowRewardV1 ZK data + powrewardv1_zk: PowRewardV1Zk, } impl DarkfiNode { @@ -86,8 +97,10 @@ impl DarkfiNode { txs_batch_size: usize, subscribers: HashMap<&'static str, JsonSubscriber>, rpc_client: Option>, - ) -> DarkfiNodePtr { - Arc::new(Self { + ) -> Result { + let powrewardv1_zk = PowRewardV1Zk::new(validator.clone())?; + + Ok(Arc::new(Self { p2p_handler, validator, txs_batch_size, @@ -95,7 +108,42 @@ impl DarkfiNode { rpc_connections: 
Mutex::new(HashSet::new()), rpc_client, mm_rpc_connections: Mutex::new(HashSet::new()), - }) + mm_blocktemplates: Mutex::new(HashMap::new()), + powrewardv1_zk, + })) + } + + /// Grab best current fork + pub async fn best_current_fork(&self) -> Result { + let forks = self.validator.consensus.forks.read().await; + let index = best_fork_index(&forks)?; + forks[index].full_clone() + } +} + +/// ZK data used to generate the "coinbase" transaction in a block +pub(crate) struct PowRewardV1Zk { + pub zkbin: ZkBinary, + pub provingkey: ProvingKey, +} + +impl PowRewardV1Zk { + pub fn new(validator: ValidatorPtr) -> Result { + info!( + target: "darkfid::PowRewardV1Zk::new", + "Generating PowRewardV1 ZkCircuit and ProvingKey...", + ); + + let (zkbin, _) = validator.blockchain.contracts.get_zkas( + &validator.blockchain.sled_db, + &MONEY_CONTRACT_ID, + MONEY_CONTRACT_ZKAS_MINT_NS_V1, + )?; + + let circuit = ZkCircuit::new(empty_witnesses(&zkbin)?, &zkbin); + let provingkey = ProvingKey::build(zkbin.k, &circuit); + + Ok(Self { zkbin, provingkey }) } } @@ -164,8 +212,8 @@ impl Darkfid { }; // Initialize node - let node = - DarkfiNode::new(p2p_handler, validator, txs_batch_size, subscribers, rpc_client).await; + let node = DarkfiNode::new(p2p_handler, validator, txs_batch_size, subscribers, rpc_client) + .await?; // Generate the background tasks let dnet_task = StoppableTask::new(); diff --git a/bin/darkfid/src/rpc.rs b/bin/darkfid/src/rpc.rs index c06d055fd..62ba36656 100644 --- a/bin/darkfid/src/rpc.rs +++ b/bin/darkfid/src/rpc.rs @@ -139,6 +139,8 @@ impl RequestHandler for DarkfiNode { // P2Pool methods requested for Monero Merge Mining // ================================================ "merge_mining_get_chain_id" => self.xmr_merge_mining_get_chain_id(req.id, req.params).await, + "merge_mining_get_aux_block" => self.xmr_merge_mining_get_aux_block(req.id, req.params).await, + "merge_mining_submit_solution" => self.xmr_merge_mining_submit_solution(req.id, req.params).await, // ============== // Invalid method diff --git a/bin/darkfid/src/rpc_xmr.rs b/bin/darkfid/src/rpc_xmr.rs index b100508c4..fb192a5f7 100644 --- a/bin/darkfid/src/rpc_xmr.rs +++ b/bin/darkfid/src/rpc_xmr.rs @@ -16,13 +16,32 @@ * along with this program. If not, see . 
*/ -use std::collections::HashMap; +use std::{collections::HashMap, str::FromStr}; -use darkfi::rpc::jsonrpc::{ErrorCode, JsonError, JsonResponse, JsonResult}; +use darkfi::{ + blockchain::{ + header_store::PowData, + monero::{ + extract_aux_merkle_root_from_block, + fixed_array::{FixedByteArray, MaxSizeVec}, + merkle_proof::MerkleProof, + monero_block_deserialize, MoneroPowData, + }, + HeaderHash, + }, + rpc::jsonrpc::{ErrorCode, ErrorCode::InvalidParams, JsonError, JsonResponse, JsonResult}, + validator::consensus::Proposal, +}; +use darkfi_sdk::crypto::PublicKey; +use hex::FromHex; use tinyjson::JsonValue; -use tracing::error; +use tracing::{error, info}; -use crate::DarkfiNode; +use crate::{ + proto::ProposalMessage, + task::miner::{generate_next_block, MinerRewardsRecipientConfig}, + DarkfiNode, +}; // https://github.com/SChernykh/p2pool/blob/master/docs/MERGE_MINING.MD @@ -44,17 +63,18 @@ impl DarkfiNode { Err(e) => { error!( target: "darkfid::rpc::xmr_merge_mining_get_chain_id", - "[RPC] Error fetching genesis block hash: {e}" + "[RPC-XMR] Error fetching genesis block hash: {e}" ); return JsonError::new(ErrorCode::InternalError, None, id).into() } }; - let genesis_hex = genesis_hash.to_string(); - assert!(genesis_hex.len() == 32); + // TODO: XXX: This should also have more specialized identifiers. + // e.g. chain_id = H(genesis || aux_nonce || checkpoint_height) - let resp_obj = HashMap::from([("chain_id".to_string(), genesis_hex.into())]); - JsonResponse::new(resp_obj.into(), id).into() + let response = + HashMap::from([("chain_id".to_string(), JsonValue::from(genesis_hash.to_string()))]); + JsonResponse::new(JsonValue::from(response), id).into() } // RPCAPI: @@ -77,7 +97,146 @@ impl DarkfiNode { // --> {"jsonrpc":"2.0", "method": "merge_mining_get_aux_block", "params": {"address": "MERGE_MINED_CHAIN_ADDRESS", "aux_hash": "f6952d6eef555ddd87aca66e56b91530222d6e318414816f3ba7cf5bf694bf0f", "height": 3000000, "prev_id":"ad505b0be8a49b89273e307106fa42133cbd804456724c5e7635bd953215d92a"}, "id": 1} // <-- {"jsonrpc":"2.0", "result": {"aux_blob": "4c6f72656d20697073756d", "aux_diff": 123456, "aux_hash":"f6952d6eef555ddd87aca66e56b91530222d6e318414816f3ba7cf5bf694bf0f"}, "id": 1} pub async fn xmr_merge_mining_get_aux_block(&self, id: u16, params: JsonValue) -> JsonResult { - todo!() + let Some(params) = params.get::>() else { + return JsonError::new(InvalidParams, None, id).into() + }; + + // Validate address + let Some(address) = params.get("address") else { + return JsonError::new(InvalidParams, Some("missing address".to_string()), id).into() + }; + let Some(address) = address.get::() else { + return JsonError::new(InvalidParams, Some("invalid address format".to_string()), id) + .into() + }; + let Ok(address) = PublicKey::from_str(address) else { + return JsonError::new(InvalidParams, Some("invalid address format".to_string()), id) + .into() + }; + + // Validate aux_hash + let Some(aux_hash) = params.get("aux_hash") else { + return JsonError::new(InvalidParams, Some("missing aux_hash".to_string()), id).into() + }; + let Some(aux_hash) = aux_hash.get::() else { + return JsonError::new(InvalidParams, Some("invalid aux_hash format".to_string()), id) + .into() + }; + let Ok(aux_hash) = HeaderHash::from_str(aux_hash) else { + return JsonError::new(InvalidParams, Some("invalid aux_hash format".to_string()), id) + .into() + }; + + // Validate height + let Some(height) = params.get("height") else { + return JsonError::new(InvalidParams, Some("missing height".to_string()), id).into() + }; + let 
Some(height) = height.get::() else { + return JsonError::new(InvalidParams, Some("invalid height format".to_string()), id) + .into() + }; + let height = *height as u64; + + // Validate prev_id + let Some(prev_id) = params.get("prev_id") else { + return JsonError::new(InvalidParams, Some("missing prev_id".to_string()), id).into() + }; + let Some(prev_id) = prev_id.get::() else { + return JsonError::new(InvalidParams, Some("invalid prev_id format".to_string()), id) + .into() + }; + let Ok(prev_id) = hex::decode(prev_id) else { + return JsonError::new(InvalidParams, Some("invalid prev_id format".to_string()), id) + .into() + }; + let prev_id = monero::Hash::from_slice(&prev_id); + + info!( + target: "darkfid::rpc_xmr::xmr_merge_mining_get_aux_block", + "[RPC-XMR] Got blocktemplate request: address={}, aux_hash={}, height={}, prev_id={}", + address, aux_hash, height, prev_id, + ); + + // Method params format is correct. Let's check if we provided this + // mining job already. If so, we can just return an empty response. + // We'll also obtain a lock here to avoid getting polled multiple + // times and potentially missing a job. The lock is released when + // this function exits. + let mut mm_blocktemplates = self.mm_blocktemplates.lock().await; + if mm_blocktemplates.contains_key(&aux_hash) { + return JsonResponse::new(JsonValue::from(HashMap::new()), id).into() + } + + // If it's a new job, clear the previous one(s). + mm_blocktemplates.clear(); + + // At this point, we should query the Validator for a new blocktemplate. + // We first need to construct `MinerRewardsRecipientConfig` from the + // address provided to us through the RPC. + let recipient_config = + MinerRewardsRecipientConfig { recipient: address, spend_hook: None, user_data: None }; + + // Now let's try to construct the blocktemplate. + let mut extended_fork = match self.best_current_fork().await { + Ok(f) => f, + Err(e) => { + error!( + target: "darkfid::rpc_xmr::xmr_merge_mining_get_aux_block", + "[RPC-XMR] Finding best fork index failed: {e}", + ); + return JsonError::new(ErrorCode::InternalError, None, id).into() + } + }; + + // Find the difficulty. Note we cast it to f64 here. + let difficulty: f64 = match extended_fork.module.next_difficulty() { + Ok(v) => { + // We will attempt to cast it to f64. This should always work. + v.to_string().parse().unwrap() + } + Err(e) => { + error!( + target: "darkfid::rpc_xmr::xmr_merge_mining_get_aux_block", + "[RPC-XMR] Finding next mining difficulty failed: {e}", + ); + return JsonError::new(ErrorCode::InternalError, None, id).into() + } + }; + + let (_, blocktemplate, block_signing_secret) = match generate_next_block( + &mut extended_fork, + &recipient_config, + &self.powrewardv1_zk.zkbin, + &self.powrewardv1_zk.provingkey, + self.validator.consensus.module.read().await.target, + self.validator.verify_fees, + ) + .await + { + Ok(v) => v, + Err(e) => { + error!( + target: "darkfid::rpc_xmr::xmr_merge_mining_get_aux_block", + "[RPC-XMR] Failed to generate next blocktemplate: {e}", + ); + return JsonError::new(ErrorCode::InternalError, None, id).into() + } + }; + + // Now we have the blocktemplate. We'll mark it down in memory, + // and then ship it to RPC. 
+ let blockhash = blocktemplate.header.template_hash(); + mm_blocktemplates.insert(blockhash, (blocktemplate, block_signing_secret)); + + let response = JsonValue::from(HashMap::from([ + ("aux_blob".to_string(), JsonValue::from(blockhash.as_string())), + ("aux_diff".to_string(), JsonValue::from(difficulty)), + ("aux_hash".to_string(), JsonValue::from(blockhash.as_string())), + ])); + + info!("<-- {}", response.stringify().unwrap()); + + JsonResponse::new(response, id).into() } // RPCAPI: @@ -103,6 +262,264 @@ impl DarkfiNode { // --> {"jsonrpc":"2.0", "method": "merge_mining_submit_solution", "params": {"aux_blob": "4c6f72656d20697073756d", "aux_hash": "f6952d6eef555ddd87aca66e56b91530222d6e318414816f3ba7cf5bf694bf0f", "blob": "...", "merkle_proof": ["hash1", "hash2", "hash3"], "path": 3, "seed_hash": "22c3d47c595ae888b5d7fc304235f92f8854644d4fad38c5680a5d4a81009fcd"}, "id": 1} // <-- {"jsonrpc":"2.0", "result": {"status": "accepted"}, "id": 1} pub async fn xmr_merge_mining_submit_solution(&self, id: u16, params: JsonValue) -> JsonResult { - todo!() + let Some(params) = params.get::>() else { + return JsonError::new(InvalidParams, None, id).into() + }; + + // Validate aux_blob + let Some(aux_blob) = params.get("aux_blob") else { + return JsonError::new(InvalidParams, Some("missing aux_blob".to_string()), id).into() + }; + let Some(aux_blob) = aux_blob.get::() else { + return JsonError::new(InvalidParams, Some("invalid aux_blob format".to_string()), id) + .into() + }; + let Ok(_aux_blob) = HeaderHash::from_str(aux_blob) else { + return JsonError::new(InvalidParams, Some("invalid aux_blob format".to_string()), id) + .into() + }; + + // Validate aux_hash + let Some(aux_hash) = params.get("aux_hash") else { + return JsonError::new(InvalidParams, Some("missing aux_hash".to_string()), id).into() + }; + let Some(aux_hash) = aux_hash.get::() else { + return JsonError::new(InvalidParams, Some("invalid aux_hash format".to_string()), id) + .into() + }; + let Ok(aux_hash) = HeaderHash::from_str(aux_hash) else { + return JsonError::new(InvalidParams, Some("invalid aux_hash format".to_string()), id) + .into() + }; + + // If we don't know about this `aux_hash`, we can just abort here. 
+ let mut mm_blocktemplates = self.mm_blocktemplates.lock().await; + if !mm_blocktemplates.contains_key(&aux_hash) { + return JsonError::new(InvalidParams, Some("unknown aux_hash".to_string()), id).into() + } + + // Validate blob + let Some(blob) = params.get("blob") else { + return JsonError::new(InvalidParams, Some("missing blob".to_string()), id).into() + }; + let Some(blob) = blob.get::() else { + return JsonError::new(InvalidParams, Some("invalid blob format".to_string()), id).into() + }; + let Ok(block) = monero_block_deserialize(blob) else { + return JsonError::new(InvalidParams, Some("invalid blob format".to_string()), id).into() + }; + + // Validate merkle_proof + let Some(merkle_proof_j) = params.get("merkle_proof") else { + return JsonError::new(InvalidParams, Some("missing merkle_proof".to_string()), id) + .into() + }; + let Some(merkle_proof_j) = merkle_proof_j.get::>() else { + return JsonError::new( + InvalidParams, + Some("invalid merkle_proof format".to_string()), + id, + ) + .into() + }; + let mut merkle_proof: Vec = Vec::with_capacity(merkle_proof_j.len()); + for hash in merkle_proof_j.iter() { + match hash.get::() { + Some(v) => { + let Ok(val) = monero::Hash::from_hex(v) else { + return JsonError::new( + InvalidParams, + Some("invalid merkle_proof format".to_string()), + id, + ) + .into() + }; + + merkle_proof.push(val); + } + None => { + return JsonError::new( + InvalidParams, + Some("invalid merkle_proof format".to_string()), + id, + ) + .into() + } + } + } + + // Validate path + let Some(path) = params.get("path") else { + return JsonError::new(InvalidParams, Some("missing path".to_string()), id).into() + }; + let Some(path) = path.get::() else { + return JsonError::new(InvalidParams, Some("invalid path format".to_string()), id).into() + }; + let path = *path as u32; + + // Validate seed_hash + let Some(seed_hash) = params.get("seed_hash") else { + return JsonError::new(InvalidParams, Some("missing seed_hash".to_string()), id).into() + }; + let Some(seed_hash) = seed_hash.get::() else { + return JsonError::new(InvalidParams, Some("invalid seed_hash format".to_string()), id) + .into() + }; + let Ok(seed_hash) = monero::Hash::from_hex(seed_hash) else { + return JsonError::new(InvalidParams, Some("invalid seed_hash format".to_string()), id) + .into() + }; + let Ok(seed_hash) = FixedByteArray::from_bytes(seed_hash.as_bytes()) else { + return JsonError::new(InvalidParams, Some("invalid seed_hash format".to_string()), id) + .into() + }; + + info!( + target: "darkfid::rpc_xmr::xmr_merge_mining_submit_solution", + "[RPC-XMR] Got solution submission: aux_hash={aux_hash}", + ); + + // ======================================= + // Now we will validate the block contents + // ======================================= + + let Ok(merkle_root) = extract_aux_merkle_root_from_block(&block) else { + error!( + target: "darkfid::rpc_xmr::xmr_merge_mining_submit_solution", + "[RPC-XMR] Extracting aux_merkle_root from XMR block failed", + ); + return JsonError::new(InvalidParams, None, id).into() + }; + + let Some(merkle_root) = merkle_root else { + error!( + target: "darkfid::rpc_xmr::xmr_merge_mining_submit_solution", + "[RPC-XMR] Did not find merge mining hash in block", + ); + return JsonError::new(InvalidParams, None, id).into() + }; + + // Verify the merge mining hash + let Some(aux_hash_merkle_proof) = MerkleProof::try_construct(merkle_proof, path) else { + return JsonError::new( + InvalidParams, + Some("invalid aux_hash merkle proof".to_string()), + id, + ) + .into() + }; + + if 
aux_hash_merkle_proof.calculate_root(&monero::Hash::from(aux_hash.inner())) != + merkle_root + { + error!( + target: "darkfid::rpc_xmr::xmr_merge_mining_submit_solution", + "[RPC-XMR] Could not validate aux_hash Merkle root", + ); + return JsonError::new( + InvalidParams, + Some("invalid aux_hash merkle proof".to_string()), + id, + ) + .into() + } + + // Construct MoneroPowData + let aux_chain_hashes = + MaxSizeVec::from_items_truncate(vec![monero::Hash::from(aux_hash.inner())]); + let monero_pow_data = + match MoneroPowData::new(block, seed_hash, aux_chain_hashes, *aux_hash.inner()) { + Ok(v) => v, + Err(e) => { + error!( + target: "darkfid::rpc_xmr::xmr_merge_mining_submit_solution", + "[RPC-XMR] Failed constructing MoneroPowData: {e}", + ); + return JsonError::new( + InvalidParams, + Some("failed constructing moneropowdata".to_string()), + id, + ) + .into() + } + }; + + if !monero_pow_data.is_coinbase_valid_merkle_root() { + error!( + target: "darkfid::rpc_xmr::xmr_merge_mining_submit_solution", + "[RPC-XMR] MoneroPowData invalid coinbase Merkle proof", + ); + return JsonError::new( + InvalidParams, + Some("invalid coinbase merkle proof".to_string()), + id, + ) + .into() + } + + // Append MoneroPowData to the DarkFi block and verify it. + let extended_fork = match self.best_current_fork().await { + Ok(f) => f, + Err(e) => { + error!( + target: "darkfid::rpc_xmr::xmr_merge_mining_submit_solution", + "[RPC-XMR] Finding best fork index failed: {e}", + ); + return JsonError::new(ErrorCode::InternalError, None, id).into() + } + }; + + let (block, secret) = &mm_blocktemplates.get(&aux_hash).unwrap(); + let mut block = block.clone(); + block.header.pow_data = PowData::Monero(monero_pow_data); + block.sign(secret); + + if let Err(e) = extended_fork.module.verify_current_block(&block.header) { + error!( + target: "darkfid::rpc_xmr::xmr_merge_mining_submit_solution", + "[RPC-XMR] Failed verifying merge mined block: {e}", + ); + return JsonError::new(ErrorCode::InternalError, None, id).into() + } + + info!( + target: "darkfid::rpc_xmr::xmr_merge_mining_submit_solution", + "[RPC-XMR] Success verifying merge mined block!", + ); + + // At this point we should be able to clear the working job. + // We still won't release the lock in hope of proposing the block + // first. + mm_blocktemplates.clear(); + + // Propose the new block. 
+ info!( + target: "darkfid::rpc_xmr::xmr_merge_mining_submit_solution", + "[RPC-XMR] Proposing new block to network", + ); + let proposal = Proposal::new(block); + if let Err(e) = self.validator.append_proposal(&proposal).await { + error!( + target: "darkfid::rpc_xmr::xmr_merge_submit_solution", + "[RPC-XMR] Error proposing new block: {e}", + ); + return JsonError::new(ErrorCode::InternalError, None, id).into() + } + + info!( + target: "darkfid::rpc_xmr::xmr_merge_mining_submit_solution", + "[RPC-XMR] Broadcasting new block to network", + ); + let message = ProposalMessage(proposal); + self.p2p_handler.p2p.broadcast(&message).await; + + JsonResponse::new( + JsonValue::from(HashMap::from([( + "status".to_string(), + JsonValue::from("accepted".to_string()), + )])), + id, + ) + .into() } } diff --git a/bin/darkfid/src/task/miner.rs b/bin/darkfid/src/task/miner.rs index 29c5575ca..6950c2ef9 100644 --- a/bin/darkfid/src/task/miner.rs +++ b/bin/darkfid/src/task/miner.rs @@ -28,15 +28,13 @@ use darkfi::{ utils::best_fork_index, verification::apply_producer_transaction, }, - zk::{empty_witnesses, ProvingKey, ZkCircuit}, + zk::ProvingKey, zkas::ZkBinary, Error, Result, }; -use darkfi_money_contract::{ - client::pow_reward_v1::PoWRewardCallBuilder, MoneyFunction, MONEY_CONTRACT_ZKAS_MINT_NS_V1, -}; +use darkfi_money_contract::{client::pow_reward_v1::PoWRewardCallBuilder, MoneyFunction}; use darkfi_sdk::{ - crypto::{poseidon_hash, FuncId, MerkleTree, PublicKey, SecretKey, MONEY_CONTRACT_ID}, + crypto::{FuncId, MerkleTree, PublicKey, SecretKey, MONEY_CONTRACT_ID}, pasta::pallas, ContractCall, }; @@ -75,16 +73,6 @@ pub async fn miner_task( // Initialize miner configuration info!(target: "darkfid::task::miner_task", "Starting miner task..."); - // Grab zkas proving keys and bin for PoWReward transaction - info!(target: "darkfid::task::miner_task", "Generating zkas bin and proving keys..."); - let (zkbin, _) = node.validator.blockchain.contracts.get_zkas( - &node.validator.blockchain.sled_db, - &MONEY_CONTRACT_ID, - MONEY_CONTRACT_ZKAS_MINT_NS_V1, - )?; - let circuit = ZkCircuit::new(empty_witnesses(&zkbin)?, &zkbin); - let pk = ProvingKey::build(zkbin.k, &circuit); - // Grab blocks subscriber let block_sub = node.subscribers.get("blocks").unwrap(); @@ -132,36 +120,28 @@ pub async fn miner_task( // Start miner loop loop { // Grab best current fork - let forks = node.validator.consensus.forks.read().await; - let index = match best_fork_index(&forks) { - Ok(i) => i, - Err(e) => { - error!( - target: "darkfid::task::miner_task", - "Finding best fork index failed: {e}" - ); - continue - } - }; - let extended_fork = match forks[index].full_clone() { + let extended_fork = match node.best_current_fork().await { Ok(f) => f, Err(e) => { error!( target: "darkfid::task::miner_task", - "Fork full clone creation failed: {e}" + "Finding best fork index failed: {e}", ); continue } }; - drop(forks); // Grab extended fork last proposal hash let last_proposal_hash = extended_fork.last_proposal()?.hash; + // Grab zkas proving keys and bin for PoWReward transaction + let zkbin = &node.powrewardv1_zk.zkbin; + let pk = &node.powrewardv1_zk.provingkey; + // Start listenning for network proposals and mining next block for best fork. 
match smol::future::or( listen_to_network(node, last_proposal_hash, &subscription, &sender), - mine(node, extended_fork, recipient_config, &zkbin, &pk, &stop_signal, skip_sync), + mine(node, extended_fork, recipient_config, zkbin, pk, &stop_signal, skip_sync), ) .await { @@ -351,7 +331,7 @@ async fn mine_next_block( } /// Auxiliary function to generate next block in an atomic manner. -async fn generate_next_block( +pub async fn generate_next_block( extended_fork: &mut Fork, recipient_config: &MinerRewardsRecipientConfig, zkbin: &ZkBinary, diff --git a/bin/darkfid/src/task/mod.rs b/bin/darkfid/src/task/mod.rs index 7fbaacad6..661d2d326 100644 --- a/bin/darkfid/src/task/mod.rs +++ b/bin/darkfid/src/task/mod.rs @@ -20,7 +20,7 @@ pub mod consensus; pub use consensus::consensus_init_task; pub mod miner; -pub use miner::miner_task; +pub use miner::{generate_next_block, miner_task}; pub mod sync; pub use sync::sync_task; diff --git a/bin/darkfid/src/tests/harness.rs b/bin/darkfid/src/tests/harness.rs index 7226e4bcd..b21dd2642 100644 --- a/bin/darkfid/src/tests/harness.rs +++ b/bin/darkfid/src/tests/harness.rs @@ -290,7 +290,7 @@ pub async fn generate_node( let p2p_handler = DarkfidP2pHandler::init(settings, ex).await?; let node = DarkfiNode::new(p2p_handler.clone(), validator.clone(), 50, subscribers.clone(), None) - .await; + .await?; p2p_handler.clone().start(ex, &validator, &subscribers).await?; diff --git a/src/blockchain/monero/fixed_array.rs b/src/blockchain/monero/fixed_array.rs index 73ba63520..f26b0233f 100644 --- a/src/blockchain/monero/fixed_array.rs +++ b/src/blockchain/monero/fixed_array.rs @@ -18,14 +18,15 @@ use std::{ io::{self, Read, Write}, - ops::Deref, + marker::PhantomData, + ops::{Deref, DerefMut}, }; #[cfg(feature = "async-serial")] use darkfi_serial::{ async_trait, AsyncDecodable, AsyncEncodable, AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt, }; -use darkfi_serial::{Decodable, Encodable, ReadExt, WriteExt}; +use darkfi_serial::{Decodable, Encodable, ReadExt, SerialDecodable, SerialEncodable, WriteExt}; const MAX_ARR_SIZE: usize = 60; @@ -69,6 +70,21 @@ impl FixedByteArray { pub fn to_vec(&self) -> Vec { self.as_slice().to_vec() } + + pub fn from_bytes(bytes: &[u8]) -> io::Result { + if bytes.len() > MAX_ARR_SIZE { + return Err(io::Error::new(io::ErrorKind::OutOfMemory, "Slice too large")) + } + + let len = u8::try_from(bytes.len()).map_err(|_| io::ErrorKind::OutOfMemory)?; + + let mut elems = [0u8; MAX_ARR_SIZE]; + elems + .get_mut(..len as usize) + .expect("Cannot fail") + .copy_from_slice(bytes.get(..len as usize).expect("Cannot fail")); + Ok(Self { elems, len }) + } } impl Deref for FixedByteArray { @@ -121,7 +137,7 @@ impl Decodable for FixedByteArray { if len > MAX_ARR_SIZE { return Err(io::Error::new( io::ErrorKind::InvalidInput, - format!("length exceeded max of 60 bytes for FixedByteArray: {}", len), + format!("length exceeded max of 60 bytes for FixedByteArray: {len}"), )); } @@ -143,7 +159,7 @@ impl AsyncDecodable for FixedByteArray { if len > MAX_ARR_SIZE { return Err(io::Error::new( io::ErrorKind::InvalidInput, - format!("length exceeded max of 60 bytes for FixedByteArray: {}", len), + format!("length exceeded max of 60 bytes for FixedByteArray: {len}"), )); } @@ -157,6 +173,132 @@ impl AsyncDecodable for FixedByteArray { } } +/// A vector that has a maximum size of `MAX_SIZE` +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, SerialEncodable, SerialDecodable)] +pub struct MaxSizeVec +where + T: Send + Sync, +{ + vec: Vec, + _marker: PhantomData, +} 
+ +impl Default for MaxSizeVec +where + T: Send + Sync, +{ + fn default() -> Self { + Self::new() + } +} + +impl MaxSizeVec +where + T: Send + Sync, +{ + /// Creates a new `MaxSizeVec` with a capacity of `MAX_SIZE` + pub fn new() -> Self { + Self { vec: Vec::new(), _marker: PhantomData } + } + + /// Creates a new `MaxSizeVec` with the given data. + /// Returns an error if the data length exceeds `MAX_SIZE`. + pub fn new_with_data(data: Vec) -> io::Result { + if data.len() > MAX_SIZE { + return Err(io::Error::new(io::ErrorKind::StorageFull, "Size exceeded")) + } + + Ok(Self { vec: data, _marker: PhantomData }) + } + + /// Creates a `MaxSizeVec` from the given items, truncating if needed + pub fn from_items_truncate(items: Vec) -> Self { + let len = std::cmp::min(items.len(), MAX_SIZE); + Self { vec: items.into_iter().take(len).collect(), _marker: PhantomData } + } + + /// Consumes `MaxSizeVec` and returns the inner `Vec` + pub fn into_vec(self) -> Vec { + self.vec + } + + /// Returns the maximum size of the `MaxSizeVec` + pub fn max_size(&self) -> usize { + MAX_SIZE + } + + /// Pushes an item to the `MaxSizeVec` + pub fn push(&mut self, item: T) -> io::Result<()> { + if self.vec.len() >= MAX_SIZE { + return Err(io::Error::new(io::ErrorKind::StorageFull, "Size exceeded")) + } + + self.vec.push(item); + Ok(()) + } +} + +impl AsRef<[T]> for MaxSizeVec +where + T: Send + Sync, +{ + fn as_ref(&self) -> &[T] { + &self.vec + } +} + +impl Deref for MaxSizeVec +where + T: Send + Sync, +{ + type Target = [T]; + + fn deref(&self) -> &Self::Target { + &self.vec + } +} + +impl DerefMut for MaxSizeVec +where + T: Send + Sync, +{ + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.vec + } +} + +impl Iterator for MaxSizeVec +where + T: Send + Sync, +{ + type Item = T; + + fn next(&mut self) -> Option { + if self.vec.is_empty() { + None + } else { + Some(self.vec.remove(0)) + } + } +} + +impl FromIterator for MaxSizeVec +where + T: Send + Sync, +{ + fn from_iter>(iter: I) -> Self { + let mut vec = vec![]; + for item in iter { + if vec.len() >= MAX_SIZE { + break + } + vec.push(item); + } + + Self { vec, _marker: PhantomData } + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/src/validator/xmr/merkle_tree_parameters.rs b/src/blockchain/monero/merkle_tree_parameters.rs similarity index 100% rename from src/validator/xmr/merkle_tree_parameters.rs rename to src/blockchain/monero/merkle_tree_parameters.rs diff --git a/src/blockchain/monero/mod.rs b/src/blockchain/monero/mod.rs index c1e6621c9..31fcbca3d 100644 --- a/src/blockchain/monero/mod.rs +++ b/src/blockchain/monero/mod.rs @@ -19,20 +19,27 @@ use std::{ fmt, io::{self, Cursor, Error, Read, Write}, + iter, }; +use darkfi_sdk::{hex::decode_hex, AsHex}; #[cfg(feature = "async-serial")] use darkfi_serial::{async_trait, AsyncDecodable, AsyncEncodable, AsyncRead, AsyncWrite}; use darkfi_serial::{Decodable, Encodable}; use monero::{ - blockdata::transaction::RawExtraField, + blockdata::transaction::{ExtraField, RawExtraField, SubField}, consensus::{Decodable as XmrDecodable, Encodable as XmrEncodable}, - BlockHeader, Hash, + cryptonote::hash::Hashable, + util::ringct::{RctSigBase, RctType}, + BlockHeader, }; use tiny_keccak::{Hasher, Keccak}; +use tracing::warn; + +use crate::{Error::MoneroMergeMineError, Result}; pub mod fixed_array; -use fixed_array::FixedByteArray; +use fixed_array::{FixedByteArray, MaxSizeVec}; pub mod merkle_proof; use merkle_proof::MerkleProof; @@ -41,6 +48,12 @@ pub mod keccak; use keccak::{keccak_from_bytes, 
keccak_to_bytes}; pub mod utils; +use utils::{create_blockhashing_blob, create_merkle_proof, tree_hash}; + +pub mod merkle_tree_parameters; +pub use merkle_tree_parameters::MerkleTreeParameters; + +pub type AuxChainHashes = MaxSizeVec; /// This struct represents all the Proof of Work information required /// for merge mining. @@ -49,13 +62,12 @@ pub struct MoneroPowData { /// Monero Header fields pub header: BlockHeader, /// RandomX VM key - length varies to a max len of 60. - /// TODO: Implement a type, or use randomx_key[0] to define len. pub randomx_key: FixedByteArray, /// The number of transactions included in this Monero block. /// This is used to produce the blockhashing_blob. pub transaction_count: u16, /// Transaction root - pub merkle_root: Hash, + pub merkle_root: monero::Hash, /// Coinbase Merkle proof hashes pub coinbase_merkle_proof: MerkleProof, /// Incomplete hashed state of the coinbase transaction @@ -66,6 +78,96 @@ pub struct MoneroPowData { pub aux_chain_merkle_proof: MerkleProof, } +impl MoneroPowData { + /// Constructs the Monero PoW data from the given block and seed + pub fn new( + block: monero::Block, + seed: FixedByteArray, + ordered_aux_chain_hashes: AuxChainHashes, + darkfi_hash: [u8; 32], + ) -> Result { + let hashes = create_ordered_tx_hashes_from_block(&block); + let root = tree_hash(&hashes)?; + let hash = + hashes.first().ok_or(MoneroMergeMineError("No hashes for Merkle proof".to_string()))?; + + let coinbase_merkle_proof = create_merkle_proof(&hashes, hash).ok_or_else(|| { + MoneroMergeMineError( + "create_merkle_proof returned None because the block has no coinbase".to_string(), + ) + })?; + + let coinbase = block.miner_tx.clone(); + + let mut keccak = Keccak::v256(); + let mut encoder_prefix = vec![]; + coinbase.prefix.version.consensus_encode(&mut encoder_prefix)?; + coinbase.prefix.unlock_time.consensus_encode(&mut encoder_prefix)?; + coinbase.prefix.inputs.consensus_encode(&mut encoder_prefix)?; + coinbase.prefix.outputs.consensus_encode(&mut encoder_prefix)?; + keccak.update(&encoder_prefix); + + let d_hash = monero::Hash::from_slice(darkfi_hash.as_slice()); + let aux_chain_merkle_proof = create_merkle_proof(&ordered_aux_chain_hashes, &d_hash).ok_or_else(|| { + MoneroMergeMineError("create_merkle_proof returned none, could not find darkfi hash in aux chain hashes".to_string()) + })?; + + Ok(Self { + header: block.header, + randomx_key: seed, + transaction_count: hashes.len() as u16, + merkle_root: root, + coinbase_merkle_proof, + coinbase_tx_extra: block.miner_tx.prefix.extra, + coinbase_tx_hasher: keccak, + aux_chain_merkle_proof, + }) + } + + /// Returns `true` if the coinbase Merkle proof produces the `merkle_root` + /// hash, otherwise `false`. 
+ pub fn is_coinbase_valid_merkle_root(&self) -> bool { + let mut finalised_prefix_keccak = self.coinbase_tx_hasher.clone(); + let mut encoder_extra_field = vec![]; + + self.coinbase_tx_extra.consensus_encode(&mut encoder_extra_field).unwrap(); + finalised_prefix_keccak.update(&encoder_extra_field); + let mut prefix_hash: [u8; 32] = [0u8; 32]; + finalised_prefix_keccak.finalize(&mut prefix_hash); + + let final_prefix_hash = monero::Hash::from_slice(&prefix_hash); + + // let mut finalised_keccak = Keccak::v256(); + let rct_sig_base = RctSigBase { + rct_type: RctType::Null, + txn_fee: Default::default(), + pseudo_outs: vec![], + ecdh_info: vec![], + out_pk: vec![], + }; + + let hashes = vec![final_prefix_hash, rct_sig_base.hash(), monero::Hash::null()]; + + let encoder_final: Vec = + hashes.into_iter().flat_map(|h| Vec::from(&h.to_bytes()[..])).collect(); + + let coinbase_hash = monero::Hash::new(encoder_final); + + let merkle_root = self.coinbase_merkle_proof.calculate_root(&coinbase_hash); + (self.merkle_root == merkle_root) && self.coinbase_merkle_proof.check_coinbase_path() + } + + /// Returns the blockhashing_blob for the Monero block + pub fn to_blockhashing_blob(&self) -> Vec { + create_blockhashing_blob(&self.header, &self.merkle_root, u64::from(self.transaction_count)) + } + + /// Returns the RandomX VM key + pub fn randomx_key(&self) -> &[u8] { + self.randomx_key.as_slice() + } +} + impl fmt::Debug for MoneroPowData { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let mut digest = [0u8; 32]; @@ -144,7 +246,7 @@ impl Decodable for MoneroPowData { let transaction_count: u16 = Decodable::decode(d)?; let merkle_root = - Hash::consensus_decode(d).map_err(|_| Error::other("Invalid XMR hash"))?; + monero::Hash::consensus_decode(d).map_err(|_| Error::other("Invalid XMR hash"))?; let coinbase_merkle_proof: MerkleProof = Decodable::decode(d)?; @@ -182,8 +284,8 @@ impl AsyncDecodable for MoneroPowData { let buf: Vec = AsyncDecodable::decode_async(d).await?; let mut buf = Cursor::new(buf); - let merkle_root = - Hash::consensus_decode(&mut buf).map_err(|_| Error::other("Invalid XMR hash"))?; + let merkle_root = monero::Hash::consensus_decode(&mut buf) + .map_err(|_| Error::other("Invalid XMR hash"))?; let coinbase_merkle_proof: MerkleProof = AsyncDecodable::decode_async(d).await?; @@ -206,3 +308,157 @@ impl AsyncDecodable for MoneroPowData { }) } } + +/// Create a set of ordered transaction hashes from a Monero block +pub fn create_ordered_tx_hashes_from_block(block: &monero::Block) -> Vec { + iter::once(block.miner_tx.hash()).chain(block.tx_hashes.clone()).collect() +} + +/// Inserts aux chain merkle root and info into a Monero block +pub fn insert_aux_chain_mr_and_info_into_block>( + block: &mut monero::Block, + aux_chain_mr: T, + aux_chain_count: u8, + aux_nonce: u32, +) -> Result<()> { + if aux_chain_count == 0 { + return Err(MoneroMergeMineError("Zero aux chains".to_string())) + } + + if aux_chain_mr.as_ref().len() != monero::Hash::len_bytes() { + return Err(MoneroMergeMineError("Aux chain root invalid length".to_string())) + } + + // When we insert the Merge Mining tag, we need to make sure + // that the extra field is valid. 
+ let mut extra_field = match ExtraField::try_parse(&block.miner_tx.prefix.extra) { + Ok(v) => v, + Err(e) => return Err(MoneroMergeMineError(e.to_string())), + }; + + // Adding more than one Merge Mining tag is not allowed + for item in &extra_field.0 { + if let SubField::MergeMining(_, _) = item { + return Err(MoneroMergeMineError("More than one mm tag in coinbase".to_string())) + } + } + + // If `SubField::Padding(n)` with `n < 255` is the last subfield in the + // extra field, then appending a new field will always fail to deserialize + // (`ExtraField::try_parse`) - the new field cannot be parsed in that + // sequence. + // To circumvent this, we create a new extra field by appending the + // original extra field to the merge mining field instead. + let hash = monero::Hash::from_slice(aux_chain_mr.as_ref()); + let encoded = if aux_chain_count == 1 { + monero::VarInt(0) + } else { + let mt_params = MerkleTreeParameters::new(aux_chain_count, aux_nonce)?; + mt_params.to_varint() + }; + extra_field.0.insert(0, SubField::MergeMining(encoded, hash)); + + block.miner_tx.prefix.extra = extra_field.into(); + + // Let's test the block to ensure it serializes correctly. + let blocktemplate_ser = monero::consensus::serialize(block); + let blocktemplate_hex = blocktemplate_ser.hex(); + let blocktemplate_bytes = decode_hex(&blocktemplate_hex) + .collect::, _>>() + .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?; + let de_block: monero::Block = match monero::consensus::deserialize(&blocktemplate_bytes) { + Ok(v) => v, + Err(e) => return Err(io::Error::new(io::ErrorKind::InvalidData, e).into()), + }; + + if block != &de_block { + return Err(MoneroMergeMineError("Blocks don't match after serialization".to_string())) + } + + Ok(()) +} + +/* +/// Creates a hex-encoded Monero `blockhashing_blob` +fn create_block_hashing_blob( + header: &monero::BlockHeader, + merkle_root: &monero::Hash, + transaction_count: u64, +) -> Vec { + let mut blockhashing_blob = monero::consensus::serialize(header); + blockhashing_blob.extend_from_slice(merkle_root.as_bytes()); + let mut count = monero::consensus::serialize(&monero::VarInt(transaction_count)); + blockhashing_blob.append(&mut count); + blockhashing_blob +} + +/// Creates a hex-encoded Monero `blockhashing_blob` that's used by the PoW hash +fn create_blockhashing_blob_from_blob(block: &monero::Block) -> Result { + let tx_hashes = create_ordered_tx_hashes_from_block(block); + let root = tree_hash(&tx_hashes)?; + let blob = create_block_hashing_blob(&block.header, &root, tx_hashes.len() as u64); + Ok(blob.hex()) +} +*/ + +/// Try to decode a `monero::Block` given a hex blob +pub fn monero_block_deserialize(blob: &str) -> Result { + let bytes: Vec = decode_hex(blob) + .collect::, _>>() + .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?; + + let mut reader = Cursor::new(bytes); + + match monero::Block::consensus_decode(&mut reader) { + Ok(v) => Ok(v), + Err(e) => Err(io::Error::new(io::ErrorKind::InvalidData, e).into()), + } +} + +/// Parsing an extra field from bytes will always return an extra field with +/// subfields that could be read even if it does not represent the original +// extra field. +/// As per Monero consensus rules, an error here will not represent failure +/// to deserialize a block, so no need to error here. 
+fn parse_extra_field_truncate_on_error(raw_extra_field: &RawExtraField) -> ExtraField { + match ExtraField::try_parse(raw_extra_field) { + Ok(v) => v, + Err(v) => { + warn!( + target: "blockchain::monero::parse_extra_field_truncate_on_error", + "[BLOCKCHAIN] Some Monero tx_extra subfields could not be parsed", + ); + v + } + } +} + +/// Extract the Monero block hash from the coinbase transaction's extra field +pub fn extract_aux_merkle_root_from_block(monero: &monero::Block) -> Result> { + // When we extract the merge mining hash, we do not care if the extra + // field can be parsed without error. + let extra_field = parse_extra_field_truncate_on_error(&monero.miner_tx.prefix.extra); + + // Only one merge mining tag is allowed + let merge_mining_hashes: Vec = extra_field + .0 + .iter() + .filter_map(|item| { + if let SubField::MergeMining(_depth, merge_mining_hash) = item { + Some(*merge_mining_hash) + } else { + None + } + }) + .collect(); + + if merge_mining_hashes.len() > 1 { + return Err(MoneroMergeMineError("More than one MM tag found in coinbase".to_string())) + } + + if let Some(merge_mining_hash) = merge_mining_hashes.into_iter().next() { + Ok(Some(merge_mining_hash)) + } else { + Ok(None) + } +} diff --git a/src/blockchain/monero/utils.rs b/src/blockchain/monero/utils.rs index abf3cdef2..672818a47 100644 --- a/src/blockchain/monero/utils.rs +++ b/src/blockchain/monero/utils.rs @@ -16,10 +16,12 @@ * along with this program. If not, see . */ -use monero::Hash; +use monero::{Hash, VarInt}; +use primitive_types::U256; +use sha2::{Digest, Sha256}; -use super::MerkleProof; -use crate::{Error, Result}; +use super::{MerkleProof, MerkleTreeParameters, MoneroPowData}; +use crate::{blockchain::HeaderHash, Error, Result}; /// Returns the Keccak 256 hash of the byte input pub fn cn_fast_hash(data: &[u8]) -> Hash { @@ -181,3 +183,59 @@ pub fn create_merkle_proof(hashes: &[Hash], hash: &Hash) -> Option } } } + +/// Creates a hex-encoded Monero blockhashing_blob +pub fn create_blockhashing_blob( + header: &monero::BlockHeader, + merkle_root: &monero::Hash, + transaction_count: u64, +) -> Vec { + let mut blockhashing_blob = monero::consensus::serialize(header); + blockhashing_blob.extend_from_slice(merkle_root.as_bytes()); + let mut count = monero::consensus::serialize(&VarInt(transaction_count)); + blockhashing_blob.append(&mut count); + blockhashing_blob +} + +#[allow(unused)] +fn check_aux_chains( + monero_data: &MoneroPowData, + merge_mining_params: VarInt, + aux_chain_merkle_root: &monero::Hash, + darkfi_hash: HeaderHash, + darkfi_genesis_hash: HeaderHash, +) -> bool { + let df_hash = monero::Hash::from_slice(darkfi_hash.as_slice()); + + if merge_mining_params == VarInt(0) { + // Interpret 0 as only 1 chain + if df_hash == *aux_chain_merkle_root { + return true + } + } + + let merkle_tree_params = MerkleTreeParameters::from_varint(merge_mining_params); + if merkle_tree_params.number_of_chains() == 0 { + return false + } + + let hash_position = U256::from_little_endian( + &Sha256::new() + .chain_update(darkfi_genesis_hash.as_slice()) + .chain_update(merkle_tree_params.aux_nonce().to_le_bytes()) + .chain_update((109_u8).to_le_bytes()) + .finalize(), + ) + .low_u32() % + u32::from(merkle_tree_params.number_of_chains()); + + let (merkle_root, pos) = monero_data + .aux_chain_merkle_proof + .calculate_root_with_pos(&df_hash, merkle_tree_params.number_of_chains()); + + if hash_position != pos { + return false + } + + merkle_root == *aux_chain_merkle_root +} diff --git a/src/validator/mod.rs 
b/src/validator/mod.rs index 7f2cc1d5c..9be2b9146 100644 --- a/src/validator/mod.rs +++ b/src/validator/mod.rs @@ -47,9 +47,6 @@ use pow::PoWModule; pub mod randomx_factory; pub use randomx_factory::RandomXFactory; -/// Monero infrastructure -pub mod xmr; - /// Verification functions pub mod verification; use verification::{ diff --git a/src/validator/pow.rs b/src/validator/pow.rs index 5ecea2f21..2ce31ded5 100644 --- a/src/validator/pow.rs +++ b/src/validator/pow.rs @@ -29,7 +29,7 @@ use darkfi_sdk::num_traits::{One, Zero}; use num_bigint::BigUint; use randomx::{RandomXCache, RandomXDataset, RandomXFlags, RandomXVM}; use smol::channel::Receiver; -use tracing::{debug, info}; +use tracing::debug; use crate::{ blockchain::{ @@ -338,7 +338,7 @@ impl PoWModule { target: "validator::pow::verify_block", "[VERIFIER] Creating Monero PoW RandomXCache", ); - let randomx_key = &powdata.randomx_key[..]; + let randomx_key = powdata.randomx_key(); let cache = RandomXCache::new(flags, randomx_key)?; let vm = self.monero_rx_factory.create(randomx_key, Some(cache), None)?; @@ -349,7 +349,7 @@ impl PoWModule { ); let verification_time = Instant::now(); - let out_hash = vm.calculate_hash(&powdata.create_block_hashing_blob())?; + let out_hash = vm.calculate_hash(&powdata.to_blockhashing_blob())?; (BigUint::from_bytes_le(&out_hash), verification_time) } }; diff --git a/src/validator/xmr/helpers.rs b/src/validator/xmr/helpers.rs deleted file mode 100644 index f63b6edce..000000000 --- a/src/validator/xmr/helpers.rs +++ /dev/null @@ -1,239 +0,0 @@ -/* This file is part of DarkFi (https://dark.fi) - * - * Copyright (C) 2020-2025 Dyne.org foundation - * Copyright (C) 2021 The Tari Project (BSD-3) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Affero General Public License as - * published by the Free Software Foundation, either version 3 of the - * License, or (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU Affero General Public License for more details. - * - * You should have received a copy of the GNU Affero General Public License - * along with this program. If not, see . 
- */ - -use std::{io, iter}; - -use monero::{ - blockdata::transaction::{ExtraField, RawExtraField, SubField}, - consensus::Encodable as XmrEncodable, - cryptonote::hash::Hashable, - VarInt, -}; -use primitive_types::U256; -use sha2::{Digest, Sha256}; -use tiny_keccak::{Hasher, Keccak}; -use tracing::warn; - -use super::merkle_tree_parameters::MerkleTreeParameters; -use crate::{ - blockchain::{ - header_store::HeaderHash, - monero::{ - fixed_array::FixedByteArray, - utils::{create_merkle_proof, tree_hash}, - MoneroPowData, - }, - }, - Error, - Error::MoneroMergeMineError, - Result, -}; - -/// Deserializes the given hex-encoded string into a Monero block -pub fn deserialize_monero_block_from_hex(data: T) -> io::Result -where - T: AsRef<[u8]>, -{ - let bytes = hex::decode(data).map_err(|_| io::Error::other("Invalid hex data"))?; - let obj = monero::consensus::deserialize::(&bytes) - .map_err(|_| io::Error::other("Invalid XMR block"))?; - Ok(obj) -} - -/// Serializes the given Monero block into a hex-encoded string -pub fn serialize_monero_block_to_hex(obj: &monero::Block) -> io::Result { - let data = monero::consensus::serialize::(obj); - let bytes = hex::encode(data); - Ok(bytes) -} - -/// Create a set of ordered tx hashes from a Monero block -pub fn create_ordered_tx_hashes_from_block(block: &monero::Block) -> Vec { - iter::once(block.miner_tx.hash()).chain(block.tx_hashes.clone()).collect() -} - -/// Creates a hex-encoded Monero blockhashing_blob -pub fn create_blockhashing_blob( - header: &monero::BlockHeader, - merkle_root: &monero::Hash, - transaction_count: u64, -) -> Vec { - let mut blockhashing_blob = monero::consensus::serialize(header); - blockhashing_blob.extend_from_slice(merkle_root.as_bytes()); - let mut count = monero::consensus::serialize(&VarInt(transaction_count)); - blockhashing_blob.append(&mut count); - blockhashing_blob -} - -/// Constructs [`MoneroPowData`] from the given block and seed -pub fn construct_monero_data( - block: monero::Block, - seed: FixedByteArray, - ordered_aux_chain_hashes: Vec, - darkfi_hash: HeaderHash, -) -> Result { - let hashes = create_ordered_tx_hashes_from_block(&block); - let root = tree_hash(&hashes)?; - - let coinbase_merkle_proof = create_merkle_proof(&hashes, &hashes[0]).ok_or_else(|| { - MoneroMergeMineError( - "create_merkle_proof returned None because the block had no coinbase".to_string(), - ) - })?; - - let coinbase = block.miner_tx.clone(); - - let mut keccak = Keccak::v256(); - let mut encoder_prefix = vec![]; - - coinbase - .prefix - .version - .consensus_encode(&mut encoder_prefix) - .map_err(|e| MoneroMergeMineError(e.to_string()))?; - - coinbase - .prefix - .unlock_time - .consensus_encode(&mut encoder_prefix) - .map_err(|e| MoneroMergeMineError(e.to_string()))?; - - coinbase - .prefix - .inputs - .consensus_encode(&mut encoder_prefix) - .map_err(|e| MoneroMergeMineError(e.to_string()))?; - - coinbase - .prefix - .outputs - .consensus_encode(&mut encoder_prefix) - .map_err(|e| MoneroMergeMineError(e.to_string()))?; - - keccak.update(&encoder_prefix); - - let t_hash = monero::Hash::from_slice(darkfi_hash.as_slice()); - let aux_chain_merkle_proof = create_merkle_proof(&ordered_aux_chain_hashes, &t_hash).ok_or_else(|| { - MoneroMergeMineError( - "create_merkle_proof returned None, could not find darkfi hash in ordered aux chain hashes".to_string(), - ) - })?; - - Ok(MoneroPowData { - header: block.header, - randomx_key: seed, - transaction_count: hashes.len() as u16, - merkle_root: root, - coinbase_merkle_proof, - 
coinbase_tx_extra: block.miner_tx.prefix.extra, - coinbase_tx_hasher: keccak, - aux_chain_merkle_proof, - }) -} - -fn check_aux_chains( - monero_data: &MoneroPowData, - merge_mining_params: VarInt, - aux_chain_merkle_root: &monero::Hash, - darkfi_hash: HeaderHash, - darkfi_genesis_hash: HeaderHash, -) -> bool { - let df_hash = monero::Hash::from_slice(darkfi_hash.as_slice()); - - if merge_mining_params == VarInt(0) { - // Interpret 0 as only 1 chain - if df_hash == *aux_chain_merkle_root { - return true - } - } - - let merkle_tree_params = MerkleTreeParameters::from_varint(merge_mining_params); - if merkle_tree_params.number_of_chains() == 0 { - return false - } - - let hash_position = U256::from_little_endian( - &Sha256::new() - .chain_update(darkfi_genesis_hash.as_slice()) - .chain_update(merkle_tree_params.aux_nonce().to_le_bytes()) - .chain_update((109_u8).to_le_bytes()) - .finalize(), - ) - .low_u32() % - u32::from(merkle_tree_params.number_of_chains()); - - let (merkle_root, pos) = monero_data - .aux_chain_merkle_proof - .calculate_root_with_pos(&df_hash, merkle_tree_params.number_of_chains()); - - if hash_position != pos { - return false - } - - merkle_root == *aux_chain_merkle_root -} - -// Parsing an extra field from bytes will always return an extra field with sub-fields -// that could be read, even if it does not represent the original extra field. As per -// Monero consensus rules, an error here will not represent a failure to deserialize a -// block, so no need to error here. -fn parse_extra_field_truncate_on_error(raw_extra_field: &RawExtraField) -> ExtraField { - match ExtraField::try_parse(raw_extra_field) { - Ok(val) => val, - Err(val) => { - warn!( - target: "validator::xmr::helpers", - "[MERGEMINING] Some sub-fields could not be parsed from the Monero coinbase", - ); - val - } - } -} - -/// Extracts the Monero block hash from the coinbase transaction's extra field -pub fn extract_aux_merkle_root_from_block(monero: &monero::Block) -> Result> { - // When we extract the merge mining hash, we do not care if - // the extra field can be parsed without error. - let extra_field = parse_extra_field_truncate_on_error(&monero.miner_tx.prefix.extra); - - // Only one merge mining tag is allowed - let merge_mining_hashes: Vec = extra_field - .0 - .iter() - .filter_map(|item| { - if let SubField::MergeMining(_depth, merge_mining_hash) = item { - Some(*merge_mining_hash) - } else { - None - } - }) - .collect(); - - if merge_mining_hashes.len() > 1 { - return Err(Error::MoneroMergeMineError( - "More than one merge mining tag found in coinbase".to_string(), - )) - } - - if let Some(merge_mining_hash) = merge_mining_hashes.into_iter().next() { - Ok(Some(merge_mining_hash)) - } else { - Ok(None) - } -} diff --git a/src/validator/xmr/mod.rs b/src/validator/xmr/mod.rs deleted file mode 100644 index 5ed4f96f6..000000000 --- a/src/validator/xmr/mod.rs +++ /dev/null @@ -1,74 +0,0 @@ -/* This file is part of DarkFi (https://dark.fi) - * - * Copyright (C) 2020-2025 Dyne.org foundation - * Copyright (C) 2021 The Tari Project (BSD-3) - * - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU Affero General Public License as - * published by the Free Software Foundation, either version 3 of the - * License, or (at your option) any later version. - * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the - * GNU Affero General Public License for more details. - * - * You should have received a copy of the GNU Affero General Public License - * along with this program. If not, see . - */ - -use monero::{ - consensus::Encodable, - cryptonote::hash::Hashable, - util::ringct::{RctSigBase, RctType}, - Hash, -}; -use tiny_keccak::Hasher; - -use crate::blockchain::monero::MoneroPowData; - -mod helpers; -use helpers::create_blockhashing_blob; - -pub mod merkle_tree_parameters; - -impl MoneroPowData { - /// Returns true if the coinbase Merkle proof produces the `merkle_root` hash. - pub fn is_coinbase_valid_merkle_root(&self) -> bool { - let mut finalised_prefix_keccak = self.coinbase_tx_hasher.clone(); - let mut encoder_extra_field = vec![]; - self.coinbase_tx_extra.consensus_encode(&mut encoder_extra_field).unwrap(); - finalised_prefix_keccak.update(&encoder_extra_field); - let mut prefix_hash: [u8; 32] = [0u8; 32]; - finalised_prefix_keccak.finalize(&mut prefix_hash); - - let final_prefix_hash = Hash::from_slice(&prefix_hash); - - // let mut finalised_keccak = Keccak::v256(); - let rct_sig_base = RctSigBase { - rct_type: RctType::Null, - txn_fee: Default::default(), - pseudo_outs: vec![], - ecdh_info: vec![], - out_pk: vec![], - }; - - let hashes = vec![final_prefix_hash, rct_sig_base.hash(), Hash::null()]; - let encoder_final: Vec = - hashes.into_iter().flat_map(|h| Vec::from(&h.to_bytes()[..])).collect(); - let coinbase_hash = Hash::new(encoder_final); - - let merkle_root = self.coinbase_merkle_proof.calculate_root(&coinbase_hash); - (self.merkle_root == merkle_root) && self.coinbase_merkle_proof.check_coinbase_path() - } - - /// Returns the blockhashing_blob for the Monero block - pub fn to_blockhashing_blob(&self) -> Vec { - create_blockhashing_blob(&self.header, &self.merkle_root, u64::from(self.transaction_count)) - } - - /// Returns the RandomX VM key - pub fn randomx_key(&self) -> &[u8] { - self.randomx_key.as_slice() - } -}
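Note on usage (not part of the patch itself): the two new endpoints follow p2pool's merge-mining protocol referenced above — the proxy first asks for the chain_id, then polls merge_mining_get_aux_block for a fresh DarkFi blocktemplate (returned as aux_blob/aux_hash plus the current aux_diff), and finally posts the mined Monero block back through merge_mining_submit_solution. The sketch below only illustrates that request/response shape from a client's point of view, using the example values from the RPCAPI doc comments; the reqwest/serde_json dependencies, the listen URL, and the placeholder parameter values are assumptions for illustration, not part of darkfid.

// Illustrative merge-mining client sketch (assumptions: darkfid serves its
// merge-mining JSON-RPC over HTTP at `url`; deps: reqwest = { features = ["blocking", "json"] }, serde_json).
use serde_json::{json, Value};

fn rpc_call(url: &str, method: &str, params: Value) -> Result<Value, Box<dyn std::error::Error>> {
    let req = json!({ "jsonrpc": "2.0", "id": 1, "method": method, "params": params });
    let resp: Value = reqwest::blocking::Client::new().post(url).json(&req).send()?.json()?;
    Ok(resp["result"].clone())
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let url = "http://127.0.0.1:8340"; // assumed darkfid JSON-RPC listen address

    // 1. Identify the aux chain (currently the DarkFi genesis block hash).
    let chain_id = rpc_call(url, "merge_mining_get_chain_id", json!({}))?;
    println!("chain_id: {chain_id}");

    // 2. Poll for an aux blocktemplate. An empty result means the current job is unchanged.
    let aux = rpc_call(
        url,
        "merge_mining_get_aux_block",
        json!({
            "address": "MERGE_MINED_CHAIN_ADDRESS", // DarkFi reward recipient (placeholder)
            "aux_hash": "f6952d6eef555ddd87aca66e56b91530222d6e318414816f3ba7cf5bf694bf0f",
            "height": 3000000, // Monero height being mined (placeholder)
            "prev_id": "ad505b0be8a49b89273e307106fa42133cbd804456724c5e7635bd953215d92a",
        }),
    )?;
    println!("aux_blob/aux_diff/aux_hash: {aux}");

    // 3. Once a Monero block embedding the aux Merkle root is found, submit it.
    //    `blob` is the hex-serialized Monero block; `merkle_proof`/`path` prove the
    //    position of aux_hash in the aux chain Merkle tree (placeholders below).
    let status = rpc_call(
        url,
        "merge_mining_submit_solution",
        json!({
            "aux_blob": aux["aux_blob"],
            "aux_hash": aux["aux_hash"],
            "blob": "<hex-serialized monero block>",
            "merkle_proof": ["hash1", "hash2", "hash3"],
            "path": 3,
            "seed_hash": "22c3d47c595ae888b5d7fc304235f92f8854644d4fad38c5680a5d4a81009fcd",
        }),
    )?;
    println!("status: {status}");
    Ok(())
}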