darkfid: Monero Merge Mining

This commit is contained in: x
2025-11-13 13:51:53 +00:00, committed by skoupidi
parent f9ee611b58
commit 95a8319007
16 changed files with 977 additions and 394 deletions

Cargo.lock (generated)

@@ -1833,7 +1833,6 @@ dependencies = [
  "semver",
  "serde",
  "sha2",
- "simplelog",
  "sled-overlay",
  "smol",
  "socket2",
@@ -2030,6 +2029,8 @@ dependencies = [
  "darkfi-serial",
  "darkfi_money_contract",
  "easy-parallel",
+ "hex",
+ "monero",
  "num-bigint",
  "rand 0.8.5",
  "serde",
@@ -4635,15 +4636,6 @@ dependencies = [
  "syn 2.0.104",
 ]
 
-[[package]]
-name = "num_threads"
-version = "0.1.7"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9"
-dependencies = [
- "libc",
-]
-
 [[package]]
 name = "objc2-core-foundation"
 version = "0.3.1"

----------------------------------------

@@ -30,6 +30,10 @@ async-trait = "0.1.88"
 tinyjson = "2.5.1"
 url = "2.5.4"
 
+# Monero
+monero = "0.21.0"
+hex = "0.4.3"
+
 # Daemon
 easy-parallel = "3.3.1"
 signal-hook-async-std = "0.3.0"
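
A minimal sketch of the round-trip the two new dependencies enable, using only APIs that appear elsewhere in this commit (`monero::Hash::new`, `Hash::from_slice`, `hex` encode/decode); this example is illustrative, not part of the commit:

fn main() {
    // monero::Hash::new() Keccak-256-hashes its input, as used for the
    // coinbase hash later in this commit.
    let h = monero::Hash::new(b"darkfi");
    let hex_str = hex::encode(h.as_bytes());
    let bytes = hex::decode(&hex_str).unwrap();
    assert_eq!(monero::Hash::from_slice(&bytes), h);
    println!("{hex_str}");
}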

----------------------------------------

@@ -26,6 +26,7 @@ use tracing::{debug, error, info, warn};
 use url::Url;
 
 use darkfi::{
+    blockchain::{BlockInfo, HeaderHash},
     net::settings::Settings,
     rpc::{
         jsonrpc::JsonSubscriber,
@@ -33,9 +34,15 @@ use darkfi::{
         settings::RpcSettings,
     },
     system::{ExecutorPtr, StoppableTask, StoppableTaskPtr},
-    validator::{Validator, ValidatorConfig, ValidatorPtr},
+    validator::{
+        consensus::Fork, utils::best_fork_index, Validator, ValidatorConfig, ValidatorPtr,
+    },
+    zk::{empty_witnesses, ProvingKey, ZkCircuit},
+    zkas::ZkBinary,
     Error, Result,
 };
+use darkfi_money_contract::MONEY_CONTRACT_ZKAS_MINT_NS_V1;
+use darkfi_sdk::crypto::{keypair::SecretKey, MONEY_CONTRACT_ID};
 
 #[cfg(test)]
 mod tests;
@@ -77,6 +84,10 @@ pub struct DarkfiNode {
     rpc_client: Option<Mutex<MinerRpcClient>>,
     /// HTTP JSON-RPC connection tracker
     mm_rpc_connections: Mutex<HashSet<StoppableTaskPtr>>,
+    /// Merge mining block templates
+    mm_blocktemplates: Mutex<HashMap<HeaderHash, (BlockInfo, SecretKey)>>,
+    /// PowRewardV1 ZK data
+    powrewardv1_zk: PowRewardV1Zk,
 }
 
 impl DarkfiNode {
@@ -86,8 +97,10 @@ impl DarkfiNode {
         txs_batch_size: usize,
         subscribers: HashMap<&'static str, JsonSubscriber>,
         rpc_client: Option<Mutex<MinerRpcClient>>,
-    ) -> DarkfiNodePtr {
-        Arc::new(Self {
+    ) -> Result<DarkfiNodePtr> {
+        let powrewardv1_zk = PowRewardV1Zk::new(validator.clone())?;
+
+        Ok(Arc::new(Self {
             p2p_handler,
             validator,
             txs_batch_size,
@@ -95,7 +108,42 @@
             rpc_connections: Mutex::new(HashSet::new()),
             rpc_client,
             mm_rpc_connections: Mutex::new(HashSet::new()),
-        })
+            mm_blocktemplates: Mutex::new(HashMap::new()),
+            powrewardv1_zk,
+        }))
+    }
+
+    /// Grab best current fork
+    pub async fn best_current_fork(&self) -> Result<Fork> {
+        let forks = self.validator.consensus.forks.read().await;
+        let index = best_fork_index(&forks)?;
+        forks[index].full_clone()
+    }
+}
+
+/// ZK data used to generate the "coinbase" transaction in a block
+pub(crate) struct PowRewardV1Zk {
+    pub zkbin: ZkBinary,
+    pub provingkey: ProvingKey,
+}
+
+impl PowRewardV1Zk {
+    pub fn new(validator: ValidatorPtr) -> Result<Self> {
+        info!(
+            target: "darkfid::PowRewardV1Zk::new",
+            "Generating PowRewardV1 ZkCircuit and ProvingKey...",
+        );
+
+        let (zkbin, _) = validator.blockchain.contracts.get_zkas(
+            &validator.blockchain.sled_db,
+            &MONEY_CONTRACT_ID,
+            MONEY_CONTRACT_ZKAS_MINT_NS_V1,
+        )?;
+
+        let circuit = ZkCircuit::new(empty_witnesses(&zkbin)?, &zkbin);
+        let provingkey = ProvingKey::build(zkbin.k, &circuit);
+
+        Ok(Self { zkbin, provingkey })
     }
 }
@@ -164,8 +212,8 @@ impl Darkfid {
         };
 
         // Initialize node
-        let node =
-            DarkfiNode::new(p2p_handler, validator, txs_batch_size, subscribers, rpc_client).await;
+        let node = DarkfiNode::new(p2p_handler, validator, txs_batch_size, subscribers, rpc_client)
+            .await?;
 
         // Generate the background tasks
         let dnet_task = StoppableTask::new();
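
A toy sketch (stand-in types, not the real ones) of the ownership pattern `PowRewardV1Zk` introduces above: the expensive proving data is built once inside `DarkfiNode::new()`, and afterwards the miner task and the merge mining RPC handlers only borrow it:

use std::sync::Arc;

struct ProvingData(Vec<u8>); // stand-in for (ZkBinary, ProvingKey)

struct Node {
    powreward_zk: ProvingData,
}

impl Node {
    fn new() -> Arc<Self> {
        // Built exactly once, like PowRewardV1Zk::new() in DarkfiNode::new().
        Arc::new(Self { powreward_zk: ProvingData(vec![0u8; 64]) })
    }
}

fn main() {
    let node = Node::new();
    // Callers borrow the same data on every iteration, never rebuilding it.
    for _ in 0..3 {
        assert_eq!(node.powreward_zk.0.len(), 64);
    }
}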

----------------------------------------

@@ -139,6 +139,8 @@ impl RequestHandler<MmRpcHandler> for DarkfiNode {
             // P2Pool methods requested for Monero Merge Mining
             // ================================================
             "merge_mining_get_chain_id" => self.xmr_merge_mining_get_chain_id(req.id, req.params).await,
+            "merge_mining_get_aux_block" => self.xmr_merge_mining_get_aux_block(req.id, req.params).await,
+            "merge_mining_submit_solution" => self.xmr_merge_mining_submit_solution(req.id, req.params).await,
 
             // ==============
             // Invalid method
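
A plausible request/response pair for the first method, written in the same `-->`/`<--` notation the RPCAPI comments in the next file use. The response shape follows the handler's code; the empty params object and the placeholder value are assumptions, not captured traffic:

--> {"jsonrpc":"2.0", "method": "merge_mining_get_chain_id", "params": {}, "id": 1}
<-- {"jsonrpc":"2.0", "result": {"chain_id": "<hex-encoded DarkFi genesis block hash>"}, "id": 1}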

----------------------------------------

@@ -16,13 +16,32 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
-use std::collections::HashMap;
+use std::{collections::HashMap, str::FromStr};
 
-use darkfi::rpc::jsonrpc::{ErrorCode, JsonError, JsonResponse, JsonResult};
+use darkfi::{
+    blockchain::{
+        header_store::PowData,
+        monero::{
+            extract_aux_merkle_root_from_block,
+            fixed_array::{FixedByteArray, MaxSizeVec},
+            merkle_proof::MerkleProof,
+            monero_block_deserialize, MoneroPowData,
+        },
+        HeaderHash,
+    },
+    rpc::jsonrpc::{ErrorCode, ErrorCode::InvalidParams, JsonError, JsonResponse, JsonResult},
+    validator::consensus::Proposal,
+};
+use darkfi_sdk::crypto::PublicKey;
+use hex::FromHex;
 use tinyjson::JsonValue;
-use tracing::error;
+use tracing::{error, info};
 
-use crate::DarkfiNode;
+use crate::{
+    proto::ProposalMessage,
+    task::miner::{generate_next_block, MinerRewardsRecipientConfig},
+    DarkfiNode,
+};
 
 // https://github.com/SChernykh/p2pool/blob/master/docs/MERGE_MINING.MD
@@ -44,17 +63,18 @@ impl DarkfiNode {
             Err(e) => {
                 error!(
                     target: "darkfid::rpc::xmr_merge_mining_get_chain_id",
-                    "[RPC] Error fetching genesis block hash: {e}"
+                    "[RPC-XMR] Error fetching genesis block hash: {e}"
                 );
                 return JsonError::new(ErrorCode::InternalError, None, id).into()
             }
         };
 
-        let genesis_hex = genesis_hash.to_string();
-        assert!(genesis_hex.len() == 32);
-        let resp_obj = HashMap::from([("chain_id".to_string(), genesis_hex.into())]);
-        JsonResponse::new(resp_obj.into(), id).into()
+        // TODO: XXX: This should also have more specialized identifiers.
+        // e.g. chain_id = H(genesis || aux_nonce || checkpoint_height)
+        let response =
+            HashMap::from([("chain_id".to_string(), JsonValue::from(genesis_hash.to_string()))]);
+        JsonResponse::new(JsonValue::from(response), id).into()
     }
 
     // RPCAPI:
@@ -77,7 +97,146 @@ impl DarkfiNode {
     // --> {"jsonrpc":"2.0", "method": "merge_mining_get_aux_block", "params": {"address": "MERGE_MINED_CHAIN_ADDRESS", "aux_hash": "f6952d6eef555ddd87aca66e56b91530222d6e318414816f3ba7cf5bf694bf0f", "height": 3000000, "prev_id":"ad505b0be8a49b89273e307106fa42133cbd804456724c5e7635bd953215d92a"}, "id": 1}
     // <-- {"jsonrpc":"2.0", "result": {"aux_blob": "4c6f72656d20697073756d", "aux_diff": 123456, "aux_hash":"f6952d6eef555ddd87aca66e56b91530222d6e318414816f3ba7cf5bf694bf0f"}, "id": 1}
     pub async fn xmr_merge_mining_get_aux_block(&self, id: u16, params: JsonValue) -> JsonResult {
-        todo!()
+        let Some(params) = params.get::<HashMap<String, JsonValue>>() else {
+            return JsonError::new(InvalidParams, None, id).into()
+        };
+
+        // Validate address
+        let Some(address) = params.get("address") else {
+            return JsonError::new(InvalidParams, Some("missing address".to_string()), id).into()
+        };
+        let Some(address) = address.get::<String>() else {
+            return JsonError::new(InvalidParams, Some("invalid address format".to_string()), id)
+                .into()
+        };
+        let Ok(address) = PublicKey::from_str(address) else {
+            return JsonError::new(InvalidParams, Some("invalid address format".to_string()), id)
+                .into()
+        };
+
+        // Validate aux_hash
+        let Some(aux_hash) = params.get("aux_hash") else {
+            return JsonError::new(InvalidParams, Some("missing aux_hash".to_string()), id).into()
+        };
+        let Some(aux_hash) = aux_hash.get::<String>() else {
+            return JsonError::new(InvalidParams, Some("invalid aux_hash format".to_string()), id)
+                .into()
+        };
+        let Ok(aux_hash) = HeaderHash::from_str(aux_hash) else {
+            return JsonError::new(InvalidParams, Some("invalid aux_hash format".to_string()), id)
+                .into()
+        };
+
+        // Validate height
+        let Some(height) = params.get("height") else {
+            return JsonError::new(InvalidParams, Some("missing height".to_string()), id).into()
+        };
+        let Some(height) = height.get::<f64>() else {
+            return JsonError::new(InvalidParams, Some("invalid height format".to_string()), id)
+                .into()
+        };
+        let height = *height as u64;
+
+        // Validate prev_id
+        let Some(prev_id) = params.get("prev_id") else {
+            return JsonError::new(InvalidParams, Some("missing prev_id".to_string()), id).into()
+        };
+        let Some(prev_id) = prev_id.get::<String>() else {
+            return JsonError::new(InvalidParams, Some("invalid prev_id format".to_string()), id)
+                .into()
+        };
+        let Ok(prev_id) = hex::decode(prev_id) else {
+            return JsonError::new(InvalidParams, Some("invalid prev_id format".to_string()), id)
+                .into()
+        };
+        let prev_id = monero::Hash::from_slice(&prev_id);
+
+        info!(
+            target: "darkfid::rpc_xmr::xmr_merge_mining_get_aux_block",
+            "[RPC-XMR] Got blocktemplate request: address={}, aux_hash={}, height={}, prev_id={}",
+            address, aux_hash, height, prev_id,
+        );
+
+        // Method params format is correct. Let's check if we provided this
+        // mining job already. If so, we can just return an empty response.
+        // We'll also obtain a lock here to avoid getting polled multiple
+        // times and potentially missing a job. The lock is released when
+        // this function exits.
+        let mut mm_blocktemplates = self.mm_blocktemplates.lock().await;
+        if mm_blocktemplates.contains_key(&aux_hash) {
+            return JsonResponse::new(JsonValue::from(HashMap::new()), id).into()
+        }
+
+        // If it's a new job, clear the previous one(s).
+        mm_blocktemplates.clear();
+
+        // At this point, we should query the Validator for a new blocktemplate.
+        // We first need to construct `MinerRewardsRecipientConfig` from the
+        // address provided to us through the RPC.
+        let recipient_config =
+            MinerRewardsRecipientConfig { recipient: address, spend_hook: None, user_data: None };
+
+        // Now let's try to construct the blocktemplate.
+        let mut extended_fork = match self.best_current_fork().await {
+            Ok(f) => f,
+            Err(e) => {
+                error!(
+                    target: "darkfid::rpc_xmr::xmr_merge_mining_get_aux_block",
+                    "[RPC-XMR] Finding best fork index failed: {e}",
+                );
+                return JsonError::new(ErrorCode::InternalError, None, id).into()
+            }
+        };
+
+        // Find the difficulty. Note we cast it to f64 here.
+        let difficulty: f64 = match extended_fork.module.next_difficulty() {
+            Ok(v) => {
+                // We will attempt to cast it to f64. This should always work.
+                v.to_string().parse().unwrap()
+            }
+            Err(e) => {
+                error!(
+                    target: "darkfid::rpc_xmr::xmr_merge_mining_get_aux_block",
+                    "[RPC-XMR] Finding next mining difficulty failed: {e}",
+                );
+                return JsonError::new(ErrorCode::InternalError, None, id).into()
+            }
+        };
+
+        let (_, blocktemplate, block_signing_secret) = match generate_next_block(
+            &mut extended_fork,
+            &recipient_config,
+            &self.powrewardv1_zk.zkbin,
+            &self.powrewardv1_zk.provingkey,
+            self.validator.consensus.module.read().await.target,
+            self.validator.verify_fees,
+        )
+        .await
+        {
+            Ok(v) => v,
+            Err(e) => {
+                error!(
+                    target: "darkfid::rpc_xmr::xmr_merge_mining_get_aux_block",
+                    "[RPC-XMR] Failed to generate next blocktemplate: {e}",
+                );
+                return JsonError::new(ErrorCode::InternalError, None, id).into()
+            }
+        };
+
+        // Now we have the blocktemplate. We'll mark it down in memory,
+        // and then ship it to RPC.
+        let blockhash = blocktemplate.header.template_hash();
+        mm_blocktemplates.insert(blockhash, (blocktemplate, block_signing_secret));
+
+        let response = JsonValue::from(HashMap::from([
+            ("aux_blob".to_string(), JsonValue::from(blockhash.as_string())),
+            ("aux_diff".to_string(), JsonValue::from(difficulty)),
+            ("aux_hash".to_string(), JsonValue::from(blockhash.as_string())),
+        ]));
+
+        info!("<-- {}", response.stringify().unwrap());
+        JsonResponse::new(response, id).into()
     }
     // RPCAPI:
@@ -103,6 +262,264 @@ impl DarkfiNode {
     // --> {"jsonrpc":"2.0", "method": "merge_mining_submit_solution", "params": {"aux_blob": "4c6f72656d20697073756d", "aux_hash": "f6952d6eef555ddd87aca66e56b91530222d6e318414816f3ba7cf5bf694bf0f", "blob": "...", "merkle_proof": ["hash1", "hash2", "hash3"], "path": 3, "seed_hash": "22c3d47c595ae888b5d7fc304235f92f8854644d4fad38c5680a5d4a81009fcd"}, "id": 1}
     // <-- {"jsonrpc":"2.0", "result": {"status": "accepted"}, "id": 1}
     pub async fn xmr_merge_mining_submit_solution(&self, id: u16, params: JsonValue) -> JsonResult {
-        todo!()
+        let Some(params) = params.get::<HashMap<String, JsonValue>>() else {
+            return JsonError::new(InvalidParams, None, id).into()
+        };
+
+        // Validate aux_blob
+        let Some(aux_blob) = params.get("aux_blob") else {
+            return JsonError::new(InvalidParams, Some("missing aux_blob".to_string()), id).into()
+        };
+        let Some(aux_blob) = aux_blob.get::<String>() else {
+            return JsonError::new(InvalidParams, Some("invalid aux_blob format".to_string()), id)
+                .into()
+        };
+        let Ok(_aux_blob) = HeaderHash::from_str(aux_blob) else {
+            return JsonError::new(InvalidParams, Some("invalid aux_blob format".to_string()), id)
+                .into()
+        };
+
+        // Validate aux_hash
+        let Some(aux_hash) = params.get("aux_hash") else {
+            return JsonError::new(InvalidParams, Some("missing aux_hash".to_string()), id).into()
+        };
+        let Some(aux_hash) = aux_hash.get::<String>() else {
+            return JsonError::new(InvalidParams, Some("invalid aux_hash format".to_string()), id)
+                .into()
+        };
+        let Ok(aux_hash) = HeaderHash::from_str(aux_hash) else {
+            return JsonError::new(InvalidParams, Some("invalid aux_hash format".to_string()), id)
+                .into()
+        };
+
+        // If we don't know about this `aux_hash`, we can just abort here.
+        let mut mm_blocktemplates = self.mm_blocktemplates.lock().await;
+        if !mm_blocktemplates.contains_key(&aux_hash) {
+            return JsonError::new(InvalidParams, Some("unknown aux_hash".to_string()), id).into()
+        }
+
+        // Validate blob
+        let Some(blob) = params.get("blob") else {
+            return JsonError::new(InvalidParams, Some("missing blob".to_string()), id).into()
+        };
+        let Some(blob) = blob.get::<String>() else {
+            return JsonError::new(InvalidParams, Some("invalid blob format".to_string()), id).into()
+        };
+        let Ok(block) = monero_block_deserialize(blob) else {
+            return JsonError::new(InvalidParams, Some("invalid blob format".to_string()), id).into()
+        };
+
+        // Validate merkle_proof
+        let Some(merkle_proof_j) = params.get("merkle_proof") else {
+            return JsonError::new(InvalidParams, Some("missing merkle_proof".to_string()), id)
+                .into()
+        };
+        let Some(merkle_proof_j) = merkle_proof_j.get::<Vec<JsonValue>>() else {
+            return JsonError::new(
+                InvalidParams,
+                Some("invalid merkle_proof format".to_string()),
+                id,
+            )
+            .into()
+        };
+        let mut merkle_proof: Vec<monero::Hash> = Vec::with_capacity(merkle_proof_j.len());
+        for hash in merkle_proof_j.iter() {
+            match hash.get::<String>() {
+                Some(v) => {
+                    let Ok(val) = monero::Hash::from_hex(v) else {
+                        return JsonError::new(
+                            InvalidParams,
+                            Some("invalid merkle_proof format".to_string()),
+                            id,
+                        )
+                        .into()
+                    };
+                    merkle_proof.push(val);
+                }
+                None => {
+                    return JsonError::new(
+                        InvalidParams,
+                        Some("invalid merkle_proof format".to_string()),
+                        id,
+                    )
+                    .into()
+                }
+            }
+        }
+
+        // Validate path
+        let Some(path) = params.get("path") else {
+            return JsonError::new(InvalidParams, Some("missing path".to_string()), id).into()
+        };
+        let Some(path) = path.get::<f64>() else {
+            return JsonError::new(InvalidParams, Some("invalid path format".to_string()), id).into()
+        };
+        let path = *path as u32;
+
+        // Validate seed_hash
+        let Some(seed_hash) = params.get("seed_hash") else {
+            return JsonError::new(InvalidParams, Some("missing seed_hash".to_string()), id).into()
+        };
+        let Some(seed_hash) = seed_hash.get::<String>() else {
+            return JsonError::new(InvalidParams, Some("invalid seed_hash format".to_string()), id)
+                .into()
+        };
+        let Ok(seed_hash) = monero::Hash::from_hex(seed_hash) else {
+            return JsonError::new(InvalidParams, Some("invalid seed_hash format".to_string()), id)
+                .into()
+        };
+        let Ok(seed_hash) = FixedByteArray::from_bytes(seed_hash.as_bytes()) else {
+            return JsonError::new(InvalidParams, Some("invalid seed_hash format".to_string()), id)
+                .into()
+        };
+
+        info!(
+            target: "darkfid::rpc_xmr::xmr_merge_mining_submit_solution",
+            "[RPC-XMR] Got solution submission: aux_hash={aux_hash}",
+        );
+
+        // =======================================
+        // Now we will validate the block contents
+        // =======================================
+        let Ok(merkle_root) = extract_aux_merkle_root_from_block(&block) else {
+            error!(
+                target: "darkfid::rpc_xmr::xmr_merge_mining_submit_solution",
+                "[RPC-XMR] Extracting aux_merkle_root from XMR block failed",
+            );
+            return JsonError::new(InvalidParams, None, id).into()
+        };
+        let Some(merkle_root) = merkle_root else {
+            error!(
+                target: "darkfid::rpc_xmr::xmr_merge_mining_submit_solution",
+                "[RPC-XMR] Did not find merge mining hash in block",
+            );
+            return JsonError::new(InvalidParams, None, id).into()
+        };
+
+        // Verify the merge mining hash
+        let Some(aux_hash_merkle_proof) = MerkleProof::try_construct(merkle_proof, path) else {
+            return JsonError::new(
+                InvalidParams,
+                Some("invalid aux_hash merkle proof".to_string()),
+                id,
+            )
+            .into()
+        };
+        if aux_hash_merkle_proof.calculate_root(&monero::Hash::from(aux_hash.inner())) !=
+            merkle_root
+        {
+            error!(
+                target: "darkfid::rpc_xmr::xmr_merge_mining_submit_solution",
+                "[RPC-XMR] Could not validate aux_hash Merkle root",
+            );
+            return JsonError::new(
+                InvalidParams,
+                Some("invalid aux_hash merkle proof".to_string()),
+                id,
+            )
+            .into()
+        }
+
+        // Construct MoneroPowData
+        let aux_chain_hashes =
+            MaxSizeVec::from_items_truncate(vec![monero::Hash::from(aux_hash.inner())]);
+        let monero_pow_data =
+            match MoneroPowData::new(block, seed_hash, aux_chain_hashes, *aux_hash.inner()) {
+                Ok(v) => v,
+                Err(e) => {
+                    error!(
+                        target: "darkfid::rpc_xmr::xmr_merge_mining_submit_solution",
+                        "[RPC-XMR] Failed constructing MoneroPowData: {e}",
+                    );
+                    return JsonError::new(
+                        InvalidParams,
+                        Some("failed constructing moneropowdata".to_string()),
+                        id,
+                    )
+                    .into()
+                }
+            };
+
+        if !monero_pow_data.is_coinbase_valid_merkle_root() {
+            error!(
+                target: "darkfid::rpc_xmr::xmr_merge_mining_submit_solution",
+                "[RPC-XMR] MoneroPowData invalid coinbase Merkle proof",
+            );
+            return JsonError::new(
+                InvalidParams,
+                Some("invalid coinbase merkle proof".to_string()),
+                id,
+            )
+            .into()
+        }
+
+        // Append MoneroPowData to the DarkFi block and verify it.
+        let extended_fork = match self.best_current_fork().await {
+            Ok(f) => f,
+            Err(e) => {
+                error!(
+                    target: "darkfid::rpc_xmr::xmr_merge_mining_submit_solution",
+                    "[RPC-XMR] Finding best fork index failed: {e}",
+                );
+                return JsonError::new(ErrorCode::InternalError, None, id).into()
+            }
+        };
+        let (block, secret) = &mm_blocktemplates.get(&aux_hash).unwrap();
+        let mut block = block.clone();
+        block.header.pow_data = PowData::Monero(monero_pow_data);
+        block.sign(secret);
+
+        if let Err(e) = extended_fork.module.verify_current_block(&block.header) {
+            error!(
+                target: "darkfid::rpc_xmr::xmr_merge_mining_submit_solution",
+                "[RPC-XMR] Failed verifying merge mined block: {e}",
+            );
+            return JsonError::new(ErrorCode::InternalError, None, id).into()
+        }
+
+        info!(
+            target: "darkfid::rpc_xmr::xmr_merge_mining_submit_solution",
+            "[RPC-XMR] Success verifying merge mined block!",
+        );
+
+        // At this point we should be able to clear the working job.
+        // We still won't release the lock in hope of proposing the block
+        // first.
+        mm_blocktemplates.clear();
+
+        // Propose the new block.
+        info!(
+            target: "darkfid::rpc_xmr::xmr_merge_mining_submit_solution",
+            "[RPC-XMR] Proposing new block to network",
+        );
+        let proposal = Proposal::new(block);
+        if let Err(e) = self.validator.append_proposal(&proposal).await {
+            error!(
+                target: "darkfid::rpc_xmr::xmr_merge_mining_submit_solution",
+                "[RPC-XMR] Error proposing new block: {e}",
+            );
+            return JsonError::new(ErrorCode::InternalError, None, id).into()
+        }
+
+        info!(
+            target: "darkfid::rpc_xmr::xmr_merge_mining_submit_solution",
+            "[RPC-XMR] Broadcasting new block to network",
+        );
+        let message = ProposalMessage(proposal);
+        self.p2p_handler.p2p.broadcast(&message).await;
+
+        JsonResponse::new(
+            JsonValue::from(HashMap::from([(
+                "status".to_string(),
+                JsonValue::from("accepted".to_string()),
+            )])),
+            id,
+        )
+        .into()
     }
 }
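
Both handlers above lean on the same tinyjson validation pattern. A self-contained sketch of it, assuming only the tinyjson 2.x API used in this diff (the `parse_height` helper itself is hypothetical):

use std::collections::HashMap;

use tinyjson::JsonValue;

// Hypothetical helper mirroring the handlers' let-else validation chain.
fn parse_height(params: &JsonValue) -> Option<u64> {
    // The params must deserialize to a JSON object...
    let params = params.get::<HashMap<String, JsonValue>>()?;
    // ...and tinyjson exposes all numbers as f64, hence the cast to u64,
    // exactly as xmr_merge_mining_get_aux_block does for "height".
    let height = params.get("height")?.get::<f64>()?;
    Some(*height as u64)
}

fn main() {
    let req: JsonValue = r#"{"height": 3000000}"#.parse().unwrap();
    assert_eq!(parse_height(&req), Some(3_000_000));
}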

----------------------------------------

@@ -28,15 +28,13 @@ use darkfi::{
         utils::best_fork_index,
         verification::apply_producer_transaction,
     },
-    zk::{empty_witnesses, ProvingKey, ZkCircuit},
+    zk::ProvingKey,
     zkas::ZkBinary,
     Error, Result,
 };
-use darkfi_money_contract::{
-    client::pow_reward_v1::PoWRewardCallBuilder, MoneyFunction, MONEY_CONTRACT_ZKAS_MINT_NS_V1,
-};
+use darkfi_money_contract::{client::pow_reward_v1::PoWRewardCallBuilder, MoneyFunction};
 use darkfi_sdk::{
-    crypto::{poseidon_hash, FuncId, MerkleTree, PublicKey, SecretKey, MONEY_CONTRACT_ID},
+    crypto::{FuncId, MerkleTree, PublicKey, SecretKey, MONEY_CONTRACT_ID},
     pasta::pallas,
     ContractCall,
 };
@@ -75,16 +73,6 @@ pub async fn miner_task(
     // Initialize miner configuration
     info!(target: "darkfid::task::miner_task", "Starting miner task...");
 
-    // Grab zkas proving keys and bin for PoWReward transaction
-    info!(target: "darkfid::task::miner_task", "Generating zkas bin and proving keys...");
-    let (zkbin, _) = node.validator.blockchain.contracts.get_zkas(
-        &node.validator.blockchain.sled_db,
-        &MONEY_CONTRACT_ID,
-        MONEY_CONTRACT_ZKAS_MINT_NS_V1,
-    )?;
-    let circuit = ZkCircuit::new(empty_witnesses(&zkbin)?, &zkbin);
-    let pk = ProvingKey::build(zkbin.k, &circuit);
-
     // Grab blocks subscriber
     let block_sub = node.subscribers.get("blocks").unwrap();
@@ -132,36 +120,28 @@
     // Start miner loop
     loop {
         // Grab best current fork
-        let forks = node.validator.consensus.forks.read().await;
-        let index = match best_fork_index(&forks) {
-            Ok(i) => i,
-            Err(e) => {
-                error!(
-                    target: "darkfid::task::miner_task",
-                    "Finding best fork index failed: {e}"
-                );
-                continue
-            }
-        };
-        let extended_fork = match forks[index].full_clone() {
+        let extended_fork = match node.best_current_fork().await {
             Ok(f) => f,
             Err(e) => {
                 error!(
                     target: "darkfid::task::miner_task",
-                    "Fork full clone creation failed: {e}"
+                    "Finding best fork index failed: {e}",
                 );
                 continue
             }
         };
-        drop(forks);
 
         // Grab extended fork last proposal hash
        let last_proposal_hash = extended_fork.last_proposal()?.hash;
 
+        // Grab zkas proving keys and bin for PoWReward transaction
+        let zkbin = &node.powrewardv1_zk.zkbin;
+        let pk = &node.powrewardv1_zk.provingkey;
+
         // Start listenning for network proposals and mining next block for best fork.
         match smol::future::or(
             listen_to_network(node, last_proposal_hash, &subscription, &sender),
-            mine(node, extended_fork, recipient_config, &zkbin, &pk, &stop_signal, skip_sync),
+            mine(node, extended_fork, recipient_config, zkbin, pk, &stop_signal, skip_sync),
         )
         .await
         {
@@ -351,7 +331,7 @@ async fn mine_next_block(
 }
 
 /// Auxiliary function to generate next block in an atomic manner.
-async fn generate_next_block(
+pub async fn generate_next_block(
     extended_fork: &mut Fork,
     recipient_config: &MinerRewardsRecipientConfig,
     zkbin: &ZkBinary,
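
The miner loop above races the network listener against the miner with `smol::future::or`. A minimal standalone illustration of that primitive (the string payloads are placeholders):

use std::time::Duration;

fn main() {
    smol::block_on(async {
        let network = async { "proposal received" };
        let miner = async {
            smol::Timer::after(Duration::from_secs(1)).await;
            "block mined"
        };
        // or() resolves with whichever future completes first, so a new
        // proposal can interrupt mining on a now-stale fork.
        let winner = smol::future::or(network, miner).await;
        assert_eq!(winner, "proposal received");
    });
}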

----------------------------------------

@@ -20,7 +20,7 @@ pub mod consensus;
 pub use consensus::consensus_init_task;
 
 pub mod miner;
-pub use miner::miner_task;
+pub use miner::{generate_next_block, miner_task};
 
 pub mod sync;
 pub use sync::sync_task;

----------------------------------------

@@ -290,7 +290,7 @@ pub async fn generate_node(
     let p2p_handler = DarkfidP2pHandler::init(settings, ex).await?;
 
     let node =
         DarkfiNode::new(p2p_handler.clone(), validator.clone(), 50, subscribers.clone(), None)
-            .await;
+            .await?;
 
     p2p_handler.clone().start(ex, &validator, &subscribers).await?;

----------------------------------------

@@ -18,14 +18,15 @@
 use std::{
     io::{self, Read, Write},
-    ops::Deref,
+    marker::PhantomData,
+    ops::{Deref, DerefMut},
 };
 
 #[cfg(feature = "async-serial")]
 use darkfi_serial::{
     async_trait, AsyncDecodable, AsyncEncodable, AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt,
 };
-use darkfi_serial::{Decodable, Encodable, ReadExt, WriteExt};
+use darkfi_serial::{Decodable, Encodable, ReadExt, SerialDecodable, SerialEncodable, WriteExt};
 
 const MAX_ARR_SIZE: usize = 60;
@@ -69,6 +70,21 @@ impl FixedByteArray {
     pub fn to_vec(&self) -> Vec<u8> {
         self.as_slice().to_vec()
     }
+
+    pub fn from_bytes(bytes: &[u8]) -> io::Result<Self> {
+        if bytes.len() > MAX_ARR_SIZE {
+            return Err(io::Error::new(io::ErrorKind::OutOfMemory, "Slice too large"))
+        }
+
+        let len = u8::try_from(bytes.len()).map_err(|_| io::ErrorKind::OutOfMemory)?;
+        let mut elems = [0u8; MAX_ARR_SIZE];
+        elems
+            .get_mut(..len as usize)
+            .expect("Cannot fail")
+            .copy_from_slice(bytes.get(..len as usize).expect("Cannot fail"));
+
+        Ok(Self { elems, len })
+    }
 }
 
 impl Deref for FixedByteArray {
@@ -121,7 +137,7 @@ impl Decodable for FixedByteArray {
         if len > MAX_ARR_SIZE {
             return Err(io::Error::new(
                 io::ErrorKind::InvalidInput,
-                format!("length exceeded max of 60 bytes for FixedByteArray: {}", len),
+                format!("length exceeded max of 60 bytes for FixedByteArray: {len}"),
             ));
         }
@@ -143,7 +159,7 @@ impl AsyncDecodable for FixedByteArray {
         if len > MAX_ARR_SIZE {
             return Err(io::Error::new(
                 io::ErrorKind::InvalidInput,
-                format!("length exceeded max of 60 bytes for FixedByteArray: {}", len),
+                format!("length exceeded max of 60 bytes for FixedByteArray: {len}"),
            ));
        }
@@ -157,6 +173,132 @@
     }
 }
 
+/// A vector that has a maximum size of `MAX_SIZE`
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, SerialEncodable, SerialDecodable)]
+pub struct MaxSizeVec<T, const MAX_SIZE: usize>
+where
+    T: Send + Sync,
+{
+    vec: Vec<T>,
+    _marker: PhantomData<T>,
+}
+
+impl<T, const MAX_SIZE: usize> Default for MaxSizeVec<T, MAX_SIZE>
+where
+    T: Send + Sync,
+{
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+impl<T, const MAX_SIZE: usize> MaxSizeVec<T, MAX_SIZE>
+where
+    T: Send + Sync,
+{
+    /// Creates a new `MaxSizeVec` with a capacity of `MAX_SIZE`
+    pub fn new() -> Self {
+        Self { vec: Vec::new(), _marker: PhantomData }
+    }
+
+    /// Creates a new `MaxSizeVec` with the given data.
+    /// Returns an error if the data length exceeds `MAX_SIZE`.
+    pub fn new_with_data(data: Vec<T>) -> io::Result<Self> {
+        if data.len() > MAX_SIZE {
+            return Err(io::Error::new(io::ErrorKind::StorageFull, "Size exceeded"))
+        }
+        Ok(Self { vec: data, _marker: PhantomData })
+    }
+
+    /// Creates a `MaxSizeVec` from the given items, truncating if needed
+    pub fn from_items_truncate(items: Vec<T>) -> Self {
+        let len = std::cmp::min(items.len(), MAX_SIZE);
+        Self { vec: items.into_iter().take(len).collect(), _marker: PhantomData }
+    }
+
+    /// Consumes `MaxSizeVec` and returns the inner `Vec<T>`
+    pub fn into_vec(self) -> Vec<T> {
+        self.vec
+    }
+
+    /// Returns the maximum size of the `MaxSizeVec`
+    pub fn max_size(&self) -> usize {
+        MAX_SIZE
+    }
+
+    /// Pushes an item to the `MaxSizeVec`
+    pub fn push(&mut self, item: T) -> io::Result<()> {
+        if self.vec.len() >= MAX_SIZE {
+            return Err(io::Error::new(io::ErrorKind::StorageFull, "Size exceeded"))
+        }
+        self.vec.push(item);
+        Ok(())
+    }
+}
+
+impl<T, const MAX_SIZE: usize> AsRef<[T]> for MaxSizeVec<T, MAX_SIZE>
+where
+    T: Send + Sync,
+{
+    fn as_ref(&self) -> &[T] {
+        &self.vec
+    }
+}
+
+impl<T, const MAX_SIZE: usize> Deref for MaxSizeVec<T, MAX_SIZE>
+where
+    T: Send + Sync,
+{
+    type Target = [T];
+
+    fn deref(&self) -> &Self::Target {
+        &self.vec
+    }
+}
+
+impl<T, const MAX_SIZE: usize> DerefMut for MaxSizeVec<T, MAX_SIZE>
+where
+    T: Send + Sync,
+{
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.vec
+    }
+}
+
+impl<T, const MAX_SIZE: usize> Iterator for MaxSizeVec<T, MAX_SIZE>
+where
+    T: Send + Sync,
+{
+    type Item = T;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        if self.vec.is_empty() {
+            None
+        } else {
+            Some(self.vec.remove(0))
+        }
+    }
+}
+
+impl<T, const MAX_SIZE: usize> FromIterator<T> for MaxSizeVec<T, MAX_SIZE>
+where
+    T: Send + Sync,
+{
+    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
+        let mut vec = vec![];
+        for item in iter {
+            if vec.len() >= MAX_SIZE {
+                break
+            }
+            vec.push(item);
+        }
+        Self { vec, _marker: PhantomData }
+    }
+}
 
 #[cfg(test)]
 mod tests {
     use super::*;
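
A usage sketch for the new `MaxSizeVec` (the module path is taken from the import in the RPC file earlier in this commit):

use darkfi::blockchain::monero::fixed_array::MaxSizeVec;

fn main() -> std::io::Result<()> {
    let mut v: MaxSizeVec<u8, 2> = MaxSizeVec::new();
    v.push(1)?;
    v.push(2)?;
    // The capacity is a hard limit: push() errors instead of growing.
    assert!(v.push(3).is_err());

    // from_items_truncate() silently drops the overflow instead.
    let t = MaxSizeVec::<u8, 2>::from_items_truncate(vec![1, 2, 3]);
    assert_eq!(t.into_vec(), vec![1, 2]);
    Ok(())
}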

----------------------------------------

@@ -19,20 +19,27 @@
 use std::{
     fmt,
     io::{self, Cursor, Error, Read, Write},
+    iter,
 };
 
+use darkfi_sdk::{hex::decode_hex, AsHex};
 #[cfg(feature = "async-serial")]
 use darkfi_serial::{async_trait, AsyncDecodable, AsyncEncodable, AsyncRead, AsyncWrite};
 use darkfi_serial::{Decodable, Encodable};
 use monero::{
-    blockdata::transaction::RawExtraField,
+    blockdata::transaction::{ExtraField, RawExtraField, SubField},
     consensus::{Decodable as XmrDecodable, Encodable as XmrEncodable},
-    BlockHeader, Hash,
+    cryptonote::hash::Hashable,
+    util::ringct::{RctSigBase, RctType},
+    BlockHeader,
 };
 use tiny_keccak::{Hasher, Keccak};
+use tracing::warn;
+
+use crate::{Error::MoneroMergeMineError, Result};
 
 pub mod fixed_array;
-use fixed_array::FixedByteArray;
+use fixed_array::{FixedByteArray, MaxSizeVec};
 
 pub mod merkle_proof;
 use merkle_proof::MerkleProof;
@@ -41,6 +48,12 @@ pub mod keccak;
 use keccak::{keccak_from_bytes, keccak_to_bytes};
 
 pub mod utils;
+use utils::{create_blockhashing_blob, create_merkle_proof, tree_hash};
+
+pub mod merkle_tree_parameters;
+pub use merkle_tree_parameters::MerkleTreeParameters;
+
+pub type AuxChainHashes = MaxSizeVec<monero::Hash, 128>;
 
 /// This struct represents all the Proof of Work information required
 /// for merge mining.
@@ -49,13 +62,12 @@ pub struct MoneroPowData {
     /// Monero Header fields
     pub header: BlockHeader,
     /// RandomX VM key - length varies to a max len of 60.
-    /// TODO: Implement a type, or use randomx_key[0] to define len.
     pub randomx_key: FixedByteArray,
     /// The number of transactions included in this Monero block.
     /// This is used to produce the blockhashing_blob.
     pub transaction_count: u16,
     /// Transaction root
-    pub merkle_root: Hash,
+    pub merkle_root: monero::Hash,
     /// Coinbase Merkle proof hashes
     pub coinbase_merkle_proof: MerkleProof,
     /// Incomplete hashed state of the coinbase transaction
@@ -66,6 +78,96 @@
     pub aux_chain_merkle_proof: MerkleProof,
 }
 
+impl MoneroPowData {
+    /// Constructs the Monero PoW data from the given block and seed
+    pub fn new(
+        block: monero::Block,
+        seed: FixedByteArray,
+        ordered_aux_chain_hashes: AuxChainHashes,
+        darkfi_hash: [u8; 32],
+    ) -> Result<Self> {
+        let hashes = create_ordered_tx_hashes_from_block(&block);
+        let root = tree_hash(&hashes)?;
+        let hash =
+            hashes.first().ok_or(MoneroMergeMineError("No hashes for Merkle proof".to_string()))?;
+        let coinbase_merkle_proof = create_merkle_proof(&hashes, hash).ok_or_else(|| {
+            MoneroMergeMineError(
+                "create_merkle_proof returned None because the block has no coinbase".to_string(),
+            )
+        })?;
+
+        let coinbase = block.miner_tx.clone();
+        let mut keccak = Keccak::v256();
+        let mut encoder_prefix = vec![];
+        coinbase.prefix.version.consensus_encode(&mut encoder_prefix)?;
+        coinbase.prefix.unlock_time.consensus_encode(&mut encoder_prefix)?;
+        coinbase.prefix.inputs.consensus_encode(&mut encoder_prefix)?;
+        coinbase.prefix.outputs.consensus_encode(&mut encoder_prefix)?;
+        keccak.update(&encoder_prefix);
+
+        let d_hash = monero::Hash::from_slice(darkfi_hash.as_slice());
+        let aux_chain_merkle_proof = create_merkle_proof(&ordered_aux_chain_hashes, &d_hash)
+            .ok_or_else(|| {
+                MoneroMergeMineError(
+                    "create_merkle_proof returned None, could not find darkfi hash in aux chain hashes"
+                        .to_string(),
+                )
+            })?;
+
+        Ok(Self {
+            header: block.header,
+            randomx_key: seed,
+            transaction_count: hashes.len() as u16,
+            merkle_root: root,
+            coinbase_merkle_proof,
+            coinbase_tx_extra: block.miner_tx.prefix.extra,
+            coinbase_tx_hasher: keccak,
+            aux_chain_merkle_proof,
+        })
+    }
+
+    /// Returns `true` if the coinbase Merkle proof produces the `merkle_root`
+    /// hash, otherwise `false`.
+    pub fn is_coinbase_valid_merkle_root(&self) -> bool {
+        let mut finalised_prefix_keccak = self.coinbase_tx_hasher.clone();
+        let mut encoder_extra_field = vec![];
+        self.coinbase_tx_extra.consensus_encode(&mut encoder_extra_field).unwrap();
+        finalised_prefix_keccak.update(&encoder_extra_field);
+        let mut prefix_hash: [u8; 32] = [0u8; 32];
+        finalised_prefix_keccak.finalize(&mut prefix_hash);
+        let final_prefix_hash = monero::Hash::from_slice(&prefix_hash);
+
+        // let mut finalised_keccak = Keccak::v256();
+        let rct_sig_base = RctSigBase {
+            rct_type: RctType::Null,
+            txn_fee: Default::default(),
+            pseudo_outs: vec![],
+            ecdh_info: vec![],
+            out_pk: vec![],
+        };
+        let hashes = vec![final_prefix_hash, rct_sig_base.hash(), monero::Hash::null()];
+        let encoder_final: Vec<u8> =
+            hashes.into_iter().flat_map(|h| Vec::from(&h.to_bytes()[..])).collect();
+        let coinbase_hash = monero::Hash::new(encoder_final);
+
+        let merkle_root = self.coinbase_merkle_proof.calculate_root(&coinbase_hash);
+        (self.merkle_root == merkle_root) && self.coinbase_merkle_proof.check_coinbase_path()
+    }
+
+    /// Returns the blockhashing_blob for the Monero block
+    pub fn to_blockhashing_blob(&self) -> Vec<u8> {
+        create_blockhashing_blob(&self.header, &self.merkle_root, u64::from(self.transaction_count))
+    }
+
+    /// Returns the RandomX VM key
+    pub fn randomx_key(&self) -> &[u8] {
+        self.randomx_key.as_slice()
+    }
+}
+
 impl fmt::Debug for MoneroPowData {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let mut digest = [0u8; 32];
@@ -144,7 +246,7 @@
         let transaction_count: u16 = Decodable::decode(d)?;
 
         let merkle_root =
-            Hash::consensus_decode(d).map_err(|_| Error::other("Invalid XMR hash"))?;
+            monero::Hash::consensus_decode(d).map_err(|_| Error::other("Invalid XMR hash"))?;
 
         let coinbase_merkle_proof: MerkleProof = Decodable::decode(d)?;
@@ -182,8 +284,8 @@
         let buf: Vec<u8> = AsyncDecodable::decode_async(d).await?;
         let mut buf = Cursor::new(buf);
 
-        let merkle_root =
-            Hash::consensus_decode(&mut buf).map_err(|_| Error::other("Invalid XMR hash"))?;
+        let merkle_root = monero::Hash::consensus_decode(&mut buf)
+            .map_err(|_| Error::other("Invalid XMR hash"))?;
 
         let coinbase_merkle_proof: MerkleProof = AsyncDecodable::decode_async(d).await?;
@@ -206,3 +308,157 @@
         })
     }
 }
+
+/// Create a set of ordered transaction hashes from a Monero block
+pub fn create_ordered_tx_hashes_from_block(block: &monero::Block) -> Vec<monero::Hash> {
+    iter::once(block.miner_tx.hash()).chain(block.tx_hashes.clone()).collect()
+}
+
+/// Inserts aux chain merkle root and info into a Monero block
+pub fn insert_aux_chain_mr_and_info_into_block<T: AsRef<[u8]>>(
+    block: &mut monero::Block,
+    aux_chain_mr: T,
+    aux_chain_count: u8,
+    aux_nonce: u32,
+) -> Result<()> {
+    if aux_chain_count == 0 {
+        return Err(MoneroMergeMineError("Zero aux chains".to_string()))
+    }
+
+    if aux_chain_mr.as_ref().len() != monero::Hash::len_bytes() {
+        return Err(MoneroMergeMineError("Aux chain root invalid length".to_string()))
+    }
+
+    // When we insert the Merge Mining tag, we need to make sure
+    // that the extra field is valid.
+    let mut extra_field = match ExtraField::try_parse(&block.miner_tx.prefix.extra) {
+        Ok(v) => v,
+        Err(e) => return Err(MoneroMergeMineError(e.to_string())),
+    };
+
+    // Adding more than one Merge Mining tag is not allowed
+    for item in &extra_field.0 {
+        if let SubField::MergeMining(_, _) = item {
+            return Err(MoneroMergeMineError("More than one mm tag in coinbase".to_string()))
+        }
+    }
+
+    // If `SubField::Padding(n)` with `n < 255` is the last subfield in the
+    // extra field, then appending a new field will always fail to deserialize
+    // (`ExtraField::try_parse`) - the new field cannot be parsed in that
+    // sequence.
+    // To circumvent this, we create a new extra field by appending the
+    // original extra field to the merge mining field instead.
+    let hash = monero::Hash::from_slice(aux_chain_mr.as_ref());
+    let encoded = if aux_chain_count == 1 {
+        monero::VarInt(0)
+    } else {
+        let mt_params = MerkleTreeParameters::new(aux_chain_count, aux_nonce)?;
+        mt_params.to_varint()
+    };
+    extra_field.0.insert(0, SubField::MergeMining(encoded, hash));
+    block.miner_tx.prefix.extra = extra_field.into();
+
+    // Let's test the block to ensure it serializes correctly.
+    let blocktemplate_ser = monero::consensus::serialize(block);
+    let blocktemplate_hex = blocktemplate_ser.hex();
+    let blocktemplate_bytes = decode_hex(&blocktemplate_hex)
+        .collect::<std::result::Result<Vec<_>, _>>()
+        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
+    let de_block: monero::Block = match monero::consensus::deserialize(&blocktemplate_bytes) {
+        Ok(v) => v,
+        Err(e) => return Err(io::Error::new(io::ErrorKind::InvalidData, e).into()),
+    };
+
+    if block != &de_block {
+        return Err(MoneroMergeMineError("Blocks don't match after serialization".to_string()))
+    }
+
+    Ok(())
+}
+
+/*
+/// Creates a hex-encoded Monero `blockhashing_blob`
+fn create_block_hashing_blob(
+    header: &monero::BlockHeader,
+    merkle_root: &monero::Hash,
+    transaction_count: u64,
+) -> Vec<u8> {
+    let mut blockhashing_blob = monero::consensus::serialize(header);
+    blockhashing_blob.extend_from_slice(merkle_root.as_bytes());
+    let mut count = monero::consensus::serialize(&monero::VarInt(transaction_count));
+    blockhashing_blob.append(&mut count);
+    blockhashing_blob
+}
+
+/// Creates a hex-encoded Monero `blockhashing_blob` that's used by the PoW hash
+fn create_blockhashing_blob_from_blob(block: &monero::Block) -> Result<String> {
+    let tx_hashes = create_ordered_tx_hashes_from_block(block);
+    let root = tree_hash(&tx_hashes)?;
+    let blob = create_block_hashing_blob(&block.header, &root, tx_hashes.len() as u64);
+    Ok(blob.hex())
+}
+*/
+
+/// Try to decode a `monero::Block` given a hex blob
+pub fn monero_block_deserialize(blob: &str) -> Result<monero::Block> {
+    let bytes: Vec<u8> = decode_hex(blob)
+        .collect::<std::result::Result<Vec<_>, _>>()
+        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
+
+    let mut reader = Cursor::new(bytes);
+    match monero::Block::consensus_decode(&mut reader) {
+        Ok(v) => Ok(v),
+        Err(e) => Err(io::Error::new(io::ErrorKind::InvalidData, e).into()),
+    }
+}
+
+/// Parsing an extra field from bytes will always return an extra field with
+/// subfields that could be read, even if it does not represent the original
+/// extra field.
+/// As per Monero consensus rules, an error here will not represent failure
+/// to deserialize a block, so no need to error here.
+fn parse_extra_field_truncate_on_error(raw_extra_field: &RawExtraField) -> ExtraField {
+    match ExtraField::try_parse(raw_extra_field) {
+        Ok(v) => v,
+        Err(v) => {
+            warn!(
+                target: "blockchain::monero::parse_extra_field_truncate_on_error",
+                "[BLOCKCHAIN] Some Monero tx_extra subfields could not be parsed",
+            );
+            v
+        }
+    }
+}
+
+/// Extract the aux chain Merkle root (merge mining hash) from the coinbase
+/// transaction's extra field
+pub fn extract_aux_merkle_root_from_block(monero: &monero::Block) -> Result<Option<monero::Hash>> {
+    // When we extract the merge mining hash, we do not care if the extra
+    // field can be parsed without error.
+    let extra_field = parse_extra_field_truncate_on_error(&monero.miner_tx.prefix.extra);
+
+    // Only one merge mining tag is allowed
+    let merge_mining_hashes: Vec<monero::Hash> = extra_field
+        .0
+        .iter()
+        .filter_map(|item| {
+            if let SubField::MergeMining(_depth, merge_mining_hash) = item {
+                Some(*merge_mining_hash)
+            } else {
+                None
+            }
+        })
+        .collect();
+
+    if merge_mining_hashes.len() > 1 {
+        return Err(MoneroMergeMineError("More than one MM tag found in coinbase".to_string()))
+    }
+
+    if let Some(merge_mining_hash) = merge_mining_hashes.into_iter().next() {
+        Ok(Some(merge_mining_hash))
+    } else {
+        Ok(None)
+    }
+}
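
A hedged round-trip sketch of the merge mining tag handling that `insert_aux_chain_mr_and_info_into_block` and `extract_aux_merkle_root_from_block` implement above, using only monero-rs types and calls that appear in this file:

use monero::{
    blockdata::transaction::{ExtraField, RawExtraField, SubField},
    Hash, VarInt,
};

fn main() {
    let aux_root = Hash::new(b"aux merkle root"); // placeholder root
    // VarInt(0) encodes the single-aux-chain case, as in the code above.
    let extra = ExtraField(vec![SubField::MergeMining(VarInt(0), aux_root)]);
    let raw: RawExtraField = extra.into();

    // Parse it back, tolerating partial parses like the code above does.
    let parsed = match ExtraField::try_parse(&raw) {
        Ok(v) | Err(v) => v,
    };
    match parsed.0.as_slice() {
        [SubField::MergeMining(_, h)] => assert_eq!(*h, aux_root),
        _ => unreachable!("exactly one merge mining tag was inserted"),
    }
}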

----------------------------------------

@@ -16,10 +16,12 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
-use monero::Hash;
+use monero::{Hash, VarInt};
+use primitive_types::U256;
+use sha2::{Digest, Sha256};
 
-use super::MerkleProof;
-use crate::{Error, Result};
+use super::{MerkleProof, MerkleTreeParameters, MoneroPowData};
+use crate::{blockchain::HeaderHash, Error, Result};
 
 /// Returns the Keccak 256 hash of the byte input
 pub fn cn_fast_hash(data: &[u8]) -> Hash {
@@ -181,3 +183,59 @@ pub fn create_merkle_proof(hashes: &[Hash], hash: &Hash) -> Option<MerkleProof>
         }
     }
 }
+
+/// Creates a Monero `blockhashing_blob`
+pub fn create_blockhashing_blob(
+    header: &monero::BlockHeader,
+    merkle_root: &monero::Hash,
+    transaction_count: u64,
+) -> Vec<u8> {
+    let mut blockhashing_blob = monero::consensus::serialize(header);
+    blockhashing_blob.extend_from_slice(merkle_root.as_bytes());
+    let mut count = monero::consensus::serialize(&VarInt(transaction_count));
+    blockhashing_blob.append(&mut count);
+    blockhashing_blob
+}
+
+#[allow(unused)]
+fn check_aux_chains(
+    monero_data: &MoneroPowData,
+    merge_mining_params: VarInt,
+    aux_chain_merkle_root: &monero::Hash,
+    darkfi_hash: HeaderHash,
+    darkfi_genesis_hash: HeaderHash,
+) -> bool {
+    let df_hash = monero::Hash::from_slice(darkfi_hash.as_slice());
+    if merge_mining_params == VarInt(0) {
+        // Interpret 0 as only 1 chain
+        if df_hash == *aux_chain_merkle_root {
+            return true
+        }
+    }
+
+    let merkle_tree_params = MerkleTreeParameters::from_varint(merge_mining_params);
+    if merkle_tree_params.number_of_chains() == 0 {
+        return false
+    }
+
+    let hash_position = U256::from_little_endian(
+        &Sha256::new()
+            .chain_update(darkfi_genesis_hash.as_slice())
+            .chain_update(merkle_tree_params.aux_nonce().to_le_bytes())
+            .chain_update((109_u8).to_le_bytes())
+            .finalize(),
+    )
+    .low_u32() %
+        u32::from(merkle_tree_params.number_of_chains());
+
+    let (merkle_root, pos) = monero_data
+        .aux_chain_merkle_proof
+        .calculate_root_with_pos(&df_hash, merkle_tree_params.number_of_chains());
+    if hash_position != pos {
+        return false
+    }
+
+    merkle_root == *aux_chain_merkle_root
+}
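
The slot computation in `check_aux_chains` is easy to get wrong, so here is a worked, self-contained version with placeholder inputs. The `aux_slot` helper is hypothetical; the hashing recipe mirrors the code above: sha256(genesis_hash || aux_nonce_le || 0x6d), read little-endian, taken modulo the chain count.

use primitive_types::U256;
use sha2::{Digest, Sha256};

fn aux_slot(genesis_hash: &[u8; 32], aux_nonce: u32, n_chains: u32) -> u32 {
    let digest = Sha256::new()
        .chain_update(genesis_hash)
        .chain_update(aux_nonce.to_le_bytes())
        .chain_update(109u8.to_le_bytes()) // the 0x6d domain-separator byte
        .finalize();
    U256::from_little_endian(&digest).low_u32() % n_chains
}

fn main() {
    let genesis = [0u8; 32]; // placeholder genesis hash
    let slot = aux_slot(&genesis, 42, 7);
    assert!(slot < 7);
    println!("aux chain slot: {slot}");
}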

----------------------------------------

@@ -47,9 +47,6 @@ use pow::PoWModule;
 pub mod randomx_factory;
 pub use randomx_factory::RandomXFactory;
 
-/// Monero infrastructure
-pub mod xmr;
-
 /// Verification functions
 pub mod verification;
 use verification::{

----------------------------------------

@@ -29,7 +29,7 @@ use darkfi_sdk::num_traits::{One, Zero};
 use num_bigint::BigUint;
 use randomx::{RandomXCache, RandomXDataset, RandomXFlags, RandomXVM};
 use smol::channel::Receiver;
-use tracing::{debug, info};
+use tracing::debug;
 
 use crate::{
     blockchain::{
@@ -338,7 +338,7 @@ impl PoWModule {
                     target: "validator::pow::verify_block",
                     "[VERIFIER] Creating Monero PoW RandomXCache",
                 );
-                let randomx_key = &powdata.randomx_key[..];
+                let randomx_key = powdata.randomx_key();
                 let cache = RandomXCache::new(flags, randomx_key)?;
                 let vm = self.monero_rx_factory.create(randomx_key, Some(cache), None)?;
@@ -349,7 +349,7 @@ impl PoWModule {
                 );
                 let verification_time = Instant::now();
-                let out_hash = vm.calculate_hash(&powdata.create_block_hashing_blob())?;
+                let out_hash = vm.calculate_hash(&powdata.to_blockhashing_blob())?;
                 (BigUint::from_bytes_le(&out_hash), verification_time)
             }
         };

----------------------------------------

@@ -1,239 +0,0 @@
/* This file is part of DarkFi (https://dark.fi)
 *
 * Copyright (C) 2020-2025 Dyne.org foundation
 * Copyright (C) 2021 The Tari Project (BSD-3)
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 */

use std::{io, iter};

use monero::{
    blockdata::transaction::{ExtraField, RawExtraField, SubField},
    consensus::Encodable as XmrEncodable,
    cryptonote::hash::Hashable,
    VarInt,
};
use primitive_types::U256;
use sha2::{Digest, Sha256};
use tiny_keccak::{Hasher, Keccak};
use tracing::warn;

use super::merkle_tree_parameters::MerkleTreeParameters;
use crate::{
    blockchain::{
        header_store::HeaderHash,
        monero::{
            fixed_array::FixedByteArray,
            utils::{create_merkle_proof, tree_hash},
            MoneroPowData,
        },
    },
    Error,
    Error::MoneroMergeMineError,
    Result,
};

/// Deserializes the given hex-encoded string into a Monero block
pub fn deserialize_monero_block_from_hex<T>(data: T) -> io::Result<monero::Block>
where
    T: AsRef<[u8]>,
{
    let bytes = hex::decode(data).map_err(|_| io::Error::other("Invalid hex data"))?;
    let obj = monero::consensus::deserialize::<monero::Block>(&bytes)
        .map_err(|_| io::Error::other("Invalid XMR block"))?;
    Ok(obj)
}

/// Serializes the given Monero block into a hex-encoded string
pub fn serialize_monero_block_to_hex(obj: &monero::Block) -> io::Result<String> {
    let data = monero::consensus::serialize::<monero::Block>(obj);
    let bytes = hex::encode(data);
    Ok(bytes)
}

/// Create a set of ordered tx hashes from a Monero block
pub fn create_ordered_tx_hashes_from_block(block: &monero::Block) -> Vec<monero::Hash> {
    iter::once(block.miner_tx.hash()).chain(block.tx_hashes.clone()).collect()
}

/// Creates a hex-encoded Monero blockhashing_blob
pub fn create_blockhashing_blob(
    header: &monero::BlockHeader,
    merkle_root: &monero::Hash,
    transaction_count: u64,
) -> Vec<u8> {
    let mut blockhashing_blob = monero::consensus::serialize(header);
    blockhashing_blob.extend_from_slice(merkle_root.as_bytes());
    let mut count = monero::consensus::serialize(&VarInt(transaction_count));
    blockhashing_blob.append(&mut count);
    blockhashing_blob
}

/// Constructs [`MoneroPowData`] from the given block and seed
pub fn construct_monero_data(
    block: monero::Block,
    seed: FixedByteArray,
    ordered_aux_chain_hashes: Vec<monero::Hash>,
    darkfi_hash: HeaderHash,
) -> Result<MoneroPowData> {
    let hashes = create_ordered_tx_hashes_from_block(&block);
    let root = tree_hash(&hashes)?;
    let coinbase_merkle_proof = create_merkle_proof(&hashes, &hashes[0]).ok_or_else(|| {
        MoneroMergeMineError(
            "create_merkle_proof returned None because the block had no coinbase".to_string(),
        )
    })?;

    let coinbase = block.miner_tx.clone();
    let mut keccak = Keccak::v256();
    let mut encoder_prefix = vec![];
    coinbase
        .prefix
        .version
        .consensus_encode(&mut encoder_prefix)
        .map_err(|e| MoneroMergeMineError(e.to_string()))?;
    coinbase
        .prefix
        .unlock_time
        .consensus_encode(&mut encoder_prefix)
        .map_err(|e| MoneroMergeMineError(e.to_string()))?;
    coinbase
        .prefix
        .inputs
        .consensus_encode(&mut encoder_prefix)
        .map_err(|e| MoneroMergeMineError(e.to_string()))?;
    coinbase
        .prefix
        .outputs
        .consensus_encode(&mut encoder_prefix)
        .map_err(|e| MoneroMergeMineError(e.to_string()))?;
    keccak.update(&encoder_prefix);

    let t_hash = monero::Hash::from_slice(darkfi_hash.as_slice());
    let aux_chain_merkle_proof = create_merkle_proof(&ordered_aux_chain_hashes, &t_hash)
        .ok_or_else(|| {
            MoneroMergeMineError(
                "create_merkle_proof returned None, could not find darkfi hash in ordered aux chain hashes".to_string(),
            )
        })?;

    Ok(MoneroPowData {
        header: block.header,
        randomx_key: seed,
        transaction_count: hashes.len() as u16,
        merkle_root: root,
        coinbase_merkle_proof,
        coinbase_tx_extra: block.miner_tx.prefix.extra,
        coinbase_tx_hasher: keccak,
        aux_chain_merkle_proof,
    })
}

fn check_aux_chains(
    monero_data: &MoneroPowData,
    merge_mining_params: VarInt,
    aux_chain_merkle_root: &monero::Hash,
    darkfi_hash: HeaderHash,
    darkfi_genesis_hash: HeaderHash,
) -> bool {
    let df_hash = monero::Hash::from_slice(darkfi_hash.as_slice());
    if merge_mining_params == VarInt(0) {
        // Interpret 0 as only 1 chain
        if df_hash == *aux_chain_merkle_root {
            return true
        }
    }

    let merkle_tree_params = MerkleTreeParameters::from_varint(merge_mining_params);
    if merkle_tree_params.number_of_chains() == 0 {
        return false
    }

    let hash_position = U256::from_little_endian(
        &Sha256::new()
            .chain_update(darkfi_genesis_hash.as_slice())
            .chain_update(merkle_tree_params.aux_nonce().to_le_bytes())
            .chain_update((109_u8).to_le_bytes())
            .finalize(),
    )
    .low_u32() %
        u32::from(merkle_tree_params.number_of_chains());

    let (merkle_root, pos) = monero_data
        .aux_chain_merkle_proof
        .calculate_root_with_pos(&df_hash, merkle_tree_params.number_of_chains());
    if hash_position != pos {
        return false
    }

    merkle_root == *aux_chain_merkle_root
}

// Parsing an extra field from bytes will always return an extra field with sub-fields
// that could be read, even if it does not represent the original extra field. As per
// Monero consensus rules, an error here will not represent a failure to deserialize a
// block, so no need to error here.
fn parse_extra_field_truncate_on_error(raw_extra_field: &RawExtraField) -> ExtraField {
    match ExtraField::try_parse(raw_extra_field) {
        Ok(val) => val,
        Err(val) => {
            warn!(
                target: "validator::xmr::helpers",
                "[MERGEMINING] Some sub-fields could not be parsed from the Monero coinbase",
            );
            val
        }
    }
}

/// Extracts the Monero block hash from the coinbase transaction's extra field
pub fn extract_aux_merkle_root_from_block(monero: &monero::Block) -> Result<Option<monero::Hash>> {
    // When we extract the merge mining hash, we do not care if
    // the extra field can be parsed without error.
    let extra_field = parse_extra_field_truncate_on_error(&monero.miner_tx.prefix.extra);

    // Only one merge mining tag is allowed
    let merge_mining_hashes: Vec<monero::Hash> = extra_field
        .0
        .iter()
        .filter_map(|item| {
            if let SubField::MergeMining(_depth, merge_mining_hash) = item {
                Some(*merge_mining_hash)
            } else {
                None
            }
        })
        .collect();

    if merge_mining_hashes.len() > 1 {
        return Err(Error::MoneroMergeMineError(
            "More than one merge mining tag found in coinbase".to_string(),
        ))
    }

    if let Some(merge_mining_hash) = merge_mining_hashes.into_iter().next() {
        Ok(Some(merge_mining_hash))
    } else {
        Ok(None)
    }
}

----------------------------------------

@@ -1,74 +0,0 @@
/* This file is part of DarkFi (https://dark.fi)
 *
 * Copyright (C) 2020-2025 Dyne.org foundation
 * Copyright (C) 2021 The Tari Project (BSD-3)
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 */

use monero::{
    consensus::Encodable,
    cryptonote::hash::Hashable,
    util::ringct::{RctSigBase, RctType},
    Hash,
};
use tiny_keccak::Hasher;

use crate::blockchain::monero::MoneroPowData;

mod helpers;
use helpers::create_blockhashing_blob;

pub mod merkle_tree_parameters;

impl MoneroPowData {
    /// Returns true if the coinbase Merkle proof produces the `merkle_root` hash.
    pub fn is_coinbase_valid_merkle_root(&self) -> bool {
        let mut finalised_prefix_keccak = self.coinbase_tx_hasher.clone();
        let mut encoder_extra_field = vec![];
        self.coinbase_tx_extra.consensus_encode(&mut encoder_extra_field).unwrap();
        finalised_prefix_keccak.update(&encoder_extra_field);
        let mut prefix_hash: [u8; 32] = [0u8; 32];
        finalised_prefix_keccak.finalize(&mut prefix_hash);
        let final_prefix_hash = Hash::from_slice(&prefix_hash);

        // let mut finalised_keccak = Keccak::v256();
        let rct_sig_base = RctSigBase {
            rct_type: RctType::Null,
            txn_fee: Default::default(),
            pseudo_outs: vec![],
            ecdh_info: vec![],
            out_pk: vec![],
        };
        let hashes = vec![final_prefix_hash, rct_sig_base.hash(), Hash::null()];
        let encoder_final: Vec<u8> =
            hashes.into_iter().flat_map(|h| Vec::from(&h.to_bytes()[..])).collect();
        let coinbase_hash = Hash::new(encoder_final);

        let merkle_root = self.coinbase_merkle_proof.calculate_root(&coinbase_hash);
        (self.merkle_root == merkle_root) && self.coinbase_merkle_proof.check_coinbase_path()
    }

    /// Returns the blockhashing_blob for the Monero block
    pub fn to_blockhashing_blob(&self) -> Vec<u8> {
        create_blockhashing_blob(&self.header, &self.merkle_root, u64::from(self.transaction_count))
    }

    /// Returns the RandomX VM key
    pub fn randomx_key(&self) -> &[u8] {
        self.randomx_key.as_slice()
    }
}