Mirror of https://github.com/darkrenaissance/darkfi.git
event_graph: remove already fixed TODOs and debug log msgs
@@ -19,7 +19,6 @@
|
|||||||
use std::{collections::HashSet, time::UNIX_EPOCH};
|
use std::{collections::HashSet, time::UNIX_EPOCH};
|
||||||
|
|
||||||
use darkfi_serial::{async_trait, deserialize_async, Encodable, SerialDecodable, SerialEncodable};
|
use darkfi_serial::{async_trait, deserialize_async, Encodable, SerialDecodable, SerialEncodable};
|
||||||
use log::info;
|
|
||||||
use sled_overlay::{sled, SledTreeOverlay};
|
use sled_overlay::{sled, SledTreeOverlay};
|
||||||
|
|
||||||
use crate::Result;
|
use crate::Result;
|
||||||
@@ -100,7 +99,6 @@ impl Event {
     ) -> Result<bool> {
         // Let's not bother with empty events
         if self.content.is_empty() {
-            info!("content is emtpy");
            return Ok(false)
        }
 
@@ -160,7 +160,6 @@ impl DAGStore {
     /// This is called if prune_task activates.
     pub async fn add_dag(&mut self, dag_name: &str, genesis_event: &Event) {
         debug!("add_dag::dags: {}", self.dags.len());
-        // TODO: sort dags by timestamp and drop the oldest
         if self.dags.len() > DAGS_MAX_NUMBER.try_into().unwrap() {
             while self.dags.len() >= DAGS_MAX_NUMBER.try_into().unwrap() {
                 debug!("[EVENTGRAPH] dropping oldest dag");
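
The removed TODO above ("sort dags by timestamp and drop the oldest") is what the surrounding while loop now handles: once the store holds at least DAGS_MAX_NUMBER DAGs, the oldest ones are evicted. A minimal standalone sketch of that eviction policy, using a plain BTreeMap keyed by genesis timestamp as a stand-in for the real sled-backed store (DagStub and evict_oldest are illustrative names, not darkfi's API):

use std::collections::BTreeMap;

const DAGS_MAX_NUMBER: usize = 3;

/// Stand-in for a DAG: only its name matters for this sketch.
#[derive(Debug)]
struct DagStub {
    name: String,
}

/// Evict the oldest DAGs until the store is back under the configured maximum.
/// Keying the map by genesis timestamp means "oldest" is simply the first entry.
fn evict_oldest(dags: &mut BTreeMap<u64, DagStub>) {
    while dags.len() >= DAGS_MAX_NUMBER {
        // BTreeMap iterates in ascending key order, so the first key is the oldest genesis.
        let oldest_ts = *dags.keys().next().unwrap();
        let dropped = dags.remove(&oldest_ts).unwrap();
        println!("dropping oldest dag {} (genesis ts {})", dropped.name, oldest_ts);
    }
}

fn main() {
    let mut dags = BTreeMap::new();
    for (ts, name) in [(100, "a"), (200, "b"), (300, "c"), (400, "d")] {
        dags.insert(ts, DagStub { name: name.into() });
    }
    evict_oldest(&mut dags);
    assert!(dags.len() < DAGS_MAX_NUMBER);
    println!("remaining: {:?}", dags.values().map(|d| &d.name).collect::<Vec<_>>());
}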
@@ -908,7 +907,6 @@ impl EventGraph {
         vec_tips
     }
 
-    // TODO: Fix fetching all events from all dags and then order and retrun them
     /// Perform a topological sort of the DAG.
     pub async fn order_events(&self) -> Vec<Event> {
         let mut ordered_events = VecDeque::new();
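
order_events is documented as a topological sort of the DAG, and the removed TODO concerned collecting events from all DAGs before ordering them. The hunk only shows the function head, so the following is a rough, self-contained illustration of one way to topologically order parent-linked events, breaking ties by timestamp; it is a Kahn-style sketch over a toy event type, not darkfi's implementation:

use std::collections::{HashMap, VecDeque};

/// Toy event: an id, the ids of its parents, and a timestamp used as a tie-breaker.
#[derive(Clone, Debug)]
struct ToyEvent {
    id: u32,
    parents: Vec<u32>,
    timestamp: u64,
}

/// Kahn's algorithm: repeatedly emit events whose parents have all been emitted,
/// preferring older timestamps when several are ready at once.
fn topological_order(events: &[ToyEvent]) -> Vec<ToyEvent> {
    let by_id: HashMap<u32, &ToyEvent> = events.iter().map(|e| (e.id, e)).collect();
    let mut pending: HashMap<u32, usize> =
        events.iter().map(|e| (e.id, e.parents.len())).collect();
    let mut ordered = VecDeque::new();

    while ordered.len() < events.len() {
        // Collect every event whose unprocessed-parent count has reached zero.
        let mut ready: Vec<u32> = pending
            .iter()
            .filter(|(_, deps)| **deps == 0)
            .map(|(id, _)| *id)
            .collect();
        ready.sort_by_key(|id| by_id[id].timestamp);
        let next = ready[0];
        pending.remove(&next);
        // Every child of `next` now has one fewer unprocessed parent.
        for e in events {
            if e.parents.contains(&next) {
                *pending.get_mut(&e.id).unwrap() -= 1;
            }
        }
        ordered.push_back(by_id[&next].clone());
    }
    ordered.into()
}

fn main() {
    let genesis = ToyEvent { id: 0, parents: vec![], timestamp: 0 };
    let a = ToyEvent { id: 1, parents: vec![0], timestamp: 10 };
    let b = ToyEvent { id: 2, parents: vec![0], timestamp: 5 };
    let c = ToyEvent { id: 3, parents: vec![1, 2], timestamp: 20 };
    let order = topological_order(&[genesis, a, b, c]);
    let ids: Vec<u32> = order.iter().map(|e| e.id).collect();
    assert_eq!(ids, vec![0, 2, 1, 3]); // genesis first, then by timestamp among ready events
}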
@@ -448,7 +448,6 @@ impl ProtocolEventGraph {
                 target: "event_graph::protocol::handle_event_req()",
                 "Fetching event {:?} from DAG", event_id,
             );
-            // TODO: search for the event among all the dags
             events.push(self.event_graph.fetch_event(event_id).await.unwrap().unwrap());
         }
 
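
The removed TODO here asked to search for the requested event among all the DAGs instead of looking it up only in the current one. A small sketch of that idea, with HashMaps standing in for the per-DAG sled trees (fetch_event_any_dag and the type aliases are hypothetical):

use std::collections::HashMap;

type EventId = [u8; 32];

/// Stand-in for a per-DAG event store (the real code uses sled trees).
type DagStore = HashMap<EventId, Vec<u8>>;

/// Look for an event id across every known DAG and return the first match,
/// rather than assuming it lives in the current DAG.
fn fetch_event_any_dag<'a>(
    dags: &'a HashMap<String, DagStore>,
    event_id: &EventId,
) -> Option<(&'a str, &'a Vec<u8>)> {
    dags.iter()
        .find_map(|(name, store)| store.get(event_id).map(|ev| (name.as_str(), ev)))
}

fn main() {
    let mut old_dag = DagStore::new();
    let id = [7u8; 32];
    old_dag.insert(id, b"hello".to_vec());

    let mut dags = HashMap::new();
    dags.insert("old".to_string(), old_dag);
    dags.insert("current".to_string(), DagStore::new());

    match fetch_event_any_dag(&dags, &id) {
        Some((dag, ev)) => println!("found {} bytes in dag {}", ev.len(), dag),
        None => println!("event not found in any dag"),
    }
}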
@@ -161,7 +161,6 @@ async fn assert_dags(eg_instances: &[Arc<EventGraph>], expected_len: usize, rng:
     for (i, eg) in eg_instances.iter().enumerate() {
         let current_genesis = eg.current_genesis.read().await;
         let dag_name = current_genesis.id().to_string();
-        info!("dag_name: {}", dag_name);
         let dag = eg.dag_store.read().await.get_dag(&dag_name);
         let unreferenced_tips = eg.dag_store.read().await.find_unreferenced_tips(&dag).await;
         let node_last_layer_tips = unreferenced_tips.last_key_value().unwrap().1.clone();
@@ -225,7 +224,6 @@ async fn eventgraph_propagation_real(ex: Arc<Executor<'static>>) {
     // =========================================
     // 1. Assert that everyone's DAG is the same
     // =========================================
-    info!("11111111111");
     assert_dags(&eg_instances, 1, &mut rng).await;
 
     // ==========================================
@@ -238,20 +236,9 @@ async fn eventgraph_propagation_real(ex: Arc<Executor<'static>>) {
     assert!(event.parents.contains(&genesis_event_id));
     // The node adds it to their DAG, on layer 1.
     let event_id = random_node.dag_insert(&[event.clone()], &dag_name).await.unwrap()[0];
-    info!("event id: {}", event_id);
     let store = random_node.dag_store.read().await;
-    let (d, tips_layers) = store.dags.get(&current_genesis.id()).unwrap();
-    for key in d.iter().keys() {
-        let x = key.unwrap();
-        let id = blake3::Hash::from_bytes((&x as &[u8]).try_into().unwrap());
-        info!("id: {}", id);
-    }
+    let (_, tips_layers) = store.dags.get(&current_genesis.id()).unwrap();
 
-    for (_, i) in tips_layers.iter() {
-        for j in i.iter() {
-            info!("j: {}", j);
-        }
-    }
     // Since genesis was referenced, its layer (0) have been removed
     assert_eq!(tips_layers.len(), 1);
     assert!(tips_layers.last_key_value().unwrap().1.get(&event_id).is_some());
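
The assertions at the end of this hunk rely on the tips-layers bookkeeping: once a new event references every tip on layer 0 (the genesis), that layer disappears and the new event becomes the single tip on layer 1. A toy sketch of that invariant, with a BTreeMap standing in for the real structure (insert_event and these types are illustrative, not darkfi's code):

use std::collections::{BTreeMap, HashSet};

type Id = u32;

/// Stand-in for the per-DAG unreferenced-tips bookkeeping: layer number -> tip ids.
type TipsLayers = BTreeMap<u64, HashSet<Id>>;

/// Insert an event: every tip it references stops being unreferenced, and the
/// event itself becomes a tip one layer above the highest layer it referenced.
/// Layers left empty are dropped, which is why referencing genesis removes layer 0.
fn insert_event(tips: &mut TipsLayers, event_id: Id, parents: &[Id]) {
    let mut highest_parent_layer = 0;
    for (layer, ids) in tips.iter_mut() {
        for p in parents {
            if ids.remove(p) {
                highest_parent_layer = highest_parent_layer.max(*layer);
            }
        }
    }
    tips.retain(|_, ids| !ids.is_empty());
    tips.entry(highest_parent_layer + 1).or_default().insert(event_id);
}

fn main() {
    let genesis: Id = 0;
    let mut tips = TipsLayers::new();
    tips.entry(0).or_default().insert(genesis);

    // A new event referencing genesis lands on layer 1 and empties layer 0.
    insert_event(&mut tips, 1, &[genesis]);
    assert_eq!(tips.len(), 1);
    assert!(tips.last_key_value().unwrap().1.contains(&1));
    println!("tips layers: {:?}", tips);
}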
@@ -265,7 +252,6 @@ async fn eventgraph_propagation_real(ex: Arc<Executor<'static>>) {
     // ====================================================
     // 3. Assert that everyone has the new event in the DAG
     // ====================================================
-    info!("33333333");
     assert_dags(&eg_instances, 2, &mut rng).await;
 
     // ==============================================================
@@ -304,7 +290,6 @@ async fn eventgraph_propagation_real(ex: Arc<Executor<'static>>) {
     // ==========================================
     // 5. Assert that everyone has all the events
     // ==========================================
-    info!("555555555");
     assert_dags(&eg_instances, 5, &mut rng).await;
 
     // ===========================================
@@ -364,7 +349,6 @@ async fn eventgraph_propagation_real(ex: Arc<Executor<'static>>) {
     // 7. Assert that everyone has all the events
     // ==========================================
     // 5 events from 2. and 4. + 9 events from 6. = 14
-    info!("77777777");
     assert_dags(&eg_instances, 14, &mut rng).await;
 
     // ============================================================
@@ -403,7 +387,6 @@ async fn eventgraph_propagation_real(ex: Arc<Executor<'static>>) {
     // 9. Assert the new synced DAG has the same contents as others
     // ============================================================
     // 5 events from 2. and 4. + 9 events from 6. = 14
-    info!("9999999999");
     assert_dags(&eg_instances, 14, &mut rng).await;
 
     // Stop the P2P network
@@ -429,7 +412,6 @@ async fn eventgraph_chaotic_propagation_real(ex: Arc<Executor<'static>>) {
     // =========================================
     // 1. Assert that everyone's DAG is the same
     // =========================================
-    info!("another 111111111111");
     assert_dags(&eg_instances, 1, &mut rng).await;
 
     // ===========================================
@@ -448,7 +430,6 @@ async fn eventgraph_chaotic_propagation_real(ex: Arc<Executor<'static>>) {
     // ==========================================
     // 3. Assert that everyone has all the events
     // ==========================================
-    info!("another 333333333");
     assert_dags(&eg_instances, n_events + 1, &mut rng).await;
 
     // ============================================================
@@ -486,7 +467,6 @@ async fn eventgraph_chaotic_propagation_real(ex: Arc<Executor<'static>>) {
     // ============================================================
     // 5. Assert the new synced DAG has the same contents as others
     // ============================================================
-    info!("another 555555555");
     assert_dags(&eg_instances, n_events + 1, &mut rng).await;
 
     // Stop the P2P network
@@ -25,7 +25,7 @@ use std::{
 };
 
 use darkfi_serial::{deserialize, deserialize_async, serialize};
-use log::{error, info};
+use log::error;
 use sled_overlay::sled;
 use tinyjson::JsonValue;
 
@@ -125,7 +125,6 @@ pub fn generate_genesis(days_rotation: u64) -> Event {
         // Calculate the timestamp of the most recent event
         INITIAL_GENESIS + (rotations_since_genesis * days_rotation * DAY as u64)
     };
-    info!("generate ts: {}", timestamp);
     Event {
         timestamp,
         content: GENESIS_CONTENTS.to_vec(),
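
The genesis timestamp in generate_genesis is INITIAL_GENESIS plus a whole number of rotation periods of days_rotation days each, as the formula in the hunk shows. A self-contained sketch of that arithmetic follows; the INITIAL_GENESIS value and the way rotations_since_genesis is derived from the current time are assumptions for illustration, not darkfi's actual constants or code:

/// Seconds in a day.
const DAY: u64 = 86_400;
/// Hypothetical network genesis timestamp (placeholder value, not darkfi's constant).
const INITIAL_GENESIS: u64 = 1_700_000_000;

/// Reproduce the timestamp formula shown in the hunk:
/// INITIAL_GENESIS + rotations_since_genesis * days_rotation * DAY.
/// How rotations_since_genesis is derived is assumed here: whole rotation
/// periods elapsed between INITIAL_GENESIS and `now`.
fn genesis_timestamp(now: u64, days_rotation: u64) -> u64 {
    if days_rotation == 0 {
        // With no rotation the genesis timestamp never moves.
        return INITIAL_GENESIS;
    }
    let rotations_since_genesis = (now - INITIAL_GENESIS) / (days_rotation * DAY);
    INITIAL_GENESIS + rotations_since_genesis * days_rotation * DAY
}

fn main() {
    // 10.5 days after INITIAL_GENESIS with a 7-day rotation:
    // one full rotation has elapsed, so the genesis sits 7 days in.
    let now = INITIAL_GENESIS + 10 * DAY + DAY / 2;
    assert_eq!(genesis_timestamp(now, 7), INITIAL_GENESIS + 7 * DAY);
    println!("genesis ts: {}", genesis_timestamp(now, 7));
}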