Merge branch 'master' into raft_optimized

This commit is contained in:
ghassmo
2022-07-25 04:53:24 +04:00
12 changed files with 267 additions and 39 deletions

View File

@@ -47,3 +47,11 @@ seeds=["tls://irc0.dark.fi:11001", "tls://irc1.dark.fi:11001"]
#secret = "7CkVuFgwTUpJn5Sv67Q3fyEDpa28yrSeL5Hg2GqQ4jfM"
## Topic to set for the channel
#topic = "DarkFi Development HQ"
## Contacts list
# Shared secrets that encrypt direct communication between two nicknames on
# the network.
# These are in the form of secret:[nick0,nick1], which means that the same
# shared secret will be used for all the nicknames in the list.
[contact."7CkVuFgwTUpJn5Sv67Q3fyEDpa28yrSeL5Hg2GqQ4jfM"]
nicks = ["sneed", "chuck"]

View File

@@ -38,7 +38,10 @@ use crate::{
protocol_privmsg::ProtocolPrivmsg,
rpc::JsonRpcInterface,
server::IrcServerConnection,
settings::{parse_configured_channels, Args, ChannelInfo, CONFIG_FILE, CONFIG_FILE_CONTENTS},
settings::{
parse_configured_channels, parse_configured_contacts, Args, ChannelInfo, CONFIG_FILE,
CONFIG_FILE_CONTENTS,
},
};
const SIZE_OF_MSG_IDSS_BUFFER: usize = 65536;
@@ -53,6 +56,7 @@ struct Ircd {
// channels
autojoin_chans: Vec<String>,
configured_chans: FxHashMap<String, ChannelInfo>,
configured_contacts: FxHashMap<String, crypto_box::Box>,
// p2p
p2p: net::P2pPtr,
senders: SubscriberPtr<Privmsg>,
@@ -64,10 +68,19 @@ impl Ircd {
privmsgs_buffer: PrivmsgsBuffer,
autojoin_chans: Vec<String>,
configured_chans: FxHashMap<String, ChannelInfo>,
configured_contacts: FxHashMap<String, crypto_box::Box>,
p2p: net::P2pPtr,
) -> Self {
let senders = Subscriber::new();
Self { seen_msg_ids, privmsgs_buffer, autojoin_chans, configured_chans, p2p, senders }
Self {
seen_msg_ids,
privmsgs_buffer,
autojoin_chans,
configured_chans,
configured_contacts,
p2p,
senders,
}
}
fn start_p2p_receive_loop(&self, executor: Arc<Executor<'_>>, p2p_receiver: Receiver<Privmsg>) {
@@ -102,6 +115,7 @@ impl Ircd {
self.privmsgs_buffer.clone(),
self.autojoin_chans.clone(),
self.configured_chans.clone(),
self.configured_contacts.clone(),
self.p2p.clone(),
self.senders.clone(),
receiver.get_id(),
@@ -162,7 +176,9 @@ async fn realmain(settings: Args, executor: Arc<Executor<'_>>) -> Result<()> {
// Pick up channel settings from the TOML configuration
let cfg_path = get_config_path(settings.config, CONFIG_FILE)?;
let configured_chans = parse_configured_channels(&cfg_path)?;
let toml_contents = std::fs::read_to_string(cfg_path)?;
let configured_chans = parse_configured_channels(&toml_contents)?;
let configured_contacts = parse_configured_contacts(&toml_contents)?;
//
// P2p setup
@@ -256,6 +272,7 @@ async fn realmain(settings: Args, executor: Arc<Executor<'_>>) -> Result<()> {
privmsgs_buffer.clone(),
settings.autojoin.clone(),
configured_chans.clone(),
configured_contacts.clone(),
p2p.clone(),
);

View File

@@ -67,11 +67,14 @@ impl ProtocolPrivmsg {
msg.nickname = msg.nickname[..MAXIMUM_LENGTH_OF_NICKNAME].to_string();
}
if self.msg_ids.lock().await.contains(&msg.id) {
continue
}
{
let msg_ids = &mut self.msg_ids.lock().await;
if msg_ids.contains(&msg.id) {
continue
}
self.msg_ids.lock().await.push(msg.id);
msg_ids.push(msg.id);
}
// add the msg to the buffer
self.msgs.lock().await.push(msg.clone());

View File

@@ -34,6 +34,7 @@ pub struct IrcServerConnection<C: AsyncRead + AsyncWrite + Send + Unpin + 'stati
nickname: String,
auto_channels: Vec<String>,
pub configured_chans: FxHashMap<String, ChannelInfo>,
pub configured_contacts: FxHashMap<String, crypto_box::Box>,
capabilities: FxHashMap<String, bool>,
// p2p
p2p: P2pPtr,
@@ -50,6 +51,7 @@ impl<C: AsyncRead + AsyncWrite + Send + Unpin + 'static> IrcServerConnection<C>
privmsgs_buffer: PrivmsgsBuffer,
auto_channels: Vec<String>,
configured_chans: FxHashMap<String, ChannelInfo>,
configured_contacts: FxHashMap<String, crypto_box::Box>,
p2p: P2pPtr,
senders: SubscriberPtr<Privmsg>,
subscriber_id: u64,
@@ -68,6 +70,7 @@ impl<C: AsyncRead + AsyncWrite + Send + Unpin + 'static> IrcServerConnection<C>
nickname: "anon".to_string(),
auto_channels,
configured_chans,
configured_contacts,
capabilities,
p2p,
senders,
@@ -200,6 +203,12 @@ impl<C: AsyncRead + AsyncWrite + Send + Unpin + 'static> IrcServerConnection<C>
} else {
message.to_string()
};
} else {
// If we have a configured secret for this nick, we encrypt the message.
if let Some(salt_box) = self.configured_contacts.get(target) {
message = encrypt_message(salt_box, &message);
info!("(Encrypted) PRIVMSG {} :{}", target, message);
}
}
self.on_receive_privmsg(&message, target).await?;
@@ -446,13 +455,21 @@ impl<C: AsyncRead + AsyncWrite + Send + Unpin + 'static> IrcServerConnection<C>
self.reply(&msg.to_irc_msg()).await?;
return Ok(())
}
} else {
if self.is_cap_end &&
self.is_nick_init &&
(self.nickname == msg.target || self.nickname == msg.nickname)
{
if self.configured_contacts.contains_key(&msg.target) {
let salt_box = self.configured_contacts.get(&msg.target).unwrap();
if let Some(decrypted) = try_decrypt_message(&salt_box, &msg.message) {
msg.message = decrypted;
info!("Decrypted received message: {:?}", msg);
}
}
if self.is_cap_end &&
self.is_nick_init &&
(self.nickname == msg.target || self.nickname == msg.nickname)
{
self.reply(&msg.to_irc_msg()).await?;
self.reply(&msg.to_irc_msg()).await?;
}
}
Ok(())

View File

@@ -1,5 +1,3 @@
use std::path::PathBuf;
use fxhash::FxHashMap;
use log::info;
use serde::Deserialize;
@@ -83,13 +81,57 @@ impl ChannelInfo {
}
}
/// Parse the configuration file for any configured channels and return
fn salt_box_from_shared_secret(s: &str) -> Result<crypto_box::Box> {
let bytes: [u8; 32] = bs58::decode(s).into_vec()?.try_into().unwrap();
let secret = crypto_box::SecretKey::from(bytes);
let public = secret.public_key();
Ok(crypto_box::Box::new(&public, &secret))
}
/// Parse a TOML string for any configured contact list and return
/// a map containing said configurations.
pub fn parse_configured_channels(config_file: &PathBuf) -> Result<FxHashMap<String, ChannelInfo>> {
let toml_contents = std::fs::read_to_string(config_file)?;
///
/// ```toml
/// [contact."7CkVuFgwTUpJn5Sv67Q3fyEDpa28yrSeL5Hg2GqQ4jfM"]
/// nicks = ["sneed", "chuck"]
/// ```
pub fn parse_configured_contacts(data: &str) -> Result<FxHashMap<String, crypto_box::Box>> {
let mut ret = FxHashMap::default();
if let Value::Table(map) = toml::from_str(&toml_contents)? {
if let Value::Table(map) = toml::from_str(data)? {
if map.contains_key("contact") && map["contact"].is_table() {
for contact in map["contact"].as_table().unwrap() {
// (secret, nicks = [nick0, nick1])
if contact.1.as_table().unwrap().contains_key("nicks") {
if let Some(nicks) = contact.1["nicks"].as_array() {
let salt_box = salt_box_from_shared_secret(contact.0.as_str())?;
for nick in nicks {
if let Some(n) = nick.as_str() {
info!("Instantiated salt box for {}", n);
ret.insert(n.to_string(), salt_box.clone());
}
}
}
}
}
}
}
Ok(ret)
}
/// Parse a TOML string for any configured channels and return
/// a map containing said configurations.
///
/// ```toml
/// [channel."#memes"]
/// secret = "7CkVuFgwTUpJn5Sv67Q3fyEDpa28yrSeL5Hg2GqQ4jfM"
/// topic = "Dank Memes"
/// ```
pub fn parse_configured_channels(data: &str) -> Result<FxHashMap<String, ChannelInfo>> {
let mut ret = FxHashMap::default();
if let Value::Table(map) = toml::from_str(data)? {
if map.contains_key("channel") && map["channel"].is_table() {
for chan in map["channel"].as_table().unwrap() {
info!("Found configuration for channel {}", chan.0);
@@ -103,13 +145,11 @@ pub fn parse_configured_channels(config_file: &PathBuf) -> Result<FxHashMap<Stri
if chan.1.as_table().unwrap().contains_key("secret") {
// Build the NaCl box
let s = chan.1["secret"].as_str().unwrap();
let bytes: [u8; 32] = bs58::decode(s).into_vec()?.try_into().unwrap();
let secret = crypto_box::SecretKey::from(bytes);
let public = secret.public_key();
let msg_box = crypto_box::Box::new(&public, &secret);
channel_info.salt_box = Some(msg_box);
info!("Instantiated NaCl box for channel {}", chan.0);
if let Some(s) = chan.1["secret"].as_str() {
let salt_box = salt_box_from_shared_secret(s)?;
channel_info.salt_box = Some(salt_box);
info!("Instantiated NaCl box for channel {}", chan.0);
}
}
ret.insert(chan.0.to_string(), channel_info);

View File

@@ -1,18 +1,22 @@
use chrono::{Datelike, NaiveDateTime, Utc};
use serde_json::Value;
use crate::{primitives::TaskInfo, TaskEvent};
use crate::{
primitives::{State, TaskInfo},
TaskEvent,
};
/// Helper function to check task's state
fn check_task_state(task: &TaskInfo, state: &str) -> bool {
fn check_task_state(task: &TaskInfo, state: State) -> bool {
let last_state = task.events.last().unwrap_or(&TaskEvent::default()).action.clone();
state == last_state
state.to_string() == last_state
}
pub fn apply_filter(tasks: &mut Vec<TaskInfo>, filter: &str) {
match filter {
"open" => tasks.retain(|task| check_task_state(task, "open")),
"pause" => tasks.retain(|task| check_task_state(task, "pause")),
"open" => tasks.retain(|task| check_task_state(task, State::Open)),
"start" => tasks.retain(|task| check_task_state(task, State::Start)),
"pause" => tasks.retain(|task| check_task_state(task, State::Pause)),
_ if filter.len() == 4 && filter.parse::<u32>().is_ok() => {
let (month, year) =

View File

@@ -1,7 +1,7 @@
use std::{process::exit, str::FromStr};
use clap::{Parser, Subcommand};
use log::error;
use log::{error, info};
use simplelog::{ColorChoice, TermLogger, TerminalMode};
use url::Url;
@@ -21,6 +21,8 @@ use primitives::{task_from_cli, State, TaskEvent};
use util::{desc_in_editor, due_as_timestamp};
use view::{comments_as_string, print_task_info, print_task_list};
const DEFAULT_PATH: &str = "~/tau_exported_tasks";
#[derive(Parser)]
#[clap(name = "tau", version)]
struct Args {
@@ -76,6 +78,12 @@ enum TauSubcommand {
/// Tau workspace
workspace: String,
},
/// Import tasks from a specified directory.
Import { path: Option<String> },
/// Export tasks to a specified directory.
Export { path: Option<String> },
}
pub struct Tau {
@@ -153,6 +161,31 @@ async fn main() -> Result<()> {
tau.switch_ws(workspace).await?;
Ok(())
}
TauSubcommand::Export { path } => {
let path = path.unwrap_or(DEFAULT_PATH.into());
let res = tau.export_to(path.clone()).await?;
if res {
info!("Exported to {}", path);
} else {
error!("Error exporting to {}", path);
}
Ok(())
}
TauSubcommand::Import { path } => {
let path = path.unwrap_or(DEFAULT_PATH.into());
let res = tau.import_from(path.clone()).await?;
if res {
info!("Imported from {}", path);
} else {
error!("Error importing from {}", path);
}
Ok(())
}
},
None => {
let task_ids = tau.get_ids().await?;

View File

@@ -11,6 +11,15 @@ pub enum State {
Stop,
}
impl State {
pub const fn is_start(&self) -> bool {
matches!(*self, Self::Start)
}
pub const fn is_pause(&self) -> bool {
matches!(*self, Self::Pause)
}
}
impl fmt::Display for State {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
@@ -50,6 +59,7 @@ pub struct BaseTask {
#[derive(serde::Serialize, serde::Deserialize, Debug)]
pub struct TaskInfo {
pub ref_id: String,
pub workspace: String,
pub id: u32,
pub title: String,
pub desc: String,

View File

@@ -79,4 +79,24 @@ impl Tau {
Ok(())
}
/// Export tasks.
pub async fn export_to(&self, path: String) -> Result<bool> {
let req = JsonRequest::new("export", json!([path]));
let rep = self.rpc_client.request(req).await?;
debug!("Got reply: {:?}", rep);
Ok(serde_json::from_value(rep)?)
}
/// Import tasks.
pub async fn import_from(&self, path: String) -> Result<bool> {
let req = JsonRequest::new("import", json!([path]));
let rep = self.rpc_client.request(req).await?;
debug!("Got reply: {:?}", rep);
Ok(serde_json::from_value(rep)?)
}
}

View File

@@ -1,4 +1,4 @@
use std::fmt::Write;
use std::{fmt::Write, str::FromStr};
use prettytable::{
cell,
@@ -13,7 +13,7 @@ use darkfi::{
use crate::{
filter::apply_filter,
primitives::{Comment, TaskInfo},
primitives::{Comment, State, TaskInfo},
TaskEvent,
};
@@ -46,12 +46,19 @@ pub fn print_task_list(tasks: Vec<TaskInfo>, filters: Vec<String>) -> Result<()>
min_rank = last.rank;
}
let workspace = if tasks.first().is_some() {
format!("Workspace: {}", tasks.first().unwrap().workspace.clone())
} else {
format!("Workspace: ")
};
for task in tasks {
let state = task.events.last().unwrap_or(&TaskEvent::default()).action.clone();
let state = State::from_str(&state)?;
let (max_style, min_style, mid_style, gen_style) = if state == "start" {
let (max_style, min_style, mid_style, gen_style) = if state.is_start() {
("bFg", "Fc", "Fg", "Fg")
} else if state == "pause" {
} else if state.is_pause() {
("iFYBd", "iFYBd", "iFYBd", "iFYBd")
} else {
("", "", "", "")
@@ -76,6 +83,15 @@ pub fn print_task_list(tasks: Vec<TaskInfo>, filters: Vec<String>) -> Result<()>
]));
}
let mut ws_table = table!([Fb => workspace]);
ws_table.set_format(
FormatBuilder::new()
.padding(1, 1)
.separators(&[LinePosition::Bottom], LineSeparator::new('-', ' ', ' ', ' '))
.build(),
);
ws_table.printstd();
table.printstd();
Ok(())
}

View File

@@ -1,5 +1,5 @@
use async_std::sync::{Arc, Mutex};
use std::path::PathBuf;
use std::{fs::create_dir_all, path::PathBuf};
use async_trait::async_trait;
use fxhash::FxHashMap;
@@ -12,7 +12,7 @@ use darkfi::{
jsonrpc::{ErrorCode, JsonError, JsonRequest, JsonResult},
server::RequestHandler,
},
util::Timestamp,
util::{expand_path, Timestamp},
Error,
};
@@ -58,6 +58,8 @@ impl RequestHandler for JsonRpcInterface {
Some("set_comment") => self.set_comment(params).await,
Some("get_task_by_id") => self.get_task_by_id(params).await,
Some("switch_ws") => self.switch_ws(params).await,
Some("export") => self.export_to(params).await,
Some("import") => self.import_from(params).await,
Some(_) | None => return JsonError::new(ErrorCode::MethodNotFound, None, req.id).into(),
};
@@ -235,6 +237,60 @@ impl JsonRpcInterface {
Ok(json!(true))
}
// RPCAPI:
// Export tasks.
// --> {"jsonrpc": "2.0", "method": "export", "params": [path], "id": 1}
// <-- {"jsonrpc": "2.0", "result": true, "id": 1}
async fn export_to(&self, params: &[Value]) -> TaudResult<Value> {
debug!(target: "tau", "JsonRpc::export_to() params {:?}", params);
if params.len() != 1 {
return Err(TaudError::InvalidData("len of params should be 1".into()))
}
if !params[0].is_string() {
return Err(TaudError::InvalidData("Invalid path".into()))
}
let path = expand_path(params[0].as_str().unwrap())?.join("exported_tasks");
// mkdir datastore_path if not exists
create_dir_all(path.join("month")).map_err(Error::from)?;
create_dir_all(path.join("task")).map_err(Error::from)?;
let mt = MonthTasks::load_or_create(None, &self.dataset_path)?;
let tasks = mt.objects(&self.dataset_path)?;
for task in tasks {
task.save(&path)?;
}
Ok(json!(true))
}
// RPCAPI:
// Import tasks.
// --> {"jsonrpc": "2.0", "method": "import", "params": [path], "id": 1}
// <-- {"jsonrpc": "2.0", "result": true, "id": 1}
async fn import_from(&self, params: &[Value]) -> TaudResult<Value> {
debug!(target: "tau", "JsonRpc::import_from() params {:?}", params);
if params.len() != 1 {
return Err(TaudError::InvalidData("len of params should be 1".into()))
}
if !params[0].is_string() {
return Err(TaudError::InvalidData("Invalid path".into()))
}
let path = expand_path(params[0].as_str().unwrap())?.join("exported_tasks");
let mt = MonthTasks::load_or_create(None, &path)?;
let tasks = mt.objects(&path)?;
for task in tasks {
self.notify_queue_sender.send(task).await.map_err(Error::from)?;
}
Ok(json!(true))
}
fn load_task_by_id(&self, task_id: &Value, ws: String) -> TaudResult<TaskInfo> {
let task_id: u64 = serde_json::from_value(task_id.clone())?;
let tasks = MonthTasks::load_current_open_tasks(&self.dataset_path, ws)?;

View File

@@ -4,7 +4,11 @@ pub mod util;
pub use fixed_bases::{NullifierK, OrchardFixedBases, OrchardFixedBasesFull, ValueCommitV, H};
pub const DRK_SCHNORR_DOMAIN: &[u8] = b"DarkFi_Schnorr";
/// Domain prefix used for Schnorr signatures, with `hash_to_scalar`.
pub const DRK_SCHNORR_DOMAIN: &[u8] = b"DarkFi:Schnorr";
/// Domain prefix used for block hashes, with `hash_to_curve`.
pub const BLOCK_HASH_DOMAIN: &str = "DarkFi:Block";
pub const MERKLE_DEPTH_ORCHARD: usize = 32;