From a5360368aa7581b5bd72fe7773c15f8772c0b20e Mon Sep 17 00:00:00 2001
From: parazyd
Date: Tue, 22 Aug 2023 19:53:59 +0200
Subject: [PATCH] bin: Remove async-std dependencies.

---
 bin/darkfid/Cargo.toml                      |   1 -
 bin/darkfid/src/main.rs                     |  11 +-
 bin/darkfid/src/rpc_blockchain.rs           |   2 +-
 bin/darkfid/src/rpc_tx.rs                   |   4 +-
 bin/darkfid2/Cargo.toml                     |   1 -
 bin/darkfid2/src/main.rs                    |   8 +-
 bin/darkfid2/src/proto/protocol_block.rs    |   3 +-
 bin/darkfid2/src/proto/protocol_proposal.rs |   3 +-
 bin/darkfid2/src/proto/protocol_sync.rs     |   3 +-
 bin/darkfid2/src/proto/protocol_tx.rs       |   3 +-
 bin/darkfid2/src/tests/harness.rs           |   3 +-
 bin/darkfid2/src/tests/mod.rs               |   5 +-
 bin/darkirc/Cargo.toml                      |   1 -
 bin/darkirc/src/irc/client.rs               |  17 +-
 bin/darkirc/src/irc/server.rs               |  14 +-
 bin/darkirc/src/main.rs                     |  15 +-
 bin/darkirc/src/settings.rs                 |   7 +-
 bin/dhtd/dhtd/Cargo.toml                    |   1 -
 bin/faucetd/Cargo.toml                      |   1 -
 bin/faucetd/src/main.rs                     |  14 +-
 bin/fud/fud/Cargo.toml                      |   1 -
 bin/fud/fud/src/main.rs                     |  19 +-
 bin/fud/fud/src/proto.rs                    |   5 +-
 bin/genev/genev-cli/Cargo.toml              |   2 +-
 bin/genev/genev-cli/src/main.rs             |  62 +--
 bin/genev/genevd/Cargo.toml                 |   1 -
 bin/genev/genevd/src/main.rs                |  11 +-
 bin/genev/genevd/src/rpc.rs                 |   4 +-
 bin/lilith/Cargo.toml                       |   1 -
 bin/lilith/src/main.rs                      |   8 +-
 bin/tau/tau-cli/Cargo.toml                  |   4 +-
 bin/tau/tau-cli/src/main.rs                 | 417 ++++++++++----------
 bin/tau/taud/Cargo.toml                     |   1 -
 bin/tau/taud/src/jsonrpc.rs                 |   2 +-
 bin/tau/taud/src/main.rs                    |  12 +-
 35 files changed, 331 insertions(+), 336 deletions(-)

diff --git a/bin/darkfid/Cargo.toml b/bin/darkfid/Cargo.toml
index cf92d3d8b..f92271229 100644
--- a/bin/darkfid/Cargo.toml
+++ b/bin/darkfid/Cargo.toml
@@ -21,7 +21,6 @@ tinyjson = "2.5.1"
 url = "2.4.0"
 
 # Daemon
-async-std = "1.12.0"
 easy-parallel = "3.3.0"
 signal-hook-async-std = "0.2.2"
 signal-hook = "0.3.17"
diff --git a/bin/darkfid/src/main.rs b/bin/darkfid/src/main.rs
index 33ac38d52..b820cd33f 100644
--- a/bin/darkfid/src/main.rs
+++ b/bin/darkfid/src/main.rs
@@ -16,15 +16,12 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
-use std::{path::Path, str::FromStr};
+use std::{path::Path, str::FromStr, sync::Arc};
 
-use async_std::{
-    stream::StreamExt,
-    sync::{Arc, Mutex},
-};
 use async_trait::async_trait;
 use darkfi_sdk::crypto::PublicKey;
 use log::{error, info};
+use smol::{lock::Mutex, stream::StreamExt};
 use structopt_toml::{serde::Deserialize, structopt::StructOpt, StructOptToml};
 use url::Url;
 
@@ -501,7 +498,7 @@ async fn realmain(args: Args, ex: Arc>) -> Result<()> {
                 }
             },
             Error::ProposalTaskStopped,
-            ex,
+            ex.clone(),
         );
         Some(task)
     } else {
@@ -510,7 +507,7 @@ async fn realmain(args: Args, ex: Arc>) -> Result<()> {
     };
 
     // Signal handling for graceful termination.
-    let (signals_handler, signals_task) = SignalHandler::new()?;
+    let (signals_handler, signals_task) = SignalHandler::new(ex)?;
     signals_handler.wait_termination(signals_task).await?;
     info!("Caught termination signal, cleaning up and exiting...");
 
diff --git a/bin/darkfid/src/rpc_blockchain.rs b/bin/darkfid/src/rpc_blockchain.rs
index e1f290069..575abfa89 100644
--- a/bin/darkfid/src/rpc_blockchain.rs
+++ b/bin/darkfid/src/rpc_blockchain.rs
@@ -55,7 +55,7 @@ impl Darkfid {
             return JsonError::new(InvalidParams, None, id).into()
         }
 
-        let slot = match u64::from_str_radix(params[0].get::<String>().unwrap(), 10) {
+        let slot = match params[0].get::<String>().unwrap().parse::<u64>() {
             Ok(v) => v,
             Err(_) => return JsonError::new(ParseError, None, id).into(),
         };
diff --git a/bin/darkfid/src/rpc_tx.rs b/bin/darkfid/src/rpc_tx.rs
index b19deea65..38ed6bf54 100644
--- a/bin/darkfid/src/rpc_tx.rs
+++ b/bin/darkfid/src/rpc_tx.rs
@@ -50,7 +50,7 @@ impl Darkfid {
 
         // Try to deserialize the transaction
         let tx_enc = params[0].get::<String>().unwrap();
-        let tx_bytes = match base64::decode(&tx_enc.trim()) {
+        let tx_bytes = match base64::decode(tx_enc.trim()) {
             Some(v) => v,
             None => {
                 error!("[RPC] tx.simulate: Failed decoding base64 transaction");
@@ -106,7 +106,7 @@ impl Darkfid {
 
         // Try to deserialize the transaction
         let tx_enc = params[0].get::<String>().unwrap();
-        let tx_bytes = match base64::decode(&tx_enc.trim()) {
+        let tx_bytes = match base64::decode(tx_enc.trim()) {
             Some(v) => v,
             None => {
                 error!("[RPC] tx.broadcast: Failed decoding base64 transaction");
diff --git a/bin/darkfid2/Cargo.toml b/bin/darkfid2/Cargo.toml
index d1e21ba22..1286478b5 100644
--- a/bin/darkfid2/Cargo.toml
+++ b/bin/darkfid2/Cargo.toml
@@ -29,7 +29,6 @@ tinyjson = "2.5.1"
 url = "2.4.0"
 
 # Daemon
-async-std = "1.12.0"
 easy-parallel = "3.3.0"
 signal-hook-async-std = "0.2.2"
 signal-hook = "0.3.17"
diff --git a/bin/darkfid2/src/main.rs b/bin/darkfid2/src/main.rs
index a868fe363..7f1b21d0e 100644
--- a/bin/darkfid2/src/main.rs
+++ b/bin/darkfid2/src/main.rs
@@ -16,10 +16,10 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
-use std::collections::HashMap;
+use std::{collections::HashMap, sync::Arc};
 
-use async_std::{stream::StreamExt, sync::Arc};
 use log::{error, info};
+use smol::stream::StreamExt;
 use structopt_toml::{serde::Deserialize, structopt::StructOpt, StructOptToml};
 use url::Url;
 
@@ -233,7 +233,7 @@ async fn realmain(args: Args, ex: Arc>) -> Result<()> {
                 }
             },
             Error::P2PNetworkStopped,
-            ex,
+            ex.clone(),
         );
     } else {
         info!("Not starting consensus P2P network");
@@ -250,7 +250,7 @@ async fn realmain(args: Args, ex: Arc>) -> Result<()> {
     darkfid.validator.write().await.purge_pending_txs().await?;
 
     // Signal handling for graceful termination.
-    let (signals_handler, signals_task) = SignalHandler::new()?;
+    let (signals_handler, signals_task) = SignalHandler::new(ex)?;
     signals_handler.wait_termination(signals_task).await?;
     info!(target: "darkfid", "Caught termination signal, cleaning up and exiting...");
 
diff --git a/bin/darkfid2/src/proto/protocol_block.rs b/bin/darkfid2/src/proto/protocol_block.rs
index c33e18516..d51012c7d 100644
--- a/bin/darkfid2/src/proto/protocol_block.rs
+++ b/bin/darkfid2/src/proto/protocol_block.rs
@@ -16,7 +16,8 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
-use async_std::sync::Arc;
+use std::sync::Arc;
+
 use async_trait::async_trait;
 use log::debug;
 use smol::Executor;
diff --git a/bin/darkfid2/src/proto/protocol_proposal.rs b/bin/darkfid2/src/proto/protocol_proposal.rs
index aefddd729..61dd72ec2 100644
--- a/bin/darkfid2/src/proto/protocol_proposal.rs
+++ b/bin/darkfid2/src/proto/protocol_proposal.rs
@@ -16,7 +16,8 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
-use async_std::sync::Arc;
+use std::sync::Arc;
+
 use async_trait::async_trait;
 use log::debug;
 use smol::Executor;
diff --git a/bin/darkfid2/src/proto/protocol_sync.rs b/bin/darkfid2/src/proto/protocol_sync.rs
index 7c46b3096..1f2037406 100644
--- a/bin/darkfid2/src/proto/protocol_sync.rs
+++ b/bin/darkfid2/src/proto/protocol_sync.rs
@@ -16,7 +16,8 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
-use async_std::sync::Arc;
+use std::sync::Arc;
+
 use async_trait::async_trait;
 use log::{debug, error};
 use smol::Executor;
diff --git a/bin/darkfid2/src/proto/protocol_tx.rs b/bin/darkfid2/src/proto/protocol_tx.rs
index 8e7ec6366..be57694b5 100644
--- a/bin/darkfid2/src/proto/protocol_tx.rs
+++ b/bin/darkfid2/src/proto/protocol_tx.rs
@@ -16,7 +16,8 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
-use async_std::sync::Arc;
+use std::sync::Arc;
+
 use async_trait::async_trait;
 use log::debug;
 use smol::Executor;
diff --git a/bin/darkfid2/src/tests/harness.rs b/bin/darkfid2/src/tests/harness.rs
index bcaced70f..2d05a3124 100644
--- a/bin/darkfid2/src/tests/harness.rs
+++ b/bin/darkfid2/src/tests/harness.rs
@@ -16,9 +16,8 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
-use std::collections::HashMap;
+use std::{collections::HashMap, sync::Arc};
 
-use async_std::sync::Arc;
 use darkfi::{
     blockchain::{BlockInfo, Header},
     net::Settings,
diff --git a/bin/darkfid2/src/tests/mod.rs b/bin/darkfid2/src/tests/mod.rs
index c8392f18b..751c9fcec 100644
--- a/bin/darkfid2/src/tests/mod.rs
+++ b/bin/darkfid2/src/tests/mod.rs
@@ -16,7 +16,8 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
-use async_std::sync::Arc;
+use std::sync::Arc;
+
 use darkfi::{net::Settings, Result};
 use darkfi_contract_test_harness::init_logger;
 use smol::Executor;
@@ -74,7 +75,7 @@ async fn sync_blocks_real(ex: Arc>) -> Result<()> {
 #[test]
 fn sync_blocks() -> Result<()> {
     let ex = Arc::new(Executor::new());
-    let (signal, shutdown) = async_std::channel::unbounded::<()>();
+    let (signal, shutdown) = smol::channel::unbounded::<()>();
 
     easy_parallel::Parallel::new().each(0..4, |_| smol::block_on(ex.run(shutdown.recv()))).finish(
         || {
diff --git a/bin/darkirc/Cargo.toml b/bin/darkirc/Cargo.toml
index fee997e14..1f35b2c0c 100644
--- a/bin/darkirc/Cargo.toml
+++ b/bin/darkirc/Cargo.toml
@@ -35,7 +35,6 @@ tinyjson= "2.5.1"
 toml = "0.7.6"
 
 # Daemon
-async-std = "1.12.0"
 easy-parallel = "3.3.0"
 signal-hook-async-std = "0.2.2"
 signal-hook = "0.3.17"
diff --git a/bin/darkirc/src/irc/client.rs b/bin/darkirc/src/irc/client.rs
index db344dd06..c66547e76 100644
--- a/bin/darkirc/src/irc/client.rs
+++ b/bin/darkirc/src/irc/client.rs
@@ -16,21 +16,20 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
-use std::{collections::HashSet, net::SocketAddr};
-
-use async_std::sync::{Arc, Mutex};
-use futures::{
-    io::{BufReader, ReadHalf, WriteHalf},
-    AsyncBufReadExt, AsyncRead, AsyncWrite, AsyncWriteExt, FutureExt,
-};
-
-use log::{debug, error, info, warn};
+use std::{collections::HashSet, sync::Arc};
 
 use darkfi::{
     event_graph::{model::Event, EventMsg},
     system::Subscription,
     Error, Result,
 };
+use futures::FutureExt;
+use log::{debug, error, info, warn};
+use smol::{
+    io::{AsyncBufReadExt, AsyncRead, AsyncWrite, AsyncWriteExt, BufReader, ReadHalf, WriteHalf},
+    lock::Mutex,
+    net::SocketAddr,
+};
 
 use crate::{
     crypto::{decrypt_privmsg, decrypt_target, encrypt_privmsg},
diff --git a/bin/darkirc/src/irc/server.rs b/bin/darkirc/src/irc/server.rs
index 503f61a7a..450075b86 100644
--- a/bin/darkirc/src/irc/server.rs
+++ b/bin/darkirc/src/irc/server.rs
@@ -16,15 +16,15 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
-use std::{fs::File, net::SocketAddr};
+use std::{fs::File, sync::Arc};
 
 use async_rustls::{rustls, TlsAcceptor};
-use async_std::{
-    net::TcpListener,
-    sync::{Arc, Mutex},
-};
-use futures::{io::BufReader, AsyncRead, AsyncReadExt, AsyncWrite};
 use log::{error, info};
+use smol::{
+    io::{self, AsyncRead, AsyncWrite, BufReader},
+    lock::Mutex,
+    net::{SocketAddr, TcpListener},
+};
 
 use darkfi::{
     event_graph::{
@@ -260,7 +260,7 @@ impl IrcServer {
         notifier: smol::channel::Sender<(NotifierMsg, usize)>,
         executor: Arc>,
     ) -> Result<()> {
-        let (reader, writer) = stream.split();
+        let (reader, writer) = io::split(stream);
         let reader = BufReader::new(reader);
 
         // Subscription for the new client
diff --git a/bin/darkirc/src/main.rs b/bin/darkirc/src/main.rs
index b71cbf2ec..7546fb856 100644
--- a/bin/darkirc/src/main.rs
+++ b/bin/darkirc/src/main.rs
@@ -16,18 +16,13 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
-use std::{collections::HashMap, fs::create_dir_all};
-
-use async_std::{
-    stream::StreamExt,
-    sync::{Arc, Mutex},
-    task,
-};
+use std::{collections::HashMap, sync::Arc};
 
 use chrono::{Duration, Utc};
 use irc::ClientSubMsg;
 use log::{debug, error, info};
 use rand::rngs::OsRng;
+use smol::{fs::create_dir_all, lock::Mutex, stream::StreamExt};
 use structopt_toml::StructOptToml;
 use tinyjson::JsonValue;
 
@@ -99,12 +94,12 @@ async fn realmain(settings: Args, executor: Arc>) -> Res
     let datastore_path = expand_path(&settings.datastore)?;
 
     // mkdir datastore_path if not exists
-    create_dir_all(datastore_path.clone())?;
+    create_dir_all(datastore_path.clone()).await?;
 
     // Signal handling for config reload and graceful termination.
-    let (signals_handler, signals_task) = SignalHandler::new()?;
+    let (signals_handler, signals_task) = SignalHandler::new(executor.clone())?;
     let client_sub = Subscriber::new();
-    task::spawn(parse_signals(signals_handler.sighup_sub.clone(), client_sub.clone()));
+    executor.spawn(parse_signals(signals_handler.sighup_sub.clone(), client_sub.clone())).detach();
 
     ////////////////////
     // Generate new keypair and exit
diff --git a/bin/darkirc/src/settings.rs b/bin/darkirc/src/settings.rs
index 932078d5a..98908b1e8 100644
--- a/bin/darkirc/src/settings.rs
+++ b/bin/darkirc/src/settings.rs
@@ -16,11 +16,14 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
-use async_std::sync::Arc;
+use std::{
+    collections::{HashMap, HashSet},
+    sync::Arc,
+};
+
 use crypto_box::ChaChaBox;
 use log::{info, warn};
 use serde::{self, Deserialize};
-use std::collections::{HashMap, HashSet};
 use structopt::StructOpt;
 use structopt_toml::StructOptToml;
 use toml::Value;
diff --git a/bin/dhtd/dhtd/Cargo.toml b/bin/dhtd/dhtd/Cargo.toml
index e9fce7851..d999e3c4b 100644
--- a/bin/dhtd/dhtd/Cargo.toml
+++ b/bin/dhtd/dhtd/Cargo.toml
@@ -9,7 +9,6 @@ license = "AGPL-3.0-only"
 edition = "2021"
 
 [dependencies]
-async-std = {version = "1.12.0", features = ["attributes"]}
 async-trait = "0.1.73"
 blake3 = "1.4.1"
 darkfi = {path = "../../../", features = ["dht"]}
diff --git a/bin/faucetd/Cargo.toml b/bin/faucetd/Cargo.toml
index 2174261b6..798134b74 100644
--- a/bin/faucetd/Cargo.toml
+++ b/bin/faucetd/Cargo.toml
@@ -24,7 +24,6 @@ tinyjson = "2.5.1"
 url = "2.4.0"
 
 # Daemon
-async-std = "1.12.0"
 easy-parallel = "3.3.0"
 signal-hook-async-std = "0.2.2"
 signal-hook = "0.3.17"
diff --git a/bin/faucetd/src/main.rs b/bin/faucetd/src/main.rs
index 3c4effca6..9c4321dc0 100644
--- a/bin/faucetd/src/main.rs
+++ b/bin/faucetd/src/main.rs
@@ -16,12 +16,8 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
-use std::{collections::HashMap, path::Path, str::FromStr};
+use std::{collections::HashMap, path::Path, str::FromStr, sync::Arc};
 
-use async_std::{
-    stream::StreamExt,
-    sync::{Arc, Mutex, RwLock},
-};
 use async_trait::async_trait;
 use chrono::Utc;
 use darkfi_money_contract::{
@@ -45,6 +41,10 @@ use darkfi_sdk::{
 use darkfi_serial::{deserialize, serialize, Encodable};
 use log::{debug, error, info};
 use rand::rngs::OsRng;
+use smol::{
+    lock::{Mutex, RwLock},
+    stream::StreamExt,
+};
 use structopt_toml::{serde::Deserialize, structopt::StructOpt, StructOptToml};
 use tinyjson::JsonValue;
 use url::Url;
@@ -774,7 +774,7 @@ async fn realmain(args: Args, ex: Arc>) -> Result<()> {
             }
         },
         Error::P2PNetworkStopped,
-        ex,
+        ex.clone(),
     );
 
     // TODO: I think this is not needed anymore
@@ -787,7 +787,7 @@ async fn realmain(args: Args, ex: Arc>) -> Result<()> {
     }
 
     // Signal handling for graceful termination.
-    let (signals_handler, signals_task) = SignalHandler::new()?;
+    let (signals_handler, signals_task) = SignalHandler::new(ex)?;
     signals_handler.wait_termination(signals_task).await?;
     info!(target: "faucetd", "Caught termination signal, cleaning up and exiting...");
 
diff --git a/bin/fud/fud/Cargo.toml b/bin/fud/fud/Cargo.toml
index 41fcf49f8..e60f8c554 100644
--- a/bin/fud/fud/Cargo.toml
+++ b/bin/fud/fud/Cargo.toml
@@ -21,7 +21,6 @@ tinyjson = "2.5.1"
 url = "2.4.0"
 
 # Daemon
-async-std = "1.12.0"
 easy-parallel = "3.3.0"
 signal-hook-async-std = "0.2.2"
 signal-hook = "0.3.17"
diff --git a/bin/fud/fud/src/main.rs b/bin/fud/fud/src/main.rs
index b9000fafc..b86cfc388 100644
--- a/bin/fud/fud/src/main.rs
+++ b/bin/fud/fud/src/main.rs
@@ -16,17 +16,14 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
-use std::collections::{HashMap, HashSet};
-
-use async_std::{
-    channel,
-    fs::File,
-    stream::StreamExt,
-    sync::{Arc, RwLock},
+use std::{
+    collections::{HashMap, HashSet},
+    sync::Arc,
 };
+
 use async_trait::async_trait;
 use log::{debug, error, info, warn};
-use smol::Executor;
+use smol::{channel, fs::File, lock::RwLock, stream::StreamExt, Executor};
 use structopt_toml::{structopt::StructOpt, StructOptToml};
 use tinyjson::JsonValue;
 use url::Url;
@@ -528,7 +525,7 @@ async fn realmain(args: Args, ex: Arc>) -> Result<()> {
     let chunks_router = Arc::new(RwLock::new(HashMap::new()));
 
     info!("Instantiating Geode instance");
-    let geode = Geode::new(&basedir.into()).await?;
+    let geode = Geode::new(&basedir).await?;
 
     info!("Instantiating P2P network");
     let p2p = P2p::new(args.net.into(), ex.clone()).await;
@@ -608,11 +605,11 @@ async fn realmain(args: Args, ex: Arc>) -> Result<()> {
             }
         },
         Error::P2PNetworkStopped,
-        ex,
+        ex.clone(),
     );
 
     // Signal handling for graceful termination.
-    let (signals_handler, signals_task) = SignalHandler::new()?;
+    let (signals_handler, signals_task) = SignalHandler::new(ex)?;
     signals_handler.wait_termination(signals_task).await?;
     info!("Caught termination signal, cleaning up and exiting...");
 
diff --git a/bin/fud/fud/src/proto.rs b/bin/fud/fud/src/proto.rs
index d83e99a59..5de72a679 100644
--- a/bin/fud/fud/src/proto.rs
+++ b/bin/fud/fud/src/proto.rs
@@ -16,9 +16,8 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
-use std::collections::HashSet;
+use std::{collections::HashSet, sync::Arc};
 
-use async_std::{fs::File, io::ReadExt, sync::Arc};
 use async_trait::async_trait;
 use darkfi::{
     geode::MAX_CHUNK_SIZE,
@@ -31,7 +30,7 @@ use darkfi::{
 };
 use darkfi_serial::{SerialDecodable, SerialEncodable};
 use log::{debug, error};
-use smol::Executor;
+use smol::{fs::File, io::AsyncReadExt, Executor};
 use url::Url;
 
 use super::Fud;
diff --git a/bin/genev/genev-cli/Cargo.toml b/bin/genev/genev-cli/Cargo.toml
index fe6f5e345..da9b19ba5 100644
--- a/bin/genev/genev-cli/Cargo.toml
+++ b/bin/genev/genev-cli/Cargo.toml
@@ -14,7 +14,7 @@ genevd = {path = "../genevd"}
 darkfi = {path = "../../../", features = ["event-graph", "rpc", "bs58"]}
 darkfi-serial = {path = "../../../src/serial"}
 
-async-std = {version = "1.12.0", features = ["attributes"]}
+smol = "1.3.0"
 clap = {version = "4.3.22", features = ["derive"]}
 libsqlite3-sys = {version = "0.26.0", features = ["bundled-sqlcipher-vendored-openssl"]}
 log = "0.4.20"
diff --git a/bin/genev/genev-cli/src/main.rs b/bin/genev/genev-cli/src/main.rs
index e4ca6591d..9133550ac 100644
--- a/bin/genev/genev-cli/src/main.rs
+++ b/bin/genev/genev-cli/src/main.rs
@@ -16,6 +16,8 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
+use std::sync::Arc;
+
 use clap::{Parser, Subcommand};
 use darkfi::{
     rpc::client::RpcClient,
     Result,
 };
 use simplelog::{ColorChoice, TermLogger, TerminalMode};
+use smol::Executor;
 use url::Url;
 
 use genevd::GenEvent;
@@ -52,44 +55,47 @@ enum SubCmd {
     List,
 }
 
-#[async_std::main]
-async fn main() -> Result<()> {
+fn main() -> Result<()> {
     let args = Args::parse();
 
     let log_level = get_log_level(args.verbose);
     let log_config = get_log_config(args.verbose);
     TermLogger::init(log_level, log_config, TerminalMode::Mixed, ColorChoice::Auto)?;
 
-    let rpc_client = RpcClient::new(args.endpoint, None).await?;
-    let gen = Gen { rpc_client };
+    let executor = Arc::new(Executor::new());
 
-    match args.command {
-        Some(subcmd) => match subcmd {
-            SubCmd::Add { values } => {
-                let event = GenEvent {
-                    nick: values[0].clone(),
-                    title: values[1].clone(),
-                    text: values[2..].join(" "),
-                };
+    smol::block_on(executor.run(async {
+        let rpc_client = RpcClient::new(args.endpoint, executor.clone()).await?;
+        let gen = Gen { rpc_client };
 
-                return gen.add(event).await
-            }
+        match args.command {
+            Some(subcmd) => match subcmd {
+                SubCmd::Add { values } => {
+                    let event = GenEvent {
+                        nick: values[0].clone(),
+                        title: values[1].clone(),
+                        text: values[2..].join(" "),
+                    };
 
-            SubCmd::List => {
-                let events = gen.list().await?;
-                for event in events {
-                    println!("=============================");
-                    println!(
-                        "- nickname: {}, title: {}, text: {}",
-                        event.action.nick, event.action.title, event.action.text
-                    );
+                    return gen.add(event).await
                 }
-            }
-        },
-        None => println!("none"),
-    }
 
-    gen.close_connection().await?;
+                SubCmd::List => {
+                    let events = gen.list().await?;
+                    for event in events {
+                        println!("=============================");
+                        println!(
+                            "- nickname: {}, title: {}, text: {}",
+                            event.action.nick, event.action.title, event.action.text
+                        );
+                    }
+                }
+            },
+            None => println!("none"),
+        }
 
-    Ok(())
+        gen.close_connection().await?;
+
+        Ok(())
+    }))
 }
diff --git a/bin/genev/genevd/Cargo.toml b/bin/genev/genevd/Cargo.toml
index 13b4cf636..ee3fbd7a2 100644
--- a/bin/genev/genevd/Cargo.toml
+++ b/bin/genev/genevd/Cargo.toml
@@ -28,7 +28,6 @@ tinyjson = "2.5.1"
 url = "2.4.0"
 
 # Daemon
-async-std = "1.12.0"
 easy-parallel = "3.3.0"
 signal-hook-async-std = "0.2.2"
 signal-hook = "0.3.17"
diff --git a/bin/genev/genevd/src/main.rs b/bin/genev/genevd/src/main.rs
index 9c282ada9..453cd38a5 100644
--- a/bin/genev/genevd/src/main.rs
+++ b/bin/genev/genevd/src/main.rs
@@ -16,10 +16,8 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
-use async_std::{
-    stream::StreamExt,
-    sync::{Arc, Mutex},
-};
+use std::sync::Arc;
+
 use darkfi::{
     async_daemonize, cli_desc,
     event_graph::{
@@ -35,6 +33,7 @@ use darkfi::{
 };
 use genevd::GenEvent;
 use log::{error, info};
+use smol::{lock::Mutex, stream::StreamExt};
 use structopt_toml::{serde::Deserialize, structopt::StructOpt, StructOptToml};
 use url::Url;
 
@@ -174,11 +173,11 @@ async fn realmain(args: Args, executor: Arc>) -> Result<
             }
         },
         Error::RPCServerStopped,
-        executor,
+        executor.clone(),
     );
 
     // Signal handling for graceful termination.
-    let (signals_handler, signals_task) = SignalHandler::new()?;
+    let (signals_handler, signals_task) = SignalHandler::new(executor)?;
     signals_handler.wait_termination(signals_task).await?;
     info!("Caught termination signal, cleaning up and exiting...");
 
diff --git a/bin/genev/genevd/src/rpc.rs b/bin/genev/genevd/src/rpc.rs
index 1f09df9ac..13280e6c0 100644
--- a/bin/genev/genevd/src/rpc.rs
+++ b/bin/genev/genevd/src/rpc.rs
@@ -16,9 +16,11 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
-use async_std::sync::{Arc, Mutex};
+use std::sync::Arc;
+
 use async_trait::async_trait;
 use log::debug;
+use smol::lock::Mutex;
 use tinyjson::JsonValue;
 
 use darkfi::{
diff --git a/bin/lilith/Cargo.toml b/bin/lilith/Cargo.toml
index 3a3af9b54..88b77907d 100644
--- a/bin/lilith/Cargo.toml
+++ b/bin/lilith/Cargo.toml
@@ -22,7 +22,6 @@ toml = "0.7.6"
 url = "2.4.0"
 
 # Daemon
-async-std = "1.12.0"
 easy-parallel = "3.3.0"
 signal-hook-async-std = "0.2.2"
 signal-hook = "0.3.17"
diff --git a/bin/lilith/src/main.rs b/bin/lilith/src/main.rs
index b2a6bf8a5..8cbf0ee1c 100644
--- a/bin/lilith/src/main.rs
+++ b/bin/lilith/src/main.rs
@@ -20,14 +20,14 @@ use std::{
     collections::{HashMap, HashSet},
     path::Path,
     process::exit,
+    sync::Arc,
 };
 
-use async_std::{stream::StreamExt, sync::Arc};
 use async_trait::async_trait;
 use futures::future::join_all;
 use log::{debug, error, info, warn};
 use semver::Version;
-use smol::Executor;
+use smol::{stream::StreamExt, Executor};
 use structopt::StructOpt;
 use structopt_toml::StructOptToml;
 use tinyjson::JsonValue;
@@ -481,11 +481,11 @@ async fn realmain(args: Args, ex: Arc>) -> Result<()> {
             }
         },
         Error::RPCServerStopped,
-        ex,
+        ex.clone(),
     );
 
     // Signal handling for graceful termination.
-    let (signals_handler, signals_task) = SignalHandler::new()?;
+    let (signals_handler, signals_task) = SignalHandler::new(ex)?;
     signals_handler.wait_termination(signals_task).await?;
     info!(target: "lilith", "Caught termination signal, cleaning up and exiting...");
 
diff --git a/bin/tau/tau-cli/Cargo.toml b/bin/tau/tau-cli/Cargo.toml
index 7d28329af..c9a10f941 100644
--- a/bin/tau/tau-cli/Cargo.toml
+++ b/bin/tau/tau-cli/Cargo.toml
@@ -13,9 +13,6 @@ libc = "0.2.147"
 darkfi = {path = "../../../", features = ["rpc"]}
 taud = {path = "../taud"}
 
-# Async
-async-std = {version = "1.12.0", features = ["attributes"]}
-
 # Misc
 chrono = "0.4.26"
 colored = "2.0.4"
@@ -23,6 +20,7 @@ libsqlite3-sys = {version = "0.26.0", features = ["bundled-sqlcipher-vendored-op
 log = "0.4.20"
 prettytable-rs = "0.10.0"
 simplelog = "0.12.1"
+smol = "1.3.0"
 term_grid = { git = "https://github.com/Dastan-glitch/rust-term-grid.git" }
 textwrap = "0.16.0"
 url = "2.4.0"
diff --git a/bin/tau/tau-cli/src/main.rs b/bin/tau/tau-cli/src/main.rs
index 75fc2dd9c..8bc46fe6b 100644
--- a/bin/tau/tau-cli/src/main.rs
+++ b/bin/tau/tau-cli/src/main.rs
@@ -16,11 +16,12 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
-use std::process::exit;
+use std::{process::exit, sync::Arc};
 
 use clap::{Parser, Subcommand};
 use log::{error, info};
 use simplelog::{ColorChoice, TermLogger, TerminalMode};
+use smol::Executor;
 use url::Url;
 
 use darkfi::{
@@ -160,244 +161,250 @@ pub struct Tau {
     pub rpc_client: RpcClient,
 }
 
-#[async_std::main]
-async fn main() -> Result<()> {
+fn main() -> Result<()> {
     let args = Args::parse();
 
     let log_level = get_log_level(args.verbose);
     let log_config = get_log_config(args.verbose);
     TermLogger::init(log_level, log_config, TerminalMode::Mixed, ColorChoice::Auto)?;
 
-    let rpc_client = RpcClient::new(args.endpoint, None).await?;
-    let tau = Tau { rpc_client };
+    let executor = Arc::new(Executor::new());
 
-    let mut filters = args.filters.clone();
+    smol::block_on(executor.run(async {
+        let rpc_client = RpcClient::new(args.endpoint, executor.clone()).await?;
+        let tau = Tau { rpc_client };
 
-    // If IDs are provided in filter we use them to get the tasks from the daemon
-    // then remove IDs from filter so we can do apply_filter() normally.
-    // If not provided we use get_ids() to get them from the daemon.
-    let ids = get_ids(&mut filters)?;
-    let ids_clone = ids.clone();
-    let task_ids = if ids.is_empty() { tau.get_ids().await? } else { ids };
+        let mut filters = args.filters.clone();
 
-    let mut tasks =
-        if filters.contains(&"state:stop".to_string()) || filters.contains(&"all".to_string()) {
+        // If IDs are provided in filter we use them to get the tasks from the daemon
+        // then remove IDs from filter so we can do apply_filter() normally.
+        // If not provided we use get_ids() to get them from the daemon.
+        let ids = get_ids(&mut filters)?;
+        let ids_clone = ids.clone();
+        let task_ids = if ids.is_empty() { tau.get_ids().await? } else { ids };
+
+        let mut tasks = if filters.contains(&"state:stop".to_string()) ||
+            filters.contains(&"all".to_string())
+        {
             tau.get_stop_tasks(None).await?
         } else {
             vec![]
         };
 
-    for id in task_ids {
-        tasks.push(tau.get_task_by_id(id).await?);
-    }
+        for id in task_ids {
+            tasks.push(tau.get_task_by_id(id).await?);
+        }
 
-    if ids_clone.len() == 1 && args.command.is_none() {
-        let tsk = tasks[0].clone();
-        print_task_info(tsk)?;
+        if ids_clone.len() == 1 && args.command.is_none() {
+            let tsk = tasks[0].clone();
+            print_task_info(tsk)?;
 
-        return Ok(())
-    }
+            return Ok(())
+        }
 
-    for filter in filters {
-        apply_filter(&mut tasks, &filter);
-    }
+        for filter in filters {
+            apply_filter(&mut tasks, &filter);
+        }
 
-    // Parse subcommands
-    match args.command {
-        Some(sc) => match sc {
-            TauSubcommand::Add { values } => {
-                let mut task = task_from_cli(values)?;
-                if task.title.is_empty() {
-                    error!("Please provide a title for the task.");
-                    exit(1);
-                };
-
-                if task.desc.is_none() {
-                    task.desc = prompt_text(TaskInfo::from(task.clone()), "description")?;
-                };
-
-                if task.clone().desc.unwrap().trim().is_empty() {
-                    error!("Abort adding the task due to empty description.");
-                    exit(1)
-                }
-
-                let title = task.clone().title;
-
-                let task_id = tau.add(task).await?;
-                if task_id > 0 {
-                    println!("Created task {} \"{}\"", task_id, title);
-                }
-                Ok(())
-            }
+        // Parse subcommands
+        match args.command {
+            Some(sc) => match sc {
+                TauSubcommand::Add { values } => {
+                    let mut task = task_from_cli(values)?;
+                    if task.title.is_empty() {
+                        error!("Please provide a title for the task.");
+                        exit(1);
+                    };
+
+                    if task.desc.is_none() {
+                        task.desc = prompt_text(TaskInfo::from(task.clone()), "description")?;
+                    };
+
+                    if task.clone().desc.unwrap().trim().is_empty() {
+                        error!("Abort adding the task due to empty description.");
+                        exit(1)
+                    }
+
+                    let title = task.clone().title;
+
+                    let task_id = tau.add(task).await?;
+                    if task_id > 0 {
+                        println!("Created task {} \"{}\"", task_id, title);
+                    }
+                    Ok(())
+                }
 
-            TauSubcommand::Modify { values } => {
-                if args.filters.is_empty() {
-                    no_filter_warn()
-                }
-                let base_task = task_from_cli(values)?;
-                for task in tasks.clone() {
-                    let res = tau.update(task.id, base_task.clone()).await?;
-                    if res {
-                        let tsk = tau.get_task_by_id(task.id).await?;
-                        print_task_info(tsk)?;
-                    }
-                }
-
-                Ok(())
-            }
+                TauSubcommand::Modify { values } => {
+                    if args.filters.is_empty() {
+                        no_filter_warn()
+                    }
+                    let base_task = task_from_cli(values)?;
+                    for task in tasks.clone() {
+                        let res = tau.update(task.id, base_task.clone()).await?;
+                        if res {
+                            let tsk = tau.get_task_by_id(task.id).await?;
+                            print_task_info(tsk)?;
+                        }
+                    }
+
+                    Ok(())
+                }
 
-            TauSubcommand::Start => {
-                if args.filters.is_empty() {
-                    no_filter_warn()
-                }
-                let state = State::Start;
-                for task in tasks {
-                    if tau.set_state(task.id, &state).await? {
-                        println!("Started task: {:?}", task.id);
-                    }
-                }
-
-                Ok(())
-            }
+                TauSubcommand::Start => {
+                    if args.filters.is_empty() {
+                        no_filter_warn()
+                    }
+                    let state = State::Start;
+                    for task in tasks {
+                        if tau.set_state(task.id, &state).await? {
+                            println!("Started task: {:?}", task.id);
+                        }
+                    }
+
+                    Ok(())
+                }
 
-            TauSubcommand::Open => {
-                if args.filters.is_empty() {
-                    no_filter_warn()
-                }
-                let state = State::Open;
-                for task in tasks {
-                    if tau.set_state(task.id, &state).await? {
-                        println!("Opened task: {:?}", task.id);
-                    }
-                }
-
-                Ok(())
-            }
+                TauSubcommand::Open => {
+                    if args.filters.is_empty() {
+                        no_filter_warn()
+                    }
+                    let state = State::Open;
+                    for task in tasks {
+                        if tau.set_state(task.id, &state).await? {
+                            println!("Opened task: {:?}", task.id);
+                        }
+                    }
+
+                    Ok(())
+                }
 
-            TauSubcommand::Pause => {
-                if args.filters.is_empty() {
-                    no_filter_warn()
-                }
-                let state = State::Pause;
-                for task in tasks {
-                    if tau.set_state(task.id, &state).await? {
-                        println!("Paused task: {:?}", task.id);
-                    }
-                }
-
-                Ok(())
-            }
+                TauSubcommand::Pause => {
+                    if args.filters.is_empty() {
+                        no_filter_warn()
+                    }
+                    let state = State::Pause;
+                    for task in tasks {
+                        if tau.set_state(task.id, &state).await? {
+                            println!("Paused task: {:?}", task.id);
+                        }
+                    }
+
+                    Ok(())
+                }
 
-            TauSubcommand::Stop => {
-                if args.filters.is_empty() {
-                    no_filter_warn()
-                }
-                let state = State::Stop;
-                for task in tasks {
-                    if tau.set_state(task.id, &state).await? {
-                        println!("Stopped task: {}", task.id);
-                    }
-                }
-
-                Ok(())
-            }
+                TauSubcommand::Stop => {
+                    if args.filters.is_empty() {
+                        no_filter_warn()
+                    }
+                    let state = State::Stop;
+                    for task in tasks {
+                        if tau.set_state(task.id, &state).await? {
+                            println!("Stopped task: {}", task.id);
+                        }
+                    }
+
+                    Ok(())
+                }
 
-            TauSubcommand::Comment { content } => {
-                if args.filters.is_empty() {
-                    no_filter_warn()
-                }
-                for task in tasks {
-                    let comment = if content.is_empty() {
-                        prompt_text(task.clone(), "comment")?
-                    } else {
-                        Some(content.join(" "))
-                    };
-
-                    if comment.clone().unwrap().trim().is_empty() || comment.is_none() {
-                        error!("Abort due to empty comment.");
-                        exit(1)
-                    }
-
-                    let res = tau.set_comment(task.id, comment.unwrap().trim()).await?;
-                    if res {
-                        let tsk = tau.get_task_by_id(task.id).await?;
-                        print_task_info(tsk)?;
-                    }
-                }
-                Ok(())
-            }
+                TauSubcommand::Comment { content } => {
+                    if args.filters.is_empty() {
+                        no_filter_warn()
+                    }
+                    for task in tasks {
+                        let comment = if content.is_empty() {
+                            prompt_text(task.clone(), "comment")?
+                        } else {
+                            Some(content.join(" "))
+                        };
+
+                        if comment.clone().unwrap().trim().is_empty() || comment.is_none() {
+                            error!("Abort due to empty comment.");
+                            exit(1)
+                        }
+
+                        let res = tau.set_comment(task.id, comment.unwrap().trim()).await?;
+                        if res {
+                            let tsk = tau.get_task_by_id(task.id).await?;
+                            print_task_info(tsk)?;
+                        }
+                    }
+                    Ok(())
+                }
 
-            TauSubcommand::Info => {
-                for task in tasks {
-                    let task = tau.get_task_by_id(task.id).await?;
-                    print_task_info(task)?;
-                }
-                Ok(())
-            }
+                TauSubcommand::Info => {
+                    for task in tasks {
+                        let task = tau.get_task_by_id(task.id).await?;
+                        print_task_info(task)?;
+                    }
+                    Ok(())
+                }
 
-            TauSubcommand::Switch { workspace } => {
-                if tau.switch_ws(workspace.clone()).await? {
-                    println!("You are now on \"{}\" workspace", workspace);
-                } else {
-                    println!("Workspace \"{}\" is not configured", workspace);
-                }
-
-                Ok(())
-            }
+                TauSubcommand::Switch { workspace } => {
+                    if tau.switch_ws(workspace.clone()).await?
+                    {
+                        println!("You are now on \"{}\" workspace", workspace);
+                    } else {
+                        println!("Workspace \"{}\" is not configured", workspace);
+                    }
+
+                    Ok(())
+                }
 
-            TauSubcommand::Export { path } => {
-                let path = path.unwrap_or_else(|| DEFAULT_PATH.into());
-                let res = tau.export_to(path.clone()).await?;
-
-                if res {
-                    info!("Exported to {}", path);
-                } else {
-                    error!("Error exporting to {}", path);
-                }
-
-                Ok(())
-            }
+                TauSubcommand::Export { path } => {
+                    let path = path.unwrap_or_else(|| DEFAULT_PATH.into());
+                    let res = tau.export_to(path.clone()).await?;
+
+                    if res {
+                        info!("Exported to {}", path);
+                    } else {
+                        error!("Error exporting to {}", path);
+                    }
+
+                    Ok(())
+                }
 
-            TauSubcommand::Import { path } => {
-                let path = path.unwrap_or_else(|| DEFAULT_PATH.into());
-                let res = tau.import_from(path.clone()).await?;
-
-                if res {
-                    info!("Imported from {}", path);
-                } else {
-                    error!("Error importing from {}", path);
-                }
-
-                Ok(())
-            }
+                TauSubcommand::Import { path } => {
+                    let path = path.unwrap_or_else(|| DEFAULT_PATH.into());
+                    let res = tau.import_from(path.clone()).await?;
+
+                    if res {
+                        info!("Imported from {}", path);
+                    } else {
+                        error!("Error importing from {}", path);
+                    }
+
+                    Ok(())
+                }
 
-            TauSubcommand::Log { month, assignee } => {
-                match month {
-                    Some(date) => {
-                        let ts =
-                            to_naivedate(date.clone())?.and_hms_opt(12, 0, 0).unwrap().timestamp();
-                        let tasks = tau.get_stop_tasks(Some(ts.try_into().unwrap())).await?;
-                        drawdown(date, tasks, assignee)?;
-                    }
-                    None => {
-                        let ws = tau.get_ws().await?;
-                        let tasks = tau.get_stop_tasks(None).await?;
-                        print_task_list(tasks, ws)?;
-                    }
-                }
-
-                Ok(())
-            }
+                TauSubcommand::Log { month, assignee } => {
+                    match month {
+                        Some(date) => {
+                            let ts = to_naivedate(date.clone())?
+                                .and_hms_opt(12, 0, 0)
+                                .unwrap()
+                                .timestamp();
+                            let tasks = tau.get_stop_tasks(Some(ts.try_into().unwrap())).await?;
+                            drawdown(date, tasks, assignee)?;
+                        }
+                        None => {
+                            let ws = tau.get_ws().await?;
+                            let tasks = tau.get_stop_tasks(None).await?;
+                            print_task_list(tasks, ws)?;
+                        }
+                    }
+
+                    Ok(())
+                }
 
-            TauSubcommand::List => {
-                let ws = tau.get_ws().await?;
-                print_task_list(tasks, ws)
-            }
-        },
-        None => {
-            let ws = tau.get_ws().await?;
-            print_task_list(tasks, ws)
-        }
-    }?;
+                TauSubcommand::List => {
+                    let ws = tau.get_ws().await?;
+                    print_task_list(tasks, ws)
+                }
+            },
+            None => {
+                let ws = tau.get_ws().await?;
+                print_task_list(tasks, ws)
+            }
+        }?;
 
-    tau.close_connection().await
+        tau.close_connection().await
+    }))
 }
diff --git a/bin/tau/taud/Cargo.toml b/bin/tau/taud/Cargo.toml
index f6bcb55e3..d3b8bd209 100644
--- a/bin/tau/taud/Cargo.toml
+++ b/bin/tau/taud/Cargo.toml
@@ -37,7 +37,6 @@ toml = "0.7.6"
 url = "2.4.0"
 
 # Daemon
-async-std = {version = "1.12.0", features = ["attributes"]}
 easy-parallel = "3.3.0"
 signal-hook-async-std = "0.2.2"
 signal-hook = "0.3.17"
diff --git a/bin/tau/taud/src/jsonrpc.rs b/bin/tau/taud/src/jsonrpc.rs
index 864b8eb96..b6b4ce51e 100644
--- a/bin/tau/taud/src/jsonrpc.rs
+++ b/bin/tau/taud/src/jsonrpc.rs
@@ -18,10 +18,10 @@
 use std::{collections::HashMap, fs::create_dir_all, path::PathBuf, sync::Arc};
 
-use async_std::sync::Mutex;
 use async_trait::async_trait;
 use crypto_box::ChaChaBox;
 use log::{debug, warn};
+use smol::lock::Mutex;
 use tinyjson::JsonValue;
 
 use darkfi::{
diff --git a/bin/tau/taud/src/main.rs b/bin/tau/taud/src/main.rs
index 6050fc41b..b527637c4 100644
--- a/bin/tau/taud/src/main.rs
+++ b/bin/tau/taud/src/main.rs
@@ -16,11 +16,6 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
-use async_std::{
-    stream::StreamExt,
-    sync::{Arc, Mutex},
-};
-use libc::mkfifo;
 use std::{
     collections::HashMap,
     env,
@@ -28,6 +23,7 @@ use std::{
     fs::{create_dir_all, remove_dir_all},
     io::{stdin, Write},
     path::Path,
+    sync::Arc,
 };
 
 use crypto_box::{
     aead::{Aead, AeadCore},
     SalsaBox,
 };
 use darkfi_serial::{deserialize, serialize, SerialDecodable, SerialEncodable};
 use futures::{select, FutureExt};
+use libc::mkfifo;
 use log::{debug, error, info};
 use rand::rngs::OsRng;
+use smol::{lock::Mutex, stream::StreamExt};
 use structopt_toml::StructOptToml;
 use tinyjson::JsonValue;
 
@@ -424,11 +422,11 @@ async fn realmain(settings: Args, executor: Arc>) -> Res
             }
         },
         Error::RPCServerStopped,
-        executor,
+        executor.clone(),
    );
 
     // Signal handling for graceful termination.
-    let (signals_handler, signals_task) = SignalHandler::new()?;
+    let (signals_handler, signals_task) = SignalHandler::new(executor)?;
     signals_handler.wait_termination(signals_task).await?;
     info!("Caught termination signal, cleaning up and exiting...");
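
Note (not part of the patch): the recurring pattern across these binaries is replacing async-std's runtime with a smol Executor driven by easy-parallel worker threads, with an async channel used for shutdown. The following is a minimal, self-contained sketch of that skeleton under those assumptions; darkfi's own SignalHandler and async_daemonize wrappers are omitted, and names here are illustrative only.

```rust
// Sketch of the smol + easy-parallel daemon skeleton (illustrative, not darkfi code).
use std::sync::Arc;

use easy_parallel::Parallel;
use smol::{channel, Executor};

fn main() {
    // Executor shared by all worker threads.
    let ex = Arc::new(Executor::new());
    // Closing this channel makes every `ex.run(...)` loop below return.
    let (signal, shutdown) = channel::unbounded::<()>();

    Parallel::new()
        // Drive the executor on four OS threads until shutdown is signalled.
        .each(0..4, |_| smol::block_on(ex.run(shutdown.recv())))
        // Run the "main" future on the current thread.
        .finish(|| {
            smol::block_on(async {
                // Tasks are spawned on the executor instead of async_std::task::spawn.
                let task = ex.spawn(async { 2 + 2 });
                println!("task result: {}", task.await);

                // Dropping the sender closes the channel and stops the workers.
                drop(signal);
            })
        });
}
```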