Update and clean up dependencies.

parazyd
2022-12-13 15:18:53 +01:00
parent c600b447f1
commit 88caee7f42
78 changed files with 360 additions and 1572 deletions
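Many of the source-level changes below drop the `fxhash` crate in favour of the standard library collections; a minimal sketch of that substitution, using a hypothetical `Registry` type (the real call sites are the workspace, channel, event and host maps shown in the diffs):

use std::collections::{HashMap, HashSet};

// Hypothetical example type; the diffs below make the same change to the
// existing maps (workspaces, configured channels, saved hosts, event maps).
struct Registry {
    entries: HashMap<String, Vec<u8>>, // was: fxhash::FxHashMap<String, Vec<u8>>
    seen: HashSet<String>,             // was: fxhash::FxHashSet<String>
}

impl Registry {
    fn new() -> Self {
        // FxHashMap::default() / FxHashSet::default() become HashMap::new() / HashSet::new()
        Self { entries: HashMap::new(), seen: HashSet::new() }
    }
}

fn main() {
    let mut r = Registry::new();
    r.entries.insert("darkfi".to_string(), vec![0u8; 32]);
    r.seen.insert("darkfi".to_string());
    assert_eq!(r.entries.len(), 1);
}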

Cargo.lock (generated)

@@ -275,9 +275,9 @@ dependencies = [
[[package]]
name = "async-tungstenite"
version = "0.18.0"
version = "0.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b750efd83b7e716a015eed5ebb583cda83c52d9b24a8f0125e5c48c3313c9f8"
checksum = "8e6acf7e4a267eecbb127ed696bb2d50572c22ba7f586a646321e1798d8336a1"
dependencies = [
"futures-io",
"futures-util",
@@ -1047,12 +1047,12 @@ dependencies = [
[[package]]
name = "ctrlc"
version = "3.2.3"
version = "3.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d91974fbbe88ec1df0c24a4f00f99583667a7e2e6272b2b92d294d81e462173"
checksum = "1631ca6e3c59112501a9d87fd86f21591ff77acd31331e8a73f8d80a65bbdd71"
dependencies = [
"nix",
"winapi",
"windows-sys 0.42.0",
]
[[package]]
@@ -1070,9 +1070,9 @@ dependencies = [
[[package]]
name = "cxx"
version = "1.0.82"
version = "1.0.83"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d4a41a86530d0fe7f5d9ea779916b7cadd2d4f9add748b99c2c029cbbdfaf453"
checksum = "bdf07d07d6531bfcdbe9b8b739b104610c6508dcc4d63b410585faf338241daf"
dependencies = [
"cc",
"cxxbridge-flags",
@@ -1082,9 +1082,9 @@ dependencies = [
[[package]]
name = "cxx-build"
version = "1.0.82"
version = "1.0.83"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06416d667ff3e3ad2df1cd8cd8afae5da26cf9cec4d0825040f88b5ca659a2f0"
checksum = "d2eb5b96ecdc99f72657332953d4d9c50135af1bac34277801cc3937906ebd39"
dependencies = [
"cc",
"codespan-reporting",
@@ -1097,15 +1097,15 @@ dependencies = [
[[package]]
name = "cxxbridge-flags"
version = "1.0.82"
version = "1.0.83"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "820a9a2af1669deeef27cb271f476ffd196a2c4b6731336011e0ba63e2c7cf71"
checksum = "ac040a39517fd1674e0f32177648334b0f4074625b5588a64519804ba0553b12"
[[package]]
name = "cxxbridge-macro"
version = "1.0.82"
version = "1.0.83"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a08a6e2fcc370a089ad3b4aaf54db3b1b4cee38ddabce5896b33eb693275f470"
checksum = "1362b0ddcfc4eb0a1f57b68bd77dd99f0e826958a96abd0ae9bd092e114ffed6"
dependencies = [
"proc-macro2",
"quote",
@@ -1119,10 +1119,8 @@ dependencies = [
"async-std",
"async-trait",
"async-tungstenite",
"blake2b_simd",
"blake3",
"bs58",
"chacha20poly1305",
"chrono",
"clap 4.0.29",
"crypto_api_chachapoly",
@@ -1133,36 +1131,27 @@ dependencies = [
"dashu",
"easy-parallel",
"ed25519-compact",
"env_logger",
"fast-socks5",
"futures",
"futures-rustls",
"fxhash",
"halo2_gadgets",
"halo2_proofs",
"hex",
"incrementalmerkletree",
"indexmap",
"indicatif",
"ipnet",
"iprange",
"itertools",
"lazy-init",
"lazy_static",
"libc",
"libsqlite3-sys",
"log",
"num-bigint",
"num-traits",
"pasta_curves",
"plotters",
"rand",
"rcell",
"rcgen",
"rustls-pemfile",
"serde",
"serde_json",
"sha2",
"simplelog",
"sled",
"smol",
@@ -1170,8 +1159,6 @@ dependencies = [
"sqlx",
"structopt",
"structopt-toml",
"subtle",
"termion 2.0.1",
"thiserror",
"toml",
"url",
@@ -1254,7 +1241,6 @@ dependencies = [
"blake3",
"darkfi-derive",
"futures-lite",
"fxhash",
"incrementalmerkletree",
"pasta_curves",
"url",
@@ -1268,18 +1254,12 @@ dependencies = [
"async-trait",
"blake3",
"bs58",
"chrono",
"ctrlc",
"darkfi",
"darkfi-sdk",
"darkfi-serial",
"easy-parallel",
"fxhash",
"incrementalmerkletree",
"lazy-init",
"log",
"pasta_curves",
"rand",
"serde",
"serde_json",
"simplelog",
@@ -1323,7 +1303,6 @@ dependencies = [
"dryoc",
"easy-parallel",
"futures",
"fxhash",
"lazy_static",
"log",
"serde",
@@ -1562,7 +1541,6 @@ dependencies = [
"clap 4.0.29",
"darkfi",
"easy-parallel",
"fxhash",
"hex",
"log",
"num_cpus",
@@ -1672,9 +1650,9 @@ checksum = "6907e25393cdcc1f4f3f513d9aac1e840eb1cc341a0fccb01171f7d14d10b946"
[[package]]
name = "ed25519-compact"
version = "2.0.2"
version = "2.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f2d21333b679bbbac680b3eb45c86937e42f69277028f4e97b599b80b86c253"
checksum = "6a3d382e8464107391c8706b4c14b087808ecb909f6c15c34114bc42e53a9e4c"
dependencies = [
"ct-codecs",
"getrandom 0.2.8",
@@ -1739,19 +1717,6 @@ dependencies = [
"syn",
]
[[package]]
name = "env_logger"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85cdab6a89accf66733ad5a1693a4dcced6aeff64602b634530dd73c1f3ee9f0"
dependencies = [
"humantime",
"is-terminal",
"log",
"regex",
"termcolor",
]
[[package]]
name = "errno"
version = "0.2.8"
@@ -2343,12 +2308,6 @@ version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904"
[[package]]
name = "humantime"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
[[package]]
name = "iana-time-zone"
version = "0.1.53"
@@ -2465,9 +2424,9 @@ dependencies = [
[[package]]
name = "ipnet"
version = "2.5.1"
version = "2.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f88c5561171189e69df9d98bcf18fd5f9558300f7ea7b801eb8a0fd748bd8745"
checksum = "11b0d96e660696543b251e58030cf9787df56da39dab19ad60eae7353040917e"
[[package]]
name = "iprange"
@@ -2495,7 +2454,6 @@ dependencies = [
"easy-parallel",
"futures",
"futures-rustls",
"fxhash",
"hex",
"log",
"rand",
@@ -2527,7 +2485,6 @@ dependencies = [
"easy-parallel",
"futures",
"futures-rustls",
"fxhash",
"hex",
"log",
"rand",
@@ -2620,9 +2577,9 @@ checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67"
[[package]]
name = "libc"
version = "0.2.137"
version = "0.2.138"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc7fcc620a3bff7cdd7a365be3376c97191aeaccc2a603e600951e452615bf89"
checksum = "db6d7e329c562c5dfab7a46a2afabc8b987ab9a4834c9d1ca04dc54c1546cef8"
[[package]]
name = "libloading"
@@ -2654,7 +2611,6 @@ dependencies = [
"ctrlc",
"darkfi",
"easy-parallel",
"fxhash",
"log",
"serde",
"serde_json",
@@ -2805,14 +2761,14 @@ dependencies = [
[[package]]
name = "nix"
version = "0.25.0"
version = "0.26.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e322c04a9e3440c327fca7b6c8a63e6890a32fa2ad689db972425f07e0d22abb"
checksum = "46a58d1d356c6597d08cde02c2f09d785b09e28711837b1ed667dc652c08a694"
dependencies = [
"autocfg",
"bitflags",
"cfg-if",
"libc",
"static_assertions",
]
[[package]]
@@ -2825,17 +2781,6 @@ dependencies = [
"minimal-lexical",
]
[[package]]
name = "num-bigint"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
]
[[package]]
name = "num-integer"
version = "0.1.45"
@@ -2920,9 +2865,9 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
[[package]]
name = "openssl"
version = "0.10.43"
version = "0.10.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "020433887e44c27ff16365eaa2d380547a94544ad509aff6eb5b6e3e0b27b376"
checksum = "29d971fd5722fec23977260f6e81aa67d2f22cadbdc2aa049f1022d9a3be1566"
dependencies = [
"bitflags",
"cfg-if",
@@ -2952,9 +2897,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
[[package]]
name = "openssl-sys"
version = "0.9.78"
version = "0.9.79"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "07d5c8cb6e57b3a3612064d7b18b117912b4ce70955c2504d4b741c9e244b132"
checksum = "5454462c0eced1e97f2ec09036abc8da362e66802f66fd20f86854d9d8cbcbc4"
dependencies = [
"autocfg",
"cc",
@@ -2983,7 +2928,7 @@ checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99"
dependencies = [
"instant",
"lock_api",
"parking_lot_core 0.8.5",
"parking_lot_core 0.8.6",
]
[[package]]
@@ -2998,9 +2943,9 @@ dependencies = [
[[package]]
name = "parking_lot_core"
version = "0.8.5"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216"
checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc"
dependencies = [
"cfg-if",
"instant",
@@ -3040,9 +2985,9 @@ dependencies = [
[[package]]
name = "paste"
version = "1.0.9"
version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1de2e551fb905ac83f73f7aedf2f0cb4a0da7e35efa24a202a936269f1f18e1"
checksum = "cf1c2c742266c2f1041c914ba65355a83ae8747b05f208319784083583494b4b"
[[package]]
name = "pathfinder_geometry"
@@ -3080,9 +3025,9 @@ checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e"
[[package]]
name = "pest"
version = "2.5.0"
version = "2.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f400b0f7905bf702f9f3dc3df5a121b16c54e9e8012c082905fdf09a931861a"
checksum = "cc8bed3549e0f9b0a2a78bf7c0018237a2cdf085eecbbc048e52612438e4e9d0"
dependencies = [
"thiserror",
"ucd-trie",
@@ -3186,9 +3131,9 @@ dependencies = [
[[package]]
name = "polling"
version = "2.5.1"
version = "2.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "166ca89eb77fd403230b9c156612965a81e094ec6ec3aa13663d4c8b113fa748"
checksum = "22122d5ec4f9fe1b3916419b76be1e80bcb93f618d071d2edf841b137b2a2bd6"
dependencies = [
"autocfg",
"cfg-if",
@@ -3211,9 +3156,9 @@ dependencies = [
[[package]]
name = "portable-atomic"
version = "0.3.15"
version = "0.3.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "15eb2c6e362923af47e13c23ca5afb859e83d54452c55b0b9ac763b8f7c1ac16"
checksum = "ac662b3a6490de378b0ee15cf2dfff7127aebfe0b19acc65e7fbca3d299c3788"
[[package]]
name = "ppv-lite86"
@@ -3407,11 +3352,10 @@ dependencies = [
[[package]]
name = "rayon"
version = "1.6.0"
version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e060280438193c554f654141c9ea9417886713b7acd75974c85b18a69a88e0b"
checksum = "6db3a213adf02b3bcfd2d3846bb41cb22857d131789e01df434fb7e7bc0759b7"
dependencies = [
"crossbeam-deque",
"either",
"rayon-core",
]
@@ -3428,15 +3372,6 @@ dependencies = [
"num_cpus",
]
[[package]]
name = "rcell"
version = "1.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2c04ae9905949c881834ee94d1a1495d079d649e39babbfc99601aa2fd3f138"
dependencies = [
"sharded_mutex",
]
[[package]]
name = "rcgen"
version = "0.10.0"
@@ -3610,9 +3545,9 @@ dependencies = [
[[package]]
name = "rustix"
version = "0.36.4"
version = "0.36.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb93e85278e08bb5788653183213d3a60fc242b10cb9be96586f5a73dcb67c23"
checksum = "a3807b5d10909833d3e9acd1eb5fb988f79376ff10fce42937de71a449c4c588"
dependencies = [
"bitflags",
"errno",
@@ -3775,9 +3710,9 @@ dependencies = [
[[package]]
name = "serde"
version = "1.0.148"
version = "1.0.150"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e53f64bb4ba0191d6d0676e1b141ca55047d83b74f5607e6d8eb88126c52c2dc"
checksum = "e326c9ec8042f1b5da33252c8a37e9ffbd2c9bef0155215b6e6c80c790e05f91"
dependencies = [
"serde_derive",
]
@@ -3795,9 +3730,9 @@ dependencies = [
[[package]]
name = "serde_derive"
version = "1.0.148"
version = "1.0.150"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a55492425aa53521babf6137309e7d34c20bbfbbfcfe2c7f3a047fd1f6b92c0c"
checksum = "42a3df25b0713732468deadad63ab9da1f1fd75a48a15024b50363f128db627e"
dependencies = [
"proc-macro2",
"quote",
@@ -3816,10 +3751,10 @@ dependencies = [
]
[[package]]
name = "sha-1"
version = "0.10.1"
name = "sha1"
version = "0.10.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f5058ada175748e33390e40e872bd0fe59a19f265d0158daa551c5a88a76009c"
checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3"
dependencies = [
"cfg-if",
"cpufeatures",
@@ -3837,15 +3772,6 @@ dependencies = [
"digest 0.10.6",
]
[[package]]
name = "sharded_mutex"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26c0a2ba1160131410ea1671689505938033c38f5ce86853446d7dd92c81b515"
dependencies = [
"parking_lot 0.12.1",
]
[[package]]
name = "signal-hook"
version = "0.3.14"
@@ -4192,9 +4118,9 @@ checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"
[[package]]
name = "syn"
version = "1.0.104"
version = "1.0.105"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ae548ec36cf198c0ef7710d3c230987c2d6d7bd98ad6edc0274462724c585ce"
checksum = "60b9b43d45702de4c839cb9b51d9f529c5dd26a4aff255b42b1ebc03e88ee908"
dependencies = [
"proc-macro2",
"quote",
@@ -4245,7 +4171,6 @@ dependencies = [
"clap 4.0.29",
"colored",
"darkfi",
"fxhash",
"log",
"prettytable-rs",
"serde",
@@ -4270,7 +4195,6 @@ dependencies = [
"darkfi-serial",
"easy-parallel",
"futures",
"fxhash",
"hex",
"log",
"rand",
@@ -4520,9 +4444,9 @@ dependencies = [
[[package]]
name = "tungstenite"
version = "0.17.3"
version = "0.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e27992fd6a8c29ee7eef28fc78349aa244134e10ad447ce3b9f0ac0ed0fa4ce0"
checksum = "30ee6ab729cd4cf0fd55218530c4522ed30b7b6081752839b68fcec8d0960788"
dependencies = [
"base64",
"byteorder",
@@ -4531,7 +4455,7 @@ dependencies = [
"httparse",
"log",
"rand",
"sha-1",
"sha1",
"thiserror",
"url",
"utf-8",
@@ -4539,9 +4463,9 @@ dependencies = [
[[package]]
name = "typenum"
version = "1.15.0"
version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba"
[[package]]
name = "ucd-trie"
@@ -5279,9 +5203,9 @@ dependencies = [
[[package]]
name = "zeroize_derive"
version = "1.3.2"
version = "1.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f8f187641dad4f680d25c4bfc4225b418165984179f26ca76ec4fb6441d3a17"
checksum = "44bf07cb3e50ea2003396695d58bf46bc9887a1f362260446fad6bc4e79bd36c"
dependencies = [
"proc-macro2",
"quote",


@@ -51,7 +51,7 @@ members = [
[dependencies]
# Hard dependencies
libc = "0.2.137"
libc = "0.2.138"
log = "0.4.17"
thiserror = "1.0.37"
@@ -64,11 +64,11 @@ smol = {version = "1.3.0", optional = true}
# Networking
futures-rustls = {version = "0.22.2", features = ["dangerous_configuration"], optional = true}
iprange = {version = "0.6.7", optional = true}
ipnet = {version = "2.5.1", optional = true}
ipnet = {version = "2.7.0", optional = true}
socket2 = {version = "0.4.7", optional = true}
# TLS cert utilities
ed25519-compact = {version = "2.0.2", features = ["pem"], optional = true}
ed25519-compact = {version = "2.0.4", features = ["pem"], optional = true}
rcgen = {version = "0.10.0", features = ["pem"], optional = true}
rustls-pemfile = {version = "1.0.1", optional = true}
@@ -76,10 +76,12 @@ rustls-pemfile = {version = "1.0.1", optional = true}
bs58 = {version = "0.4.0", optional = true}
hex = {version = "0.4.3", optional = true}
serde_json = {version = "1.0.89", optional = true}
serde = {version = "1.0.148", features = ["derive"], optional = true}
serde = {version = "1.0.150", features = ["derive"], optional = true}
structopt = {version= "0.3.26", optional = true}
structopt-toml = {version= "0.5.1", optional = true}
toml = {version = "0.5.9", optional = true}
# big float
dashu = { version = "0.2.0", git = "https://github.com/ertosns/dashu", optional=true }
# Utilities
# TODO: check chrono usage and impl our own
@@ -87,12 +89,9 @@ chrono = {version = "0.4.23", optional = true}
darkfi-serial = {path = "src/serial", optional = true}
darkfi-derive = {path = "src/serial/derive", optional = true}
darkfi-derive-internal = {path = "src/serial/derive-internal", optional = true}
fxhash = {version = "0.2.1", optional = true}
indexmap = {version = "1.9.2", optional = true}
itertools = {version = "0.10.5", optional = true}
lazy-init = {version = "0.5.1", optional = true}
lazy_static = {version = "1.4.0", optional = true}
subtle = {version = "2.4.1", optional = true}
# TODO: Test without serde
url = {version = "2.3.1", features = ["serde"], optional = true}
@@ -100,27 +99,19 @@ url = {version = "2.3.1", features = ["serde"], optional = true}
# TODO: Implement something simple and kill these deps
indicatif = {version = "0.17.2", optional = true}
simplelog = {version = "0.12.0", optional = true}
termion = {version = "2.0.1", optional = true}
# Websockets
async-tungstenite = {version = "0.18.0", optional = true}
async-tungstenite = {version = "0.19.0", optional = true}
# socks5
fast-socks5 = {version = "0.4.3", optional = true}
# Crypto
rand = {version = "0.8.5", optional = true}
blake2b_simd = {version = "1.0.0", optional = true}
blake3 = {version = "1.3.3", optional = true}
chacha20poly1305 = {version = "0.10.1", optional = true}
crypto_api_chachapoly = {version = "0.5.0", optional = true}
halo2_proofs = {version = "0.2.0", optional = true}
halo2_gadgets = {version = "0.2.0", optional = true}
incrementalmerkletree = {version = "0.3.0", optional = true}
num-bigint = {version = "0.4.3", optional = true}
num-traits = {version = "0.2.15", optional = true}
pasta_curves = {version = "0.4.1", optional = true}
sha2 = {version = "0.10.6", optional = true}
# Smart contract runtime
darkfi-sdk = {path = "src/sdk", optional = true}
@@ -135,20 +126,13 @@ sqlx = {version = "0.6.2", features = ["runtime-async-std-native-tls", "sqlite"]
# Blockchain store
sled = {version = "0.34.7", optional = true}
# big float
dashu = { version = "0.2.0", git = "https://github.com/ertosns/dashu", optional=true }
rcell = { version = "1.1.3", optional=true }
[dev-dependencies]
clap = {version = "4.0.29", features = ["derive"]}
halo2_proofs = {version = "0.2.0", features = ["dev-graph", "gadget-traces", "sanity-checks"]}
halo2_gadgets = {version = "0.2.0", features = ["dev-graph", "test-dependencies"]}
plotters = "0.3.4"
env_logger = "0.10.0"
easy-parallel = "3.2.0"
# -----BEGIN LIBRARY FEATURES-----
[features]
async-runtime = [
@@ -160,14 +144,11 @@ async-runtime = [
blockchain = [
"blake3",
"bs58", # <-- remove after we get rid of json for notifications
"chrono",
"dashu",
"halo2_gadgets",
"halo2_proofs",
"incrementalmerkletree",
"lazy-init",
"lazy_static",
"pasta_curves",
"rand",
"sled",
"sqlx",
@@ -175,43 +156,14 @@ blockchain = [
"crypto_api_chachapoly",
"async-runtime",
"crypto",
"darkfi-sdk",
"darkfi-serial",
"darkfi-serial/crypto",
"net",
"rpc",
"tx",
"util",
"wallet",
"wasm-runtime",
"rcell"
]
crypto = [
"blake2b_simd",
"blake3",
"bs58",
"chacha20poly1305",
"fxhash",
"halo2_gadgets",
"halo2_proofs",
"hex",
"incrementalmerkletree",
"lazy_static",
"num-bigint",
"num-traits",
"pasta_curves",
"rand",
"serde",
"serde_json",
"sha2",
"subtle",
"darkfi-sdk",
"darkfi-serial",
"darkfi-serial/crypto",
"util",
"zkas",
]
dht = [
@@ -227,7 +179,6 @@ dht = [
]
net = [
"fxhash",
"ed25519-compact",
"fast-socks5",
"futures-rustls",
@@ -254,7 +205,6 @@ net = [
raft = [
"chrono",
"fxhash",
"rand",
"sled",
@@ -277,29 +227,26 @@ rpc = [
]
system = [
"fxhash",
"rand",
"async-runtime",
]
tx = [
"incrementalmerkletree",
"blake3",
"rand",
"async-runtime",
"darkfi-sdk",
"darkfi-serial",
"crypto",
"zk",
]
util = [
"chrono",
"indicatif",
"rand",
"simplelog",
"serde",
"serde_json",
"termion",
"toml",
"url",
@@ -308,13 +255,10 @@ util = [
wallet = [
"async-std",
"bs58",
"rand",
"sqlx",
"incrementalmerkletree",
"libsqlite3-sys",
"crypto",
"darkfi-serial",
"util",
]
@@ -332,8 +276,15 @@ websockets = [
"async-tungstenite",
]
zk = [
"halo2_proofs",
"halo2_gadgets",
"darkfi-sdk",
"zkas",
]
zkas = [
"termion",
"indexmap",
"itertools",


@@ -64,7 +64,6 @@ rustdoc: token_lists zkas
test: token_lists zkas $(PROOFS_BIN) contracts
RUSTFLAGS="$(RUSTFLAGS)" $(CARGO) test --release --all-features --all
$(MAKE) -C src/contract/money test
test-dao: zkas
$(MAKE) -C example/dao


@@ -32,17 +32,16 @@ num_cpus = "1.14.0"
simplelog = "0.12.0"
thiserror = "1.0.37"
url = "2.3.1"
fxhash = "0.2.1"
# Encoding and parsing
serde = {version = "1.0.148", features = ["derive"]}
serde = {version = "1.0.150", features = ["derive"]}
serde_json = "1.0.89"
# Bitcoin bridge dependencies
bdk = {version = "0.24.0", optional = true}
bdk = {version = "0.25.0", optional = true}
anyhow = {version = "1.0.66", optional = true}
bitcoin = {version = "0.29.1", optional = true}
secp256k1 = {version = "0.24.1", default-features = false, features = ["rand-std"], optional = true}
secp256k1 = {version = "0.25.0", default-features = false, features = ["rand-std"], optional = true}
# Ethereum bridge dependencies
hex = {version = "0.4.3", optional = true}
@@ -54,9 +53,9 @@ num-bigint = {version = "0.4.3", features = ["rand", "serde"], optional = true}
# Solana bridge dependencies
native-tls = {version = "0.2.11", optional = true}
async-native-tls = {version = "0.4.0", optional = true}
solana-client = {version = "1.14.9", optional = true}
solana-sdk = {version = "1.14.9", optional = true}
spl-associated-token-account = {version = "1.1.1", features = ["no-entrypoint"], optional = true}
solana-client = {version = "1.14.10", optional = true}
solana-sdk = {version = "1.14.10", optional = true}
spl-associated-token-account = {version = "1.1.2", features = ["no-entrypoint"], optional = true}
spl-token = {version = "3.5.0", features = ["no-entrypoint"], optional = true}
tungstenite = {version = "0.18.0", optional = true}


@@ -15,13 +15,12 @@
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use async_std::sync::{Arc, Mutex};
use std::collections::HashMap;
use async_executor::Executor;
use async_std::sync::{Arc, Mutex};
use async_trait::async_trait;
use futures::stream::{FuturesUnordered, StreamExt};
use fxhash::FxHashMap;
use log::{debug, error};
use darkfi::{
@@ -82,16 +81,13 @@ pub struct TokenNotification {
}
pub struct Bridge {
clients: Mutex<FxHashMap<NetworkName, Arc<dyn NetworkClient + Send + Sync>>>,
clients: Mutex<HashMap<NetworkName, Arc<dyn NetworkClient + Send + Sync>>>,
notifiers: FuturesUnordered<async_channel::Receiver<TokenNotification>>,
}
impl Bridge {
pub fn new() -> Arc<Self> {
Arc::new(Self {
clients: Mutex::new(FxHashMap::default()),
notifiers: FuturesUnordered::new(),
})
Arc::new(Self { clients: Mutex::new(HashMap::new()), notifiers: FuturesUnordered::new() })
}
pub async fn add_clients(


@@ -39,7 +39,6 @@ incrementalmerkletree = "0.3.0"
# Encoding and parsing
serde_json = "1.0.89"
bs58 = "0.4.0"
fxhash = "0.2.1"
# Utilities
lazy_static = "1.4.0"


@@ -16,9 +16,8 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::{sync::Arc, time::Instant};
use std::{collections::HashMap, sync::Arc, time::Instant};
use fxhash::FxHashMap;
use log::debug;
use rand::rngs::OsRng;
use simplelog::{ColorChoice, LevelFilter, TermLogger, TerminalMode};
@@ -166,7 +165,7 @@ use crate::{
pub struct Client {
dao_wallet: DaoWallet,
money_wallets: FxHashMap<[u8; 32], MoneyWallet>,
money_wallets: HashMap<[u8; 32], MoneyWallet>,
cashier_wallet: CashierWallet,
states: StateRegistry,
zk_bins: ZkContractTable,
@@ -176,7 +175,7 @@ impl Client {
fn new() -> Self {
// For this early demo we store all wallets in a single Client.
let dao_wallet = DaoWallet::new();
let money_wallets = FxHashMap::default();
let money_wallets = HashMap::new();
let cashier_wallet = CashierWallet::new();
// Lookup table for smart contract states
@@ -642,8 +641,8 @@ impl DaoWallet {
Ok(())
}
fn balances(&self) -> Result<FxHashMap<String, u64>> {
let mut ret: FxHashMap<String, u64> = FxHashMap::default();
fn balances(&self) -> Result<HashMap<String, u64>> {
let mut ret: HashMap<String, u64> = HashMap::new();
for (coin, is_spent) in &self.own_coins {
if *is_spent {}
if coin.note.token_id == *DRK_ID || coin.note.token_id == *GOV_ID {
@@ -908,8 +907,8 @@ impl MoneyWallet {
Ok(())
}
fn balances(&self) -> Result<FxHashMap<String, u64>> {
let mut ret: FxHashMap<String, u64> = FxHashMap::default();
fn balances(&self) -> Result<HashMap<String, u64>> {
let mut ret: HashMap<String, u64> = HashMap::new();
for (coin, is_spent) in &self.own_coins {
if *is_spent {}
if coin.note.token_id == *DRK_ID || coin.note.token_id == *GOV_ID {


@@ -13,18 +13,12 @@ async-std = "1.12.0"
async-trait = "0.1.59"
blake3 = "1.3.3"
bs58 = "0.4.0"
chrono = "0.4.23"
ctrlc = { version = "3.2.3", features = ["termination"] }
ctrlc = { version = "3.2.4", features = ["termination"] }
darkfi = {path = "../../", features = ["blockchain", "wallet", "rpc", "net"]}
darkfi-sdk = {path = "../../src/sdk"}
darkfi-serial = {path = "../../src/serial"}
easy-parallel = "3.2.0"
fxhash = "0.2.1"
incrementalmerkletree = "0.3.0"
lazy-init = "0.5.1"
log = "0.4.17"
pasta_curves = "0.4.1"
rand = "0.8.5"
serde_json = "1.0.89"
simplelog = "0.12.0"
sled = "0.34.7"
@@ -33,6 +27,6 @@ sqlx = {version = "0.6.2", features = ["runtime-async-std-native-tls", "sqlite"]
url = "2.3.1"
# Argument parsing
serde = {version = "1.0.148", features = ["derive"]}
serde = {version = "1.0.150", features = ["derive"]}
structopt = "0.3.26"
structopt-toml = "0.5.1"


@@ -1,362 +0,0 @@
/* This file is part of DarkFi (https://dark.fi)
*
* Copyright (C) 2020-2022 Dyne.org foundation
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use darkfi_sdk::crypto::{Address, Keypair, PublicKey, SecretKey, TokenId};
use darkfi_serial::{deserialize, serialize};
use fxhash::FxHashMap;
use incrementalmerkletree::Tree;
use log::error;
use serde_json::{json, Value};
use darkfi::{
node::State,
rpc::jsonrpc::{
ErrorCode::{InternalError, InvalidParams, ParseError},
JsonError, JsonResponse, JsonResult,
},
};
use super::Darkfid;
use crate::{server_error, RpcError};
impl Darkfid {
// RPCAPI:
// Attempts to generate a new keypair and returns its address upon success.
//
// --> {"jsonrpc": "2.0", "method": "wallet.keygen", "params": [], "id": 1}
// <-- {"jsonrpc": "2.0", "result": "1DarkFi...", "id": 1}
pub async fn wallet_keygen(&self, id: Value, params: &[Value]) -> JsonResult {
if !params.is_empty() {
return JsonError::new(InvalidParams, None, id).into()
}
match self.client.keygen().await {
Ok(a) => JsonResponse::new(json!(a.to_string()), id).into(),
Err(e) => {
error!("[RPC] wallet.keygen: Failed creating keypair: {}", e);
server_error(RpcError::Keygen, id, None)
}
}
}
// RPCAPI:
// Fetches public keys by given indexes from the wallet and returns it in an
// encoded format. `-1` is supported to fetch all available keys.
//
// --> {"jsonrpc": "2.0", "method": "wallet.get_addrs", "params": [1, 2], "id": 1}
// <-- {"jsonrpc": "2.0", "result": ["foo", "bar"], "id": 1}
pub async fn wallet_get_addrs(&self, id: Value, params: &[Value]) -> JsonResult {
if params.is_empty() {
return JsonError::new(InvalidParams, None, id).into()
}
let mut fetch_all = false;
for (i, elem) in params.iter().enumerate() {
if !elem.is_i64() {
error!("[RPC] wallet.get_addrs: Param {} is not i64", i);
return server_error(RpcError::NaN, id, Some(&format!("Param {} is not i64", i)))
}
if elem.as_i64() == Some(-1) {
if params.len() != 1 {
return server_error(
RpcError::ParseError,
id,
Some("-1 can only be used as a single param"),
)
}
fetch_all = true;
break
}
if elem.as_i64() < Some(-1) {
return server_error(RpcError::LessThanNegOne, id, None)
}
}
let keypairs = match self.client.get_keypairs().await {
Ok(v) => v,
Err(e) => {
error!("[RPC] wallet.get_addrs: Failed fetching keypairs: {}", e);
return server_error(RpcError::KeypairFetch, id, None)
}
};
if fetch_all {
let ret: Vec<String> =
keypairs.iter().map(|x| Address::from(x.public).to_string()).collect();
return JsonResponse::new(json!(ret), id).into()
}
let mut ret = vec![];
for i in params {
// This cast is safe on 64bit since we've already sorted out
// all negative cases above.
let idx = i.as_i64().unwrap() as usize;
if let Some(kp) = keypairs.get(idx) {
ret.push(Some(Address::from(kp.public).to_string()));
} else {
ret.push(None)
}
}
JsonResponse::new(json!(ret), id).into()
}
// RPCAPI:
// Exports the given keypair index.
// Returns the encoded secret key upon success.
//
// --> {"jsonrpc": "2.0", "method": "wallet.export_keypair", "params": [0], "id": 1}
// <-- {"jsonrpc": "2.0", "result": "foobar", "id": 1}
pub async fn wallet_export_keypair(&self, id: Value, params: &[Value]) -> JsonResult {
if params.len() != 1 || !params[0].is_u64() {
return JsonError::new(InvalidParams, None, id).into()
}
let keypairs = match self.client.get_keypairs().await {
Ok(v) => v,
Err(e) => {
error!("[RPC] wallet.export_keypair: Failed fetching keypairs: {}", e);
return server_error(RpcError::KeypairFetch, id, None)
}
};
if let Some(kp) = keypairs.get(params[0].as_u64().unwrap() as usize) {
return JsonResponse::new(json!(serialize(&kp.secret)), id).into()
}
server_error(RpcError::KeypairNotFound, id, None)
}
// RPCAPI:
// Imports a given secret key into the wallet as a keypair.
// Returns the public counterpart as the result upon success.
//
// --> {"jsonrpc": "2.0", "method": "wallet.import_keypair", "params": ["foobar"], "id": 1}
// <-- {"jsonrpc": "2.0", "result": "pubfoobar", "id": 1}
pub async fn wallet_import_keypair(&self, id: Value, params: &[Value]) -> JsonResult {
if params.len() != 1 || !params[0].is_string() {
return JsonError::new(InvalidParams, None, id).into()
}
let bytes: [u8; 32] = match serde_json::from_str(params[0].as_str().unwrap()) {
Ok(v) => v,
Err(e) => {
error!("[RPC] wallet.import_keypair: Failed parsing secret key from string: {}", e);
return server_error(RpcError::InvalidKeypair, id, None)
}
};
let secret = match SecretKey::from_bytes(bytes) {
Ok(v) => v,
Err(e) => {
error!("[RPC] wallet.import_keypair: Failed parsing secret key from string: {}", e);
return server_error(RpcError::InvalidKeypair, id, None)
}
};
let public = PublicKey::from_secret(secret);
let keypair = Keypair { secret, public };
let address = Address::from(public).to_string();
if let Err(e) = self.client.put_keypair(&keypair).await {
error!("[RPC] wallet.import_keypair: Failed inserting keypair into wallet: {}", e);
return JsonError::new(InternalError, None, id).into()
}
JsonResponse::new(json!(address), id).into()
}
// RPCAPI:
// Sets the default wallet address to the given index.
// Returns `true` upon success.
//
// --> {"jsonrpc": "2.0", "method": "wallet.set_default_address", "params": [2], "id": 1}
// <-- {"jsonrpc": "2.0", "result": true, "id": 1}
pub async fn wallet_set_default_address(&self, id: Value, params: &[Value]) -> JsonResult {
if params.len() != 1 || !params[0].is_u64() {
return JsonError::new(InvalidParams, None, id).into()
}
let idx = params[0].as_u64().unwrap();
let keypairs = match self.client.get_keypairs().await {
Ok(v) => v,
Err(e) => {
error!("[RPC] wallet.set_default_address: Failed fetching keypairs: {}", e);
return server_error(RpcError::KeypairFetch, id, None)
}
};
if keypairs.len() as u64 != idx - 1 {
return server_error(RpcError::KeypairNotFound, id, None)
}
let kp = keypairs[idx as usize];
if let Err(e) = self.client.set_default_keypair(&kp.public).await {
error!("[RPC] wallet.set_default_address: Failed setting default keypair: {}", e);
return JsonError::new(InternalError, None, id).into()
}
JsonResponse::new(json!(true), id).into()
}
// RPCAPI:
// Queries the wallet for known tokens with active balances.
// Returns a map of balances, indexed by the token ID.
//
// --> {"jsonrpc": "2.0", "method": "wallet.get_balances", "params": [], "id": 1}
// <-- {"jsonrpc": "2.0", "result": [{"1Foobar...": 100}, {...}]", "id": 1}
pub async fn wallet_get_balances(&self, id: Value, _params: &[Value]) -> JsonResult {
let balances = match self.client.get_balances().await {
Ok(v) => v,
Err(e) => {
error!("[RPC] wallet.get_balances: Failed fetching balances from wallet: {}", e);
return JsonError::new(InternalError, None, id).into()
}
};
// k: token_id, v: [amount]
let mut ret: FxHashMap<String, u64> = FxHashMap::default();
for balance in balances.list {
let token_id = format!("{}", TokenId::from(balance.token_id));
let mut amount = balance.value;
if let Some(prev) = ret.get(&token_id) {
amount += prev;
}
ret.insert(token_id, amount);
}
JsonResponse::new(json!(ret), id).into()
}
// RPCAPI:
// Queries the wallet for a coin containing given parameters (value, token_id, unspent),
// and returns the entire row with the coin's data:
//
// --> {"jsonrpc": "2.0", "method": "wallet.get_coins_valtok", "params": [1234, "F00b4r...", true], "id": 1}
// <-- {"jsonrpc": "2.0", "result": ["coin", "data", ...], "id": 1}
pub async fn wallet_get_coins_valtok(&self, id: Value, params: &[Value]) -> JsonResult {
if params.len() != 3 ||
!params[0].is_u64() ||
!params[1].is_string() ||
!params[2].is_boolean()
{
return JsonError::new(InvalidParams, None, id).into()
}
let value = params[0].as_u64().unwrap();
let unspent = params[2].as_bool().unwrap();
let token_id = match TokenId::try_from(params[1].as_str().unwrap()) {
Ok(v) => v,
Err(e) => {
error!("[RPC] wallet.get_coins_valtok: Failed parsing token_id from base58: {}", e);
return JsonError::new(ParseError, None, id).into()
}
};
let coins = match self.client.get_coins_valtok(value, token_id, unspent).await {
Ok(v) => v,
Err(e) => {
error!("[RPC] wallet.get_coins_valtok: Failed fetching from wallet: {}", e);
return JsonError::new(InternalError, None, id).into()
}
};
let ret: Vec<String> =
coins.iter().map(|x| bs58::encode(serialize(x)).into_string()).collect();
JsonResponse::new(json!(ret), id).into()
}
// RPCAPI:
// Query the state merkle tree for the merkle path of a given leaf position.
//
// --> {"jsonrpc": "2.0", "method": "wallet.get_merkle_path", "params": [3], "id": 1}
// <-- {"jsonrpc": "2.0", "result": ["f091uf1...", "081ff0h10w1h0...", ...], "id": 1}
pub async fn wallet_get_merkle_path(&self, id: Value, params: &[Value]) -> JsonResult {
if params.len() != 1 || !params[0].is_u64() {
return JsonError::new(InvalidParams, None, id).into()
}
let leaf_pos: incrementalmerkletree::Position =
((params[0].as_u64().unwrap() as u64) as usize).into();
let validator_state = self.validator_state.read().await;
let state = validator_state.state_machine.lock().await;
let root = state.tree.root(0).unwrap();
let merkle_path = state.tree.authentication_path(leaf_pos, &root).unwrap();
drop(state);
drop(validator_state);
let ret: Vec<String> =
merkle_path.iter().map(|x| bs58::encode(serialize(x)).into_string()).collect();
JsonResponse::new(json!(ret), id).into()
}
// RPCAPI:
// Try to decrypt a given encrypted note with the secret keys
// found in the wallet.
//
// --> {"jsonrpc": "2.0", "method": "wallet.decrypt_note", params": [ciphertext], "id": 1}
// <-- {"jsonrpc": "2.0", "result": "base58_encoded_plain_note", "id": 1}
pub async fn wallet_decrypt_note(&self, id: Value, params: &[Value]) -> JsonResult {
if params.len() != 1 || !params[0].is_string() {
return JsonError::new(InvalidParams, None, id).into()
}
let bytes = match bs58::decode(params[0].as_str().unwrap()).into_vec() {
Ok(v) => v,
Err(e) => {
error!("[RPC] wallet.decrypt_note: Failed decoding base58 string: {}", e);
return JsonError::new(ParseError, None, id).into()
}
};
let enc_note = match deserialize(&bytes) {
Ok(v) => v,
Err(e) => {
error!("[RPC] wallet.decrypt_note: Failed deserializing into EncryptedNote: {}", e);
return JsonError::new(InternalError, None, id).into()
}
};
let keypairs = match self.client.get_keypairs().await {
Ok(v) => v,
Err(e) => {
error!("[RPC] wallet.decrypt_note: Failed fetching keypairs: {}", e);
return JsonError::new(InternalError, None, id).into()
}
};
for kp in keypairs {
if let Some(note) = State::try_decrypt_note(&enc_note, kp.secret) {
let s = bs58::encode(&serialize(&note)).into_string();
return JsonResponse::new(json!(s), id).into()
}
}
server_error(RpcError::DecryptionFailed, id, None)
}
}


@@ -25,6 +25,6 @@ rand = "0.8.5"
url = "2.3.1"
# Encoding and parsing
serde = {version = "1.0.148", features = ["derive"]}
serde = {version = "1.0.150", features = ["derive"]}
serde_json = "1.0.89"
structopt = "0.3.26"


@@ -18,11 +18,10 @@ darkfi-serial = {path = "../../../src/serial"}
dryoc = "0.4.2"
easy-parallel = "3.2.0"
futures = "0.3.25"
fxhash = "0.2.1"
lazy_static = "1.4.0"
log = "0.4.17"
serde = "1.0.148"
serde_derive = "1.0.148"
serde = "1.0.150"
serde_derive = "1.0.150"
serde_json = "1.0.89"
signal-hook-async-std = "0.2.2"
signal-hook = "0.3.14"


@@ -17,6 +17,7 @@
*/
use std::{
collections::HashMap,
fs::{create_dir_all, read_dir, remove_file},
io::stdin,
path::{Path, PathBuf},
@@ -30,7 +31,6 @@ use async_std::{
};
use dryoc::classic::crypto_secretbox::{crypto_secretbox_keygen, Key};
use futures::{select, FutureExt};
use fxhash::FxHashMap;
use lazy_static::lazy_static;
use log::{debug, error, info, warn};
use signal_hook::consts::{SIGHUP, SIGINT, SIGQUIT, SIGTERM};
@@ -62,7 +62,7 @@ type Patches = (Vec<Patch>, Vec<Patch>, Vec<Patch>, Vec<Patch>);
lazy_static! {
/// This is where we hold our workspaces, so we are also able to refresh them on SIGHUP.
static ref WORKSPACES: RwLock<FxHashMap<String, Key>> = RwLock::new(FxHashMap::default());
static ref WORKSPACES: RwLock<HashMap<String, Key>> = RwLock::new(HashMap::new());
}
pub const CONFIG_FILE: &str = "darkwikid_config.toml";
@@ -595,7 +595,7 @@ async fn realmain(args: Args, executor: Arc<smol::Executor<'_>>) -> Result<()> {
// ====
// Raft
// ====
let seen_net_msgs = Arc::new(Mutex::new(FxHashMap::default()));
let seen_net_msgs = Arc::new(Mutex::new(HashMap::new()));
let store_raft = store_path.join("darkwiki.db");
let raft_settings = RaftSettings { datastore_path: store_raft, ..RaftSettings::default() };
// FIXME: This is a bad design, and needs a proper rework.


@@ -488,7 +488,7 @@ impl Decodable for OpMethods {
#[cfg(test)]
mod tests {
use super::*;
use darkfi::util::gen_id;
use darkfi::raft::gen_id;
use darkfi_serial::{deserialize, serialize};
#[test]


@@ -17,6 +17,7 @@
*/
use std::{
collections::HashMap,
fs::{create_dir_all, read_dir},
path::{Path, PathBuf},
};
@@ -27,7 +28,6 @@ use dryoc::{
constants::CRYPTO_SECRETBOX_MACBYTES,
dryocbox::NewByteArray,
};
use fxhash::FxHashMap;
use log::{error, info, warn};
use unicode_segmentation::UnicodeSegmentation;
@@ -54,11 +54,11 @@ fn parse_b58_secret(s: &str) -> Result<[u8; 32]> {
}
}
/// Parse a TOML string for configured workspaces and return an `FxHashMap`
/// Parse a TOML string for configured workspaces and return a `HashMap`
/// of parsed data. Does not error on failures, just warns if something is
/// misconfigured.
pub fn parse_workspaces(toml_str: &str) -> FxHashMap<String, Key> {
let mut ret = FxHashMap::default();
pub fn parse_workspaces(toml_str: &str) -> HashMap<String, Key> {
let mut ret = HashMap::new();
let settings: Args = match toml::from_str(toml_str) {
Ok(v) => v,


@@ -30,10 +30,9 @@ simplelog = "0.12.0"
log = "0.4.17"
num_cpus = "1.14.0"
url = "2.3.1"
fxhash = "0.2.1"
thiserror = "1.0.37"
# Encoding and parsing
serde_json = "1.0.89"
serde = {version = "1.0.148", features = ["derive"]}
serde = {version = "1.0.150", features = ["derive"]}
hex = "0.4.3"


@@ -15,15 +15,15 @@
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::collections::HashMap;
use async_std::sync::{Arc, Mutex};
use fxhash::FxHashMap;
use serde::{Deserialize, Serialize};
use darkfi::util::time::NanoTimestamp;
type MsgLog = Vec<(NanoTimestamp, String, String)>;
type MsgMap = Mutex<FxHashMap<String, MsgLog>>;
type MsgMap = Mutex<HashMap<String, MsgLog>>;
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, Eq)]
pub enum Session {
@@ -46,13 +46,13 @@ pub enum SelectableObject {
pub struct Model {
pub msg_map: MsgMap,
pub msg_log: Mutex<MsgLog>,
pub selectables: Mutex<FxHashMap<String, SelectableObject>>,
pub selectables: Mutex<HashMap<String, SelectableObject>>,
}
impl Model {
pub fn new() -> Arc<Self> {
let selectables = Mutex::new(FxHashMap::default());
let msg_map = Mutex::new(FxHashMap::default());
let selectables = Mutex::new(HashMap::new());
let msg_map = Mutex::new(HashMap::new());
let msg_log = Mutex::new(Vec::new());
Arc::new(Model { msg_map, msg_log, selectables })
}


@@ -16,7 +16,8 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use fxhash::FxHashMap;
use std::collections::HashMap;
use tui::{
backend::Backend,
layout::{Constraint, Direction, Layout, Rect},
@@ -36,13 +37,13 @@ use crate::{
//use log::debug;
type MsgLog = Vec<(NanoTimestamp, String, String)>;
type MsgMap = FxHashMap<String, MsgLog>;
type MsgMap = HashMap<String, MsgLog>;
#[derive(Debug, Clone)]
pub struct View {
pub id_menu: IdMenu,
pub msg_list: MsgList,
pub selectables: FxHashMap<String, SelectableObject>,
pub selectables: HashMap<String, SelectableObject>,
pub ordered_list: Vec<String>,
}
@@ -54,16 +55,16 @@ impl Default for View {
impl<'a> View {
pub fn new() -> Self {
let msg_map = FxHashMap::default();
let msg_map = HashMap::new();
let msg_list = MsgList::new(msg_map, 0);
let selectables = FxHashMap::default();
let selectables = HashMap::new();
let id_menu = IdMenu::new(Vec::new());
let ordered_list = Vec::new();
Self { id_menu, msg_list, selectables, ordered_list }
}
pub fn update(&mut self, msg_map: MsgMap, selectables: FxHashMap<String, SelectableObject>) {
pub fn update(&mut self, msg_map: MsgMap, selectables: HashMap<String, SelectableObject>) {
self.update_selectable(selectables.clone());
self.update_msg_list(msg_map);
self.update_id_menu(selectables);
@@ -71,7 +72,7 @@ impl<'a> View {
self.make_ordered_list();
}
fn update_id_menu(&mut self, selectables: FxHashMap<String, SelectableObject>) {
fn update_id_menu(&mut self, selectables: HashMap<String, SelectableObject>) {
for id in selectables.keys() {
if !self.id_menu.ids.iter().any(|i| i == id) {
self.id_menu.ids.push(id.to_string());
@@ -79,7 +80,7 @@ impl<'a> View {
}
}
fn update_selectable(&mut self, selectables: FxHashMap<String, SelectableObject>) {
fn update_selectable(&mut self, selectables: HashMap<String, SelectableObject>) {
for (id, obj) in selectables {
self.selectables.insert(id, obj);
}
@@ -487,11 +488,11 @@ impl MsgList {
#[derive(Debug, Clone)]
pub struct NodeInfoView {
pub index: usize,
pub infos: FxHashMap<String, NodeInfo>,
pub infos: HashMap<String, NodeInfo>,
}
impl NodeInfoView {
pub fn new(infos: FxHashMap<String, NodeInfo>) -> NodeInfoView {
pub fn new(infos: HashMap<String, NodeInfo>) -> NodeInfoView {
let index = 0;
NodeInfoView { index, infos }


@@ -14,8 +14,8 @@ async-trait = "0.1.59"
blake3 = "1.3.3"
bs58 = "0.4.0"
chrono = "0.4.23"
ctrlc = { version = "3.2.3", features = ["termination"] }
darkfi = {path = "../../", features = ["blockchain", "wallet", "rpc", "net", "zkas", "crypto"]}
ctrlc = { version = "3.2.4", features = ["termination"] }
darkfi = {path = "../../", features = ["blockchain", "wallet", "rpc", "net", "zkas"]}
darkfi-serial = {path = "../../src/serial"}
darkfi-sdk = {path = "../../src/sdk"}
darkfi-money-contract = {path = "../../src/contract/money", features = ["no-entrypoint", "client"]}
@@ -32,6 +32,6 @@ sqlx = {version = "0.6.2", features = ["runtime-async-std-native-tls", "sqlite"]
url = "2.3.1"
# Argument parsing
serde = {version = "1.0.148", features = ["derive"]}
serde = {version = "1.0.150", features = ["derive"]}
structopt = "0.3.26"
structopt-toml = "0.5.1"


@@ -15,7 +15,7 @@ darkfi-serial = {path = "../../../src/serial"}
# Async
async-std = "1.12.0"
async-trait = "0.1.59"
ctrlc = { version = "3.2.3", features = ["termination"] }
ctrlc = { version = "3.2.4", features = ["termination"] }
easy-parallel = "3.2.0"
smol = "1.3.0"
@@ -27,6 +27,6 @@ simplelog = "0.12.0"
url = "2.3.1"
# Argument parsing
serde = {version = "1.0.148", features = ["derive"]}
serde = {version = "1.0.150", features = ["derive"]}
structopt = "0.3.26"
structopt-toml = "0.5.1"


@@ -30,15 +30,14 @@ rand = "0.8.5"
clap = {version = "4.0.29", features = ["derive"]}
log = "0.4.17"
simplelog = "0.12.0"
fxhash = "0.2.1"
ctrlc = { version = "3.2.3", features = ["termination"] }
ctrlc = { version = "3.2.4", features = ["termination"] }
url = "2.3.1"
chrono = "0.4.23"
ripemd = "0.1.3"
# Encoding and parsing
serde_json = "1.0.89"
serde = {version = "1.0.148", features = ["derive"]}
serde = {version = "1.0.150", features = ["derive"]}
structopt = "0.3.26"
structopt-toml = "0.5.1"
bs58 = "0.4.0"


@@ -16,11 +16,12 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::collections::HashMap;
use crypto_box::{
aead::{Aead, AeadCore},
SalsaBox,
};
use fxhash::FxHashMap;
use rand::rngs::OsRng;
use crate::{
@@ -71,8 +72,8 @@ pub fn encrypt(salt_box: &SalsaBox, plaintext: &str) -> String {
pub fn decrypt_target(
contact: &mut String,
privmsg: &mut Privmsg,
configured_chans: FxHashMap<String, ChannelInfo>,
configured_contacts: FxHashMap<String, ContactInfo>,
configured_chans: HashMap<String, ChannelInfo>,
configured_contacts: HashMap<String, ContactInfo>,
) {
for chan_name in configured_chans.keys() {
let chan_info = configured_chans.get(chan_name).unwrap();


@@ -16,7 +16,7 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::{fs::File, net::SocketAddr};
use std::{collections::HashMap, fs::File, net::SocketAddr};
use async_std::{
net::TcpListener,
@@ -24,7 +24,6 @@ use async_std::{
};
use futures::{io::BufReader, AsyncRead, AsyncReadExt, AsyncWrite};
use futures_rustls::{rustls, TlsAcceptor};
use fxhash::FxHashMap;
use log::{error, info};
use smol::Executor;
@@ -60,12 +59,12 @@ pub struct IrcConfig {
// user config
pub nickname: String,
pub password: String,
pub capabilities: FxHashMap<String, bool>,
pub capabilities: HashMap<String, bool>,
// channels and contacts
pub auto_channels: Vec<String>,
pub configured_chans: FxHashMap<String, ChannelInfo>,
pub configured_contacts: FxHashMap<String, ContactInfo>,
pub configured_chans: HashMap<String, ChannelInfo>,
pub configured_contacts: HashMap<String, ContactInfo>,
}
impl IrcConfig {
@@ -80,7 +79,7 @@ impl IrcConfig {
let configured_chans = parse_configured_channels(&toml_contents)?;
let configured_contacts = parse_configured_contacts(&toml_contents)?;
let mut capabilities = FxHashMap::default();
let mut capabilities = HashMap::new();
capabilities.insert("no-history".to_string(), false);
Ok(Self {
is_nick_init: false,


@@ -16,11 +16,10 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::{cmp::Ordering, fmt, io};
use std::{cmp::Ordering, collections::HashMap, fmt, io};
use async_std::sync::Arc;
use darkfi_serial::{Decodable, Encodable, ReadExt, SerialDecodable, SerialEncodable};
use fxhash::FxHashMap;
use ripemd::{Digest, Ripemd256};
use darkfi::{Error, Result};
@@ -129,8 +128,8 @@ struct EventNode {
pub struct Model {
// This is periodically updated so we discard old nodes
current_root: EventId,
orphans: FxHashMap<EventId, Event>,
event_map: FxHashMap<EventId, EventNode>,
orphans: HashMap<EventId, Event>,
event_map: HashMap<EventId, EventNode>,
events_queue: EventsQueueArc,
}
@@ -153,10 +152,10 @@ impl Model {
let root_node_id = root_node.event.hash();
let mut event_map = FxHashMap::default();
let mut event_map = HashMap::new();
event_map.insert(root_node_id, root_node);
Self { current_root: root_node_id, orphans: FxHashMap::default(), event_map, events_queue }
Self { current_root: root_node_id, orphans: HashMap::new(), event_map, events_queue }
}
pub fn add(&mut self, event: Event) {


@@ -16,12 +16,11 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::collections::VecDeque;
use std::collections::{HashMap, VecDeque};
use async_std::sync::{Arc, Mutex};
use async_trait::async_trait;
use darkfi_serial::{SerialDecodable, SerialEncodable};
use fxhash::FxHashMap;
use log::debug;
use rand::{rngs::OsRng, RngCore};
use smol::Executor;
@@ -105,12 +104,12 @@ impl<T: Eq + PartialEq + Clone> Seen<T> {
#[derive(Default)]
pub struct UnreadEvents {
events: FxHashMap<EventId, Event>,
events: HashMap<EventId, Event>,
}
impl UnreadEvents {
pub fn new() -> Self {
Self { events: FxHashMap::default() }
Self { events: HashMap::default() }
}
fn contains(&self, key: &EventId) -> bool {


@@ -15,9 +15,9 @@
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::collections::HashMap;
use crypto_box::SalsaBox;
use fxhash::FxHashMap;
use log::{info, warn};
use serde::Deserialize;
use structopt::StructOpt;
@@ -193,8 +193,8 @@ fn parse_priv_key(data: &str) -> Result<String> {
/// [contact."nick"]
/// contact_pubkey = "7CkVuFgwTUpJn5Sv67Q3fyEDpa28yrSeL5Hg2GqQ4jfM"
/// ```
pub fn parse_configured_contacts(data: &str) -> Result<FxHashMap<String, ContactInfo>> {
let mut ret = FxHashMap::default();
pub fn parse_configured_contacts(data: &str) -> Result<HashMap<String, ContactInfo>> {
let mut ret = HashMap::new();
let map = match toml::from_str(data) {
Ok(Value::Table(m)) => m,
@@ -296,8 +296,8 @@ pub fn parse_configured_contacts(data: &str) -> Result<FxHashMap<String, Contact
/// secret = "7CkVuFgwTUpJn5Sv67Q3fyEDpa28yrSeL5Hg2GqQ4jfM"
/// topic = "Dank Memes"
/// ```
pub fn parse_configured_channels(data: &str) -> Result<FxHashMap<String, ChannelInfo>> {
let mut ret = FxHashMap::default();
pub fn parse_configured_channels(data: &str) -> Result<HashMap<String, ChannelInfo>> {
let mut ret = HashMap::new();
let map = match toml::from_str(data)? {
Value::Table(m) => m,


@@ -16,19 +16,19 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use fxhash::FxHashMap;
use std::collections::HashMap;
use darkfi::Result;
use crate::model::{Event, EventId, EventsQueueArc};
struct View {
seen: FxHashMap<EventId, Event>,
seen: HashMap<EventId, Event>,
}
impl View {
pub fn new() -> Self {
Self { seen: FxHashMap::default() }
Self { seen: HashMap::new() }
}
pub async fn process(&mut self, events_queue: EventsQueueArc) -> Result<()> {


@@ -29,15 +29,14 @@ rand = "0.8.5"
clap = {version = "4.0.29", features = ["derive"]}
log = "0.4.17"
simplelog = "0.12.0"
fxhash = "0.2.1"
ctrlc = { version = "3.2.3", features = ["termination"] }
ctrlc = { version = "3.2.4", features = ["termination"] }
url = "2.3.1"
chrono = "0.4.23"
ripemd = "0.1.3"
# Encoding and parsing
serde_json = "1.0.89"
serde = {version = "1.0.148", features = ["derive"]}
serde = {version = "1.0.150", features = ["derive"]}
structopt = "0.3.26"
structopt-toml = "0.5.1"
bs58 = "0.4.0"


@@ -16,11 +16,10 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::{cmp::Ordering, fmt};
use std::{cmp::Ordering, collections::HashMap, fmt};
use async_std::sync::{Arc, Mutex};
use darkfi_serial::{Encodable, SerialDecodable, SerialEncodable};
use fxhash::FxHashMap;
use ripemd::{Digest, Ripemd256};
use crate::{
@@ -80,8 +79,8 @@ pub type ModelPtr = Arc<Mutex<Model>>;
pub struct Model {
// This is periodically updated so we discard old nodes
current_root: EventId,
orphans: FxHashMap<EventId, Event>,
event_map: FxHashMap<EventId, EventNode>,
orphans: HashMap<EventId, Event>,
event_map: HashMap<EventId, EventNode>,
events_queue: EventsQueuePtr,
}
@@ -104,10 +103,10 @@ impl Model {
let root_node_id = root_node.event.hash();
let mut event_map = FxHashMap::default();
let mut event_map = HashMap::new();
event_map.insert(root_node_id, root_node);
Self { current_root: root_node_id, orphans: FxHashMap::default(), event_map, events_queue }
Self { current_root: root_node_id, orphans: HashMap::new(), event_map, events_queue }
}
pub fn add(&mut self, event: Event) {


@@ -16,12 +16,11 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::collections::VecDeque;
use std::collections::{HashMap, VecDeque};
use async_std::sync::{Arc, Mutex};
use async_trait::async_trait;
use darkfi_serial::{SerialDecodable, SerialEncodable};
use fxhash::FxHashMap;
use log::debug;
use rand::{rngs::OsRng, RngCore};
@@ -106,12 +105,12 @@ impl<T: Eq + PartialEq + Clone> Seen<T> {
pub type UnreadEventsPtr = Arc<Mutex<UnreadEvents>>;
pub struct UnreadEvents {
events: FxHashMap<EventId, Event>,
events: HashMap<EventId, Event>,
}
impl UnreadEvents {
pub fn new() -> UnreadEventsPtr {
Arc::new(Mutex::new(Self { events: FxHashMap::default() }))
Arc::new(Mutex::new(Self { events: HashMap::new() }))
}
fn contains(&self, key: &EventId) -> bool {


@@ -16,7 +16,7 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use fxhash::FxHashMap;
use std::collections::HashMap;
use darkfi::Result;
@@ -26,13 +26,13 @@ use crate::{
};
pub struct View {
seen: FxHashMap<EventId, Event>,
seen: HashMap<EventId, Event>,
events_queue: EventsQueuePtr,
}
impl View {
pub fn new(events_queue: EventsQueuePtr) -> Self {
Self { seen: FxHashMap::default(), events_queue }
Self { seen: HashMap::new(), events_queue }
}
pub async fn process(&mut self) -> Result<()> {


@@ -14,19 +14,18 @@ darkfi = {path = "../../", features = ["net", "rpc"]}
# Async
async-std = "1.12.0"
async-trait = "0.1.59"
ctrlc = { version = "3.2.3", features = ["termination"] }
ctrlc = { version = "3.2.4", features = ["termination"] }
easy-parallel = "3.2.0"
smol = "1.3.0"
# Misc
fxhash = "0.2.1"
log = "0.4.17"
serde_json = "1.0.89"
simplelog = "0.12.0"
url = "2.3.1"
# Argument parsing
serde = {version = "1.0.148", features = ["derive"]}
serde = {version = "1.0.150", features = ["derive"]}
structopt = "0.3.26"
structopt-toml = "0.5.1"
toml = "0.5.9"


@@ -16,7 +16,8 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use fxhash::FxHashMap;
use std::collections::HashMap;
use log::{info, warn};
use structopt_toml::{serde::Deserialize, structopt::StructOpt, StructOptToml};
use toml::Value;
@@ -73,8 +74,8 @@ pub struct NetInfo {
/// seeds = []
/// peers = []
/// ```
pub fn parse_configured_networks(data: &str) -> Result<FxHashMap<String, NetInfo>> {
let mut ret = FxHashMap::default();
pub fn parse_configured_networks(data: &str) -> Result<HashMap<String, NetInfo>> {
let mut ret = HashMap::new();
if let Value::Table(map) = toml::from_str(data)? {
if map.contains_key("network") && map["network"].is_table() {


@@ -16,11 +16,13 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::path::Path;
use std::{
collections::{HashMap, HashSet},
path::Path,
};
use async_std::sync::Arc;
use async_trait::async_trait;
use fxhash::{FxHashMap, FxHashSet};
use log::{error, info, warn};
use serde_json::{json, Value};
use structopt_toml::StructOptToml;
@@ -88,9 +90,9 @@ struct Lilith {
}
impl Lilith {
async fn spawns_hosts(&self) -> FxHashMap<String, Vec<String>> {
async fn spawns_hosts(&self) -> HashMap<String, Vec<String>> {
// Building urls string
let mut spawns = FxHashMap::default();
let mut spawns = HashMap::new();
for spawn in &self.spawns {
spawns.insert(spawn.name.clone(), spawn.addresses().await);
}
@@ -180,7 +182,7 @@ async fn spawn_network(
name: &str,
info: NetInfo,
urls: Vec<Url>,
saved_hosts: Option<&FxHashSet<Url>>,
saved_hosts: Option<&HashSet<Url>>,
ex: Arc<smol::Executor<'_>>,
) -> Result<Spawn> {
let mut full_urls = Vec::new();
@@ -237,8 +239,8 @@ async fn spawn_network(
}
/// Retrieve saved hosts for provided networks
fn load_hosts(path: &Path, networks: &[&str]) -> FxHashMap<String, FxHashSet<Url>> {
let mut saved_hosts = FxHashMap::default();
fn load_hosts(path: &Path, networks: &[&str]) -> HashMap<String, HashSet<Url>> {
let mut saved_hosts = HashMap::new();
info!("Retrieving saved hosts from: {:?}", path);
let contents = load_file(path);
if let Err(e) = contents {
@@ -251,7 +253,7 @@ fn load_hosts(path: &Path, networks: &[&str]) -> FxHashMap<String, FxHashSet<Url
if networks.contains(&data[0]) {
let mut hosts = match saved_hosts.get(data[0]) {
Some(hosts) => hosts.clone(),
None => FxHashSet::default(),
None => HashSet::new(),
};
let url = match Url::parse(data[1]) {
Ok(u) => u,
@@ -269,7 +271,7 @@ fn load_hosts(path: &Path, networks: &[&str]) -> FxHashMap<String, FxHashSet<Url
}
/// Save spawns current hosts
fn save_hosts(path: &Path, spawns: FxHashMap<String, Vec<String>>) {
fn save_hosts(path: &Path, spawns: HashMap<String, Vec<String>>) {
let mut string = "".to_string();
for (name, urls) in spawns {
for url in urls {

View File

@@ -14,10 +14,9 @@ chrono = "0.4.23"
clap = {version = "4.0.29", features = ["derive"]}
colored = "2.0.0"
darkfi = { path = "../../../", features = ["rpc"]}
fxhash = "0.2.1"
log = "0.4.17"
prettytable-rs = "0.9.0"
serde = {version = "1.0.148", features = ["derive"]}
serde = {version = "1.0.150", features = ["derive"]}
serde_json = "1.0.89"
simplelog = "0.12.0"
term_grid = { git = "https://github.com/Dastan-glitch/rust-term-grid.git" }

View File

@@ -16,9 +16,10 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::collections::HashMap;
use chrono::{Datelike, Duration, NaiveDate, NaiveDateTime, Utc};
use colored::Colorize;
use fxhash::FxHashMap;
use term_grid::{Cell, Direction, Filling, Grid, GridOptions};
use darkfi::{Error, Result};
@@ -40,7 +41,7 @@ const BLUE: u8 = 50;
/// assignee in a hashmap, draw a heatmap of how many stopped tasks in each day of the
/// specified month and assignee.
pub fn drawdown(date: String, tasks: Vec<TaskInfo>, assignee: Option<String>) -> Result<()> {
let mut ret = FxHashMap::default();
let mut ret = HashMap::new();
let assignees = assignees(tasks.clone());
if assignee.is_none() {
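
The drawdown heatmap boils down to a per-day counter; a minimal sketch of that counting pattern with the std entry API (dates are plain strings here rather than the real `NaiveDate` handling):

```rust
use std::collections::HashMap;

fn main() {
    // Dates are plain strings purely for illustration.
    let stopped_tasks = ["2022-12-01", "2022-12-01", "2022-12-03"];

    let mut per_day: HashMap<&str, u64> = HashMap::new();
    for day in stopped_tasks {
        *per_day.entry(day).or_insert(0) += 1;
    }

    assert_eq!(per_day["2022-12-01"], 2);
    assert_eq!(per_day["2022-12-03"], 1);
}
```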

View File

@@ -25,12 +25,11 @@ simplelog = "0.12.0"
rand = "0.8.5"
chrono = "0.4.23"
thiserror = "1.0.37"
ctrlc = { version = "3.2.3", features = ["termination"] }
ctrlc = { version = "3.2.4", features = ["termination"] }
url = "2.3.1"
fxhash = "0.2.1"
# Encoding and parsing
serde = {version = "1.0.148", features = ["derive"]}
serde = {version = "1.0.150", features = ["derive"]}
serde_json = "1.0.89"
structopt = "0.3.26"
structopt-toml = "0.5.1"

View File

@@ -16,12 +16,11 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::{fs::create_dir_all, path::PathBuf};
use std::{collections::HashMap, fs::create_dir_all, path::PathBuf};
use async_std::sync::Mutex;
use async_trait::async_trait;
use crypto_box::SalsaBox;
use fxhash::FxHashMap;
use log::{debug, warn};
use serde::{Deserialize, Serialize};
use serde_json::{json, Value};
@@ -48,7 +47,7 @@ pub struct JsonRpcInterface {
notify_queue_sender: smol::channel::Sender<TaskInfo>,
nickname: String,
workspace: Mutex<String>,
workspaces: FxHashMap<String, SalsaBox>,
workspaces: HashMap<String, SalsaBox>,
p2p: net::P2pPtr,
}
@@ -98,7 +97,7 @@ impl JsonRpcInterface {
dataset_path: PathBuf,
notify_queue_sender: smol::channel::Sender<TaskInfo>,
nickname: String,
workspaces: FxHashMap<String, SalsaBox>,
workspaces: HashMap<String, SalsaBox>,
p2p: net::P2pPtr,
) -> Self {
let workspace = Mutex::new(workspaces.iter().last().unwrap().0.clone());

View File

@@ -17,6 +17,7 @@
*/
use std::{
collections::HashMap,
env,
fs::{create_dir_all, remove_dir_all},
io::stdin,
@@ -30,7 +31,6 @@ use crypto_box::{
};
use darkfi_serial::{deserialize, serialize, SerialDecodable, SerialEncodable};
use futures::{select, FutureExt};
use fxhash::FxHashMap;
use log::{debug, error, info, warn};
use structopt_toml::StructOptToml;
@@ -56,8 +56,8 @@ use crate::{
task_info::TaskInfo,
};
fn get_workspaces(settings: &Args) -> Result<FxHashMap<String, SalsaBox>> {
let mut workspaces = FxHashMap::default();
fn get_workspaces(settings: &Args) -> Result<HashMap<String, SalsaBox>> {
let mut workspaces = HashMap::new();
for workspace in settings.workspaces.iter() {
let workspace: Vec<&str> = workspace.split(':').collect();
@@ -114,7 +114,7 @@ async fn start_sync_loop(
raft_msgs_sender: smol::channel::Sender<EncryptedTask>,
commits_recv: smol::channel::Receiver<EncryptedTask>,
datastore_path: std::path::PathBuf,
workspaces: FxHashMap<String, SalsaBox>,
workspaces: HashMap<String, SalsaBox>,
mut rng: crypto_box::rand_core::OsRng,
) -> TaudResult<()> {
loop {
@@ -140,7 +140,7 @@ async fn start_sync_loop(
async fn on_receive_task(
task: &EncryptedTask,
datastore_path: &Path,
workspaces: &FxHashMap<String, SalsaBox>,
workspaces: &HashMap<String, SalsaBox>,
) -> TaudResult<()> {
for (workspace, salsa_box) in workspaces.iter() {
let task = decrypt_task(task, salsa_box);
@@ -230,7 +230,7 @@ async fn realmain(settings: Args, executor: Arc<smol::Executor<'_>>) -> Result<(
//
// Raft
//
let seen_net_msgs = Arc::new(Mutex::new(FxHashMap::default()));
let seen_net_msgs = Arc::new(Mutex::new(HashMap::new()));
let datastore_raft = datastore_path.join("tau.db");
let raft_settings = RaftSettings { datastore_path: datastore_raft, ..RaftSettings::default() };

View File

@@ -22,10 +22,12 @@ use darkfi_serial::{SerialDecodable, SerialEncodable};
use log::debug;
use serde::{Deserialize, Serialize};
use darkfi::util::{
use darkfi::{
raft::gen_id,
util::{
file::{load_json_file, save_json_file},
gen_id,
time::Timestamp,
},
};
use crate::{
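
`gen_id` now lives under `darkfi::raft` rather than `darkfi::util` (its definition moves into `raft/consensus.rs` in a later hunk). A usage sketch of the new path:

```rust
use darkfi::raft::gen_id;

fn main() {
    // Random alphanumeric identifier; the caller picks the length.
    let ref_id = gen_id(30);
    assert_eq!(ref_id.len(), 30);
    println!("{}", ref_id);
}
```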

View File

@@ -11,10 +11,10 @@ edition = "2021"
[dependencies]
bs58 = "0.4.0"
clap = {version = "4.0.29", features = ["derive"]}
ctrlc = "3.2.3"
darkfi = {path = "../../", features = ["crypto"]}
ctrlc = "3.2.4"
darkfi = {path = "../../"}
darkfi-sdk = {path = "../../src/sdk"}
indicatif = "0.17.2"
num_cpus = "1.14.0"
rand = "0.8.5"
rayon = "1.6.0"
rayon = "1.6.1"

View File

@@ -26,6 +26,6 @@ simplelog = "0.12.0"
url = "2.3.1"
serde_json = "1.0.89"
serde = {version = "1.0.148", features = ["derive"]}
serde = {version = "1.0.150", features = ["derive"]}
toml = "0.5.9"
# ANCHOR_END: dependencies

View File

@@ -19,13 +19,13 @@ pasta_curves = "0.4.1"
# Async
async-std = "1.12.0"
async-trait = "0.1.59"
ctrlc = { version = "3.2.3", features = ["termination"] }
ctrlc = { version = "3.2.4", features = ["termination"] }
easy-parallel = "3.2.0"
smol = "1.3.0"
# Argument parsing
serde = "1.0.148"
serde_derive = "1.0.148"
serde = "1.0.150"
serde_derive = "1.0.150"
structopt = "0.3.26"
structopt-toml = "0.5.1"

View File

@@ -15,7 +15,7 @@ async-executor = "1.5.0"
async-std = "1.12.0"
async-trait = "0.1.59"
blake3 = "1.3.3"
ctrlc = { version = "3.2.3", features = ["termination"] }
ctrlc = { version = "3.2.4", features = ["termination"] }
easy-parallel = "3.2.0"
futures-lite = "1.12.0"
log = "0.4.17"
@@ -24,8 +24,8 @@ simplelog = "0.12.0"
url = "2.3.1"
# Argument parsing
serde = "1.0.148"
serde_derive = "1.0.148"
serde = "1.0.150"
serde_derive = "1.0.150"
structopt = "0.3.26"
structopt-toml = "0.5.1"

View File

@@ -13,5 +13,5 @@ blake3 = "1.3.3"
darkfi = {path = "../../../", features = ["blockchain", "wallet", "rpc"]}
darkfi-sdk = {path = "../../../src/sdk"}
darkfi-serial = {path = "../../../src/serial"}
serde = "1.0.148"
serde = "1.0.150"
sled = "0.34.7"

View File

@@ -24,12 +24,12 @@ simplelog = "0.12.0"
rand = "0.8.5"
chrono = "0.4.23"
thiserror = "1.0.37"
ctrlc = { version = "3.2.3", features = ["termination"] }
ctrlc = { version = "3.2.4", features = ["termination"] }
url = "2.3.1"
fxhash = "0.2.1"
# Encoding and parsing
serde = {version = "1.0.148", features = ["derive"]}
serde = {version = "1.0.150", features = ["derive"]}
serde_json = "1.0.89"
structopt = "0.3.26"
hex = "0.4.3"

View File

@@ -16,5 +16,5 @@ lazy-init = "0.5.1"
log = "0.4.17"
pasta_curves = "0.4.1"
rand = "0.8.5"
serde = {version = "1.0.148", features = ["derive"]}
serde = {version = "1.0.150", features = ["derive"]}
sled = "0.34.7"

View File

@@ -18,10 +18,12 @@
use std::fmt;
use darkfi_sdk::crypto::{constants::MERKLE_DEPTH, MerkleNode};
use darkfi_sdk::{
crypto::{constants::MERKLE_DEPTH, MerkleNode},
incrementalmerkletree::{bridgetree::BridgeTree, Tree},
pasta::pallas,
};
use darkfi_serial::{serialize, SerialDecodable, SerialEncodable};
use incrementalmerkletree::{bridgetree::BridgeTree, Tree};
use pasta_curves::pallas;
use super::{
constants::{BLOCK_MAGIC_BYTES, BLOCK_VERSION},
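
Several source files now take `incrementalmerkletree` and the pasta curves through `darkfi_sdk`'s re-exports instead of importing the crates directly, which keeps every consumer pinned to the same versions. A sketch of the consolidated import path, leaning on types that appear elsewhere in this commit:

```rust
use darkfi_sdk::{
    crypto::{constants::MERKLE_DEPTH, MerkleNode},
    incrementalmerkletree::{bridgetree::BridgeTree, Tree},
    pasta::pallas,
};

fn main() {
    // Build a small Merkle tree entirely through the re-exported paths.
    let mut tree = BridgeTree::<MerkleNode, MERKLE_DEPTH>::new(100);
    tree.append(&MerkleNode::from(pallas::Base::from(42u64)));
    tree.witness();
    println!("root: {:?}", tree.root(0).unwrap());
}
```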

View File

@@ -23,10 +23,10 @@ use darkfi_sdk::{
util::mod_r_p,
MerkleNode, SecretKey,
},
incrementalmerkletree::{bridgetree::BridgeTree, Tree},
pasta::{arithmetic::CurveAffine, group::Curve, pallas},
};
use halo2_proofs::{arithmetic::Field, circuit::Value};
use incrementalmerkletree::{bridgetree::BridgeTree, Tree};
use log::info;
use rand::rngs::OsRng;

View File

@@ -19,11 +19,13 @@
use std::time::Duration;
use chrono::{NaiveDateTime, Utc};
use darkfi_sdk::crypto::{constants::MERKLE_DEPTH, MerkleNode};
use darkfi_sdk::{
crypto::{constants::MERKLE_DEPTH, MerkleNode},
incrementalmerkletree::bridgetree::BridgeTree,
pasta::{group::ff::PrimeField, pallas},
};
use darkfi_serial::{SerialDecodable, SerialEncodable};
use incrementalmerkletree::bridgetree::BridgeTree;
use log::info;
use pasta_curves::{group::ff::PrimeField, pallas};
use rand::{thread_rng, Rng};
use super::{

View File

@@ -1,3 +1,21 @@
/* This file is part of DarkFi (https://dark.fi)
*
* Copyright (C) 2020-2022 Dyne.org foundation
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use darkfi_serial::{SerialDecodable, SerialEncodable};
use crate::consensus::{EncryptedTxRcpt, TransferStx};

View File

@@ -16,12 +16,11 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use super::Float10;
use darkfi_sdk::pasta::pallas;
use dashu::integer::{IBig, Sign};
use log::debug;
use pasta_curves::pallas;
//use pasta_curves::{group::ff::PrimeField};
//use dashu::integer::{UBig};
use super::Float10;
pub fn fbig2ibig(f: Float10) -> IBig {
let rad = IBig::try_from(10).unwrap();

View File

@@ -26,11 +26,11 @@ use darkfi_sdk::{
ContractId, MerkleNode, PublicKey,
},
db::ZKAS_DB_NAME,
incrementalmerkletree::{bridgetree::BridgeTree, Tree},
pasta::{group::ff::PrimeField, pallas},
};
use darkfi_serial::{deserialize, serialize, Decodable, Encodable, WriteExt};
use incrementalmerkletree::{bridgetree::BridgeTree, Tree};
use log::{debug, error, info, warn};
use pasta_curves::{group::ff::PrimeField, pallas};
use rand::rngs::OsRng;
use serde_json::json;

View File

@@ -15,7 +15,7 @@ darkfi-serial = { path = "../../serial", features = ["derive", "crypto"] }
# The following dependencies are used for the client API and
# probably shouldn't be in WASM
chacha20poly1305 = { version = "0.10.1", optional = true }
darkfi = { path = "../../../", features = ["crypto", "rpc"], optional = true }
darkfi = { path = "../../../", features = ["zk", "rpc"], optional = true }
halo2_proofs = { version = "0.2.0", optional = true }
log = { version = "0.4.17", optional = true }
rand = { version = "0.8.5", optional = true }

View File

@@ -27,9 +27,6 @@ pub mod blockchain;
#[cfg(feature = "blockchain")]
pub mod consensus;
#[cfg(feature = "crypto")]
pub mod zk;
#[cfg(feature = "dht")]
pub mod dht;
@@ -57,6 +54,9 @@ pub mod wallet;
#[cfg(feature = "wasm-runtime")]
pub mod runtime;
#[cfg(feature = "zk")]
pub mod zk;
#[cfg(feature = "zkas")]
pub mod zkas;

View File

@@ -16,10 +16,12 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use async_std::sync::{Arc, Mutex};
use std::net::IpAddr;
use std::{
collections::{HashMap, HashSet},
net::IpAddr,
};
use fxhash::{FxHashMap, FxHashSet};
use async_std::sync::{Arc, Mutex};
use ipnet::{Ipv4Net, Ipv6Net};
use iprange::IpRange;
use log::{debug, error, warn};
@@ -33,7 +35,7 @@ pub type HostsPtr = Arc<Hosts>;
/// Manages a store of network addresses.
pub struct Hosts {
addrs: Mutex<FxHashSet<Url>>,
addrs: Mutex<HashSet<Url>>,
localnet: bool,
ipv4_range: IpRange<Ipv4Net>,
ipv6_range: IpRange<Ipv6Net>,
@@ -52,7 +54,7 @@ impl Hosts {
ipv4_range.simplify();
ipv6_range.simplify();
Arc::new(Self { addrs: Mutex::new(FxHashSet::default()), localnet, ipv4_range, ipv6_range })
Arc::new(Self { addrs: Mutex::new(HashSet::new()), localnet, ipv4_range, ipv6_range })
}
/// Add a new host to the host list, after filtering.
@@ -134,9 +136,9 @@ fn filter_invalid(
ipv4_range: &IpRange<Ipv4Net>,
ipv6_range: &IpRange<Ipv6Net>,
input_addrs: Vec<Url>,
) -> FxHashMap<Url, Vec<IpAddr>> {
) -> HashMap<Url, Vec<IpAddr>> {
debug!(target: "net", "hosts::filter_invalid() [Input addresses: {:?}]", input_addrs);
let mut filtered = FxHashMap::default();
let mut filtered = HashMap::new();
for addr in &input_addrs {
// Discard domainless Urls
let domain = match addr.domain() {
@@ -211,10 +213,7 @@ fn filter_invalid(
/// Filters `input_addrs` keys to whatever has at least one `IpAddr` that is
/// the same as `connection_addr`'s IP address.
/// Skips .onion domains.
fn filter_non_resolving(
connection_addr: Url,
input_addrs: FxHashMap<Url, Vec<IpAddr>>,
) -> Vec<Url> {
fn filter_non_resolving(connection_addr: Url, input_addrs: HashMap<Url, Vec<IpAddr>>) -> Vec<Url> {
debug!(target: "net", "hosts::filter_non_resolving() [Input addresses: {:?}]", input_addrs);
debug!(target: "net", "hosts::filter_non_resolving() [Connection address: {}]", connection_addr);
@@ -299,13 +298,13 @@ fn is_valid_onion(onion: &str) -> bool {
#[cfg(test)]
mod tests {
use std::net::{IpAddr, Ipv4Addr};
use std::{
collections::{HashMap, HashSet},
net::{IpAddr, Ipv4Addr},
};
use fxhash::{FxHashMap, FxHashSet};
use ipnet::{Ipv4Net, Ipv6Net};
use iprange::IpRange;
// Uncomment for inner logging
//use simplelog::{ColorChoice, Config, LevelFilter, TermLogger, TerminalMode};
use url::Url;
use crate::net::{
@@ -317,11 +316,11 @@ mod tests {
fn test_filter_localnet() {
// Uncomment for inner logging
/*
TermLogger::init(
LevelFilter::Debug,
Config::default(),
TerminalMode::Mixed,
ColorChoice::Auto,
simplelog::TermLogger::init(
simplelog::LevelFilter::Debug,
simplelog::Config::default(),
simplelog::TerminalMode::Mixed,
simplelog::ColorChoice::Auto,
)
.unwrap();
*/
@@ -340,11 +339,11 @@ mod tests {
// Create expected output addresses vector
let output_addrs = vec![valid, onion];
let output_addrs = FxHashSet::from_iter(output_addrs.iter());
let output_addrs: HashSet<&Url> = HashSet::from_iter(output_addrs.iter());
// Execute filtering for v4 addr
let filtered = filter_localnet(input_addrs);
let filtered = FxHashSet::from_iter(filtered.iter());
let filtered: HashSet<&Url> = HashSet::from_iter(filtered.iter());
// Validate filtered addresses
assert_eq!(output_addrs, filtered);
}
@@ -390,12 +389,12 @@ mod tests {
// Create expected output addresses vector
let output_addrs = vec![valid, onion];
let output_addrs = FxHashSet::from_iter(output_addrs.iter());
let output_addrs: HashSet<&Url> = HashSet::from_iter(output_addrs.iter());
// Execute filtering for v4 addr
let filtered = filter_invalid(&ipv4_range, &ipv6_range, input_addrs);
let filtered: Vec<Url> = filtered.into_iter().map(|(k, _)| k).collect();
let filtered = FxHashSet::from_iter(filtered.iter());
let filtered: HashSet<&Url> = HashSet::from_iter(filtered.iter());
// Validate filtered addresses
assert_eq!(output_addrs, filtered);
}
@@ -426,7 +425,7 @@ mod tests {
.unwrap();
// Create input addresses hashmap, containing created addresses, excluding connection url
let mut input_addrs = FxHashMap::default();
let mut input_addrs = HashMap::new();
input_addrs.insert(
resolving_url.clone(),
vec![
@@ -438,7 +437,7 @@ mod tests {
input_addrs.insert(onion.clone(), vec![]);
// Create expected output addresses hashset
let mut output_addrs = FxHashMap::default();
let mut output_addrs = HashMap::new();
output_addrs.insert(
resolving_url,
vec![
@@ -449,28 +448,28 @@ mod tests {
output_addrs.insert(onion.clone(), vec![]);
// Convert hashmap to Vec<Url> and then to a hashset, to ignore shuffling
let output_addrs: Vec<Url> = output_addrs.into_iter().map(|(k, _)| k).collect();
let output_addrs = FxHashSet::from_iter(output_addrs.iter());
let output_addrs: HashSet<&Url> = HashSet::from_iter(output_addrs.iter());
let mut fake_output_addrs: FxHashMap<Url, Vec<Url>> = FxHashMap::default();
let mut fake_output_addrs: HashMap<Url, Vec<Url>> = HashMap::new();
// Onion addresses don't get filtered, as we can't resolve them
fake_output_addrs.insert(onion, vec![]);
let fake_output_addrs: Vec<Url> = fake_output_addrs.into_iter().map(|(k, _)| k).collect();
let fake_output_addrs = FxHashSet::from_iter(fake_output_addrs.iter());
let fake_output_addrs: HashSet<&Url> = HashSet::from_iter(fake_output_addrs.iter());
// Execute filtering for v4 addr
let filtered = filter_non_resolving(connection_url_v4, input_addrs.clone());
let filtered = FxHashSet::from_iter(filtered.iter());
let filtered = HashSet::from_iter(filtered.iter());
// Validate filtered addresses
assert_eq!(output_addrs, filtered);
// Execute filtering for v6 addr
let filtered = filter_non_resolving(connection_url_v6, input_addrs.clone());
let filtered = FxHashSet::from_iter(filtered.iter());
let filtered = HashSet::from_iter(filtered.iter());
assert_eq!(output_addrs, filtered);
// Execute filtering for fake addr
let filtered = filter_non_resolving(fake_connection_url, input_addrs);
let filtered = FxHashSet::from_iter(filtered.iter());
let filtered = HashSet::from_iter(filtered.iter());
assert_eq!(fake_output_addrs, filtered);
}
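
In the tests, `FxHashSet::from_iter` becomes `HashSet::from_iter` with the element type written out; a self-contained sketch of that comparison pattern (the address is a placeholder):

```rust
use std::collections::HashSet;

use url::Url;

fn main() {
    let left = vec![Url::parse("tcp://127.0.0.1:1234").unwrap()];
    let right = vec![Url::parse("tcp://127.0.0.1:1234").unwrap()];

    // Annotating the element type keeps both sides of the comparison on the same set type.
    let left: HashSet<&Url> = HashSet::from_iter(left.iter());
    let right: HashSet<&Url> = HashSet::from_iter(right.iter());
    assert_eq!(left, right);
}
```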

View File

@@ -16,11 +16,10 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use async_std::sync::Mutex;
use std::{any::Any, io::Cursor, sync::Arc};
use std::{any::Any, collections::HashMap, io::Cursor, sync::Arc};
use async_std::sync::Mutex;
use async_trait::async_trait;
use fxhash::FxHashMap;
use log::{debug, warn};
use rand::Rng;
@@ -69,13 +68,13 @@ trait MessageDispatcherInterface: Send + Sync {
/// A dispatcher that is unique to every Message. Maintains a list of subscribers that are subscribed to that unique Message type and handles sending messages across these subscriptions.
struct MessageDispatcher<M: Message> {
subs: Mutex<FxHashMap<MessageSubscriptionId, smol::channel::Sender<MessageResult<M>>>>,
subs: Mutex<HashMap<MessageSubscriptionId, smol::channel::Sender<MessageResult<M>>>>,
}
impl<M: Message> MessageDispatcher<M> {
/// Create a new message dispatcher.
fn new() -> Self {
MessageDispatcher { subs: Mutex::new(FxHashMap::default()) }
MessageDispatcher { subs: Mutex::new(HashMap::new()) }
}
/// Create a random ID.
@@ -177,13 +176,13 @@ impl<M: Message> MessageDispatcherInterface for MessageDispatcher<M> {
/// Generic publish/subscribe class that maintains a list of dispatchers. Dispatchers transmit
/// messages to subscribers and are specific to one message type.
pub struct MessageSubsystem {
dispatchers: Mutex<FxHashMap<&'static str, Arc<dyn MessageDispatcherInterface>>>,
dispatchers: Mutex<HashMap<&'static str, Arc<dyn MessageDispatcherInterface>>>,
}
impl MessageSubsystem {
/// Create a new message subsystem.
pub fn new() -> Self {
MessageSubsystem { dispatchers: Mutex::new(FxHashMap::default()) }
MessageSubsystem { dispatchers: Mutex::new(HashMap::new()) }
}
/// Add a new dispatcher for specified Message.

View File

@@ -16,11 +16,13 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::fmt;
use std::{
collections::{HashMap, HashSet},
fmt,
};
use async_std::sync::{Arc, Mutex};
use futures::{select, stream::FuturesUnordered, try_join, FutureExt, StreamExt, TryFutureExt};
use fxhash::{FxHashMap, FxHashSet};
use log::{debug, error, warn};
use rand::Rng;
use serde_json::json;
@@ -41,9 +43,9 @@ use super::{
};
/// List of channels that are awaiting connection.
pub type PendingChannels = Mutex<FxHashSet<Url>>;
pub type PendingChannels = Mutex<HashSet<Url>>;
/// List of connected channels.
pub type ConnectedChannels = Mutex<fxhash::FxHashMap<Url, Arc<Channel>>>;
pub type ConnectedChannels = Mutex<HashMap<Url, Arc<Channel>>>;
/// Atomic pointer to p2p interface.
pub type P2pPtr = Arc<P2p>;
@@ -108,8 +110,8 @@ impl P2p {
let settings = Arc::new(settings);
let self_ = Arc::new(Self {
pending: Mutex::new(FxHashSet::default()),
channels: Mutex::new(FxHashMap::default()),
pending: Mutex::new(HashSet::new()),
channels: Mutex::new(HashMap::new()),
channel_subscriber: Subscriber::new(),
stop_subscriber: Subscriber::new(),
hosts: Hosts::new(settings.localnet),

View File

@@ -15,11 +15,10 @@
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::collections::HashMap;
use async_std::sync::{Arc, Mutex, Weak};
use async_trait::async_trait;
use fxhash::FxHashMap;
use log::{error, info};
use serde_json::json;
use smol::Executor;
@@ -50,7 +49,7 @@ pub struct InboundSession {
p2p: Weak<P2p>,
acceptors: Mutex<Vec<AcceptorPtr>>,
accept_tasks: Mutex<Vec<StoppableTaskPtr>>,
connect_infos: Mutex<Vec<FxHashMap<Url, InboundInfo>>>,
connect_infos: Mutex<Vec<HashMap<Url, InboundInfo>>>,
}
impl InboundSession {
@@ -89,7 +88,7 @@ impl InboundSession {
executor.clone(),
);
self.connect_infos.lock().await.push(FxHashMap::default());
self.connect_infos.lock().await.push(HashMap::new());
accept_tasks.push(task);
}
@@ -185,7 +184,7 @@ impl InboundSession {
#[async_trait]
impl Session for InboundSession {
async fn get_info(&self) -> serde_json::Value {
let mut infos = FxHashMap::default();
let mut infos = HashMap::new();
for (index, accept_addr) in self.p2p().settings().inbound.iter().enumerate() {
let connect_infos = &self.connect_infos.lock().await[index];
for (addr, info) in connect_infos {

View File

@@ -16,7 +16,7 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::time::Duration;
use std::{collections::HashMap, time::Duration};
use async_std::{
sync::{Arc, Mutex},
@@ -25,12 +25,11 @@ use async_std::{
use chrono::Utc;
use darkfi_serial::{deserialize, serialize, Decodable, Encodable};
use futures::{select, FutureExt};
use fxhash::FxHashMap;
use log::{debug, error, warn};
use rand::{rngs::OsRng, Rng, RngCore};
use rand::{distributions::Alphanumeric, rngs::OsRng, thread_rng, Rng, RngCore};
use smol::Executor;
use crate::{net, util::gen_id, Error, Result};
use crate::{net, Error, Result};
use super::{
p2p_send_loop,
@@ -48,6 +47,10 @@ async fn send_loop(sender: smol::channel::Sender<()>, timeout: Duration) -> Resu
}
}
pub fn gen_id(len: usize) -> String {
thread_rng().sample_iter(&Alphanumeric).take(len).map(char::from).collect()
}
pub struct Raft<T> {
id: NodeId,
@@ -60,7 +63,7 @@ pub struct Raft<T> {
pub(super) sent_length: MapLength,
pub(super) acked_length: MapLength,
pub(super) nodes: Arc<Mutex<FxHashMap<NodeId, i64>>>,
pub(super) nodes: Arc<Mutex<HashMap<NodeId, i64>>>,
pub(super) last_term: u64,
@@ -73,7 +76,7 @@ pub struct Raft<T> {
datastore: DataStore<T>,
seen_msgs: Arc<Mutex<FxHashMap<String, i64>>>,
seen_msgs: Arc<Mutex<HashMap<String, i64>>>,
pub(super) settings: RaftSettings,
@@ -83,7 +86,7 @@ pub struct Raft<T> {
impl<T: Decodable + Encodable + Clone> Raft<T> {
pub fn new(
settings: RaftSettings,
seen_msgs: Arc<Mutex<FxHashMap<String, i64>>>,
seen_msgs: Arc<Mutex<HashMap<String, i64>>>,
) -> Result<Self> {
if settings.datastore_path.to_str().is_none() {
error!(target: "raft", "datastore path is incorrect");
@@ -115,9 +118,9 @@ impl<T: Decodable + Encodable + Clone> Raft<T> {
role,
current_leader: NodeId("".into()),
votes_received: vec![],
sent_length: MapLength(FxHashMap::default()),
acked_length: MapLength(FxHashMap::default()),
nodes: Arc::new(Mutex::new(FxHashMap::default())),
sent_length: MapLength(HashMap::default()),
acked_length: MapLength(HashMap::default()),
nodes: Arc::new(Mutex::new(HashMap::default())),
last_term: 0,
last_heartbeat: Utc::now().timestamp(),
p2p_sender,

View File

@@ -15,9 +15,9 @@
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::collections::HashMap;
use darkfi_serial::{serialize, Decodable, Encodable};
use fxhash::FxHashMap;
use crate::Result;
@@ -93,7 +93,7 @@ impl<T: Decodable + Encodable + Clone> Raft<T> {
Ok(())
}
fn acks(&self, nodes: FxHashMap<NodeId, i64>, length: u64) -> FxHashMap<NodeId, i64> {
fn acks(&self, nodes: HashMap<NodeId, i64>, length: u64) -> HashMap<NodeId, i64> {
nodes
.into_iter()
.filter(|n| {

View File

@@ -16,8 +16,9 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use async_std::sync::{Arc, Mutex};
use std::collections::HashMap;
use async_std::sync::{Arc, Mutex};
use chrono::Utc;
use log::{debug, error};
@@ -32,7 +33,7 @@ mod primitives;
mod protocol_raft;
mod settings;
pub use consensus::Raft;
pub use consensus::{gen_id, Raft};
pub use datastore::DataStore;
pub use primitives::NetMsg;
pub use protocol_raft::ProtocolRaft;
@@ -40,7 +41,7 @@ pub use settings::RaftSettings;
// Auxiliary function to periodically prune items, based on when they were received.
async fn prune_map<T: Clone + Eq + std::hash::Hash>(
map: Arc<Mutex<fxhash::FxHashMap<T, i64>>>,
map: Arc<Mutex<HashMap<T, i64>>>,
seen_duration: i64,
) {
loop {

View File

@@ -16,10 +16,9 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::io;
use std::{collections::HashMap, io};
use darkfi_serial::{Decodable, Encodable, SerialDecodable, SerialEncodable};
use fxhash::FxHashMap;
use crate::{Error, Result};
@@ -145,7 +144,7 @@ impl Logs {
}
#[derive(Clone, Debug)]
pub struct MapLength(pub FxHashMap<NodeId, u64>);
pub struct MapLength(pub HashMap<NodeId, u64>);
impl MapLength {
pub fn get(&self, key: &NodeId) -> Result<u64> {

View File

@@ -15,12 +15,12 @@
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::collections::HashMap;
use async_std::sync::{Arc, Mutex};
use async_trait::async_trait;
use chrono::Utc;
use darkfi_serial::serialize;
use fxhash::FxHashMap;
use log::debug;
use rand::{rngs::OsRng, RngCore};
use smol::Executor;
@@ -34,7 +34,7 @@ pub struct ProtocolRaft {
notify_queue_sender: smol::channel::Sender<NetMsg>,
msg_sub: net::MessageSubscription<NetMsg>,
p2p: net::P2pPtr,
seen_msgs: Arc<Mutex<FxHashMap<String, i64>>>,
seen_msgs: Arc<Mutex<HashMap<String, i64>>>,
channel: net::ChannelPtr,
}
@@ -44,7 +44,7 @@ impl ProtocolRaft {
channel: net::ChannelPtr,
notify_queue_sender: smol::channel::Sender<NetMsg>,
p2p: net::P2pPtr,
seen_msgs: Arc<Mutex<FxHashMap<String, i64>>>,
seen_msgs: Arc<Mutex<HashMap<String, i64>>>,
) -> net::ProtocolBasePtr {
let message_subsytem = channel.get_message_subsystem();
message_subsytem.add_dispatch::<NetMsg>().await;

View File

@@ -14,7 +14,6 @@ futures-lite = {version = "1.12.0", optional = true}
# Supported types for encoding
blake3 = {version = "1.3.3", optional = true}
fxhash = {version = "0.2.1", optional = true}
incrementalmerkletree = {version = "0.3.0", optional = true}
pasta_curves = {version = "0.4.1", optional = true}
url = {version = "2.3.1", optional = true}
@@ -24,6 +23,6 @@ default = ["derive"]
derive = ["darkfi-derive"]
async = ["futures-lite"]
collections = ["fxhash"]
collections = []
crypto = ["collections", "hash", "incrementalmerkletree", "pasta_curves"]
hash = ["blake3"]

View File

@@ -11,4 +11,4 @@ edition = "2021"
[dependencies]
proc-macro2 = "1.0.47"
quote = "1.0.21"
syn = {version = "1.0.104", features = ["full", "fold"]}
syn = {version = "1.0.105", features = ["full", "fold"]}

View File

@@ -14,6 +14,6 @@ proc-macro = true
[dependencies]
proc-macro-crate = "1.2.1"
proc-macro2 = "1.0.47"
syn = {version = "1.0.104", features = ["full", "fold"]}
syn = {version = "1.0.105", features = ["full", "fold"]}
darkfi-derive-internal = {path = "../derive-internal"}

View File

@@ -95,19 +95,6 @@ impl<T: Decodable + std::cmp::Ord> Decodable for BTreeSet<T> {
}
}
#[cfg(feature = "fxhash")]
impl<T: Encodable, U: Encodable> Encodable for fxhash::FxHashMap<T, U> {
fn encode<S: Write>(&self, mut s: S) -> Result<usize, Error> {
let mut len = 0;
len += VarInt(self.len() as u64).encode(&mut s)?;
for c in self.iter() {
len += c.0.encode(&mut s)?;
len += c.1.encode(&mut s)?;
}
Ok(len)
}
}
impl<T: Decodable + std::cmp::Eq + std::hash::Hash, U: Decodable> Decodable for HashMap<T, U> {
fn decode<D: Read>(mut d: D) -> Result<Self, Error> {
let len = VarInt::decode(&mut d)?.0;
@@ -132,19 +119,3 @@ impl<T: Encodable, U: Encodable> Encodable for HashMap<T, U> {
Ok(len)
}
}
#[cfg(feature = "fxhash")]
impl<T: Decodable + std::cmp::Eq + std::hash::Hash, U: Decodable> Decodable
for fxhash::FxHashMap<T, U>
{
fn decode<D: Read>(mut d: D) -> Result<Self, Error> {
let len = VarInt::decode(&mut d)?.0;
let mut ret = fxhash::FxHashMap::default();
for _ in 0..len {
let key: T = Decodable::decode(&mut d)?;
let entry: U = Decodable::decode(&mut d)?;
ret.insert(key, entry);
}
Ok(ret)
}
}
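
Only the `fxhash` impls go away; the std `HashMap` impls shown above stay, so maps still round-trip through the serializer. A minimal sketch, assuming `serialize`/`deserialize` keep their usual `&T -> Vec<u8>` / `&[u8] -> Result<T>` shape and that `String` and `u64` remain `Encodable`/`Decodable`:

```rust
use std::collections::HashMap;

use darkfi_serial::{deserialize, serialize};

fn main() {
    let mut map: HashMap<String, u64> = HashMap::new();
    map.insert("alice".to_string(), 42);

    // Encode to bytes and decode back using the std HashMap impls kept above.
    let bytes = serialize(&map);
    let decoded: HashMap<String, u64> = deserialize(&bytes).unwrap();
    assert_eq!(map, decoded);
}
```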

View File

@@ -16,8 +16,9 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::collections::HashMap;
use async_std::sync::{Arc, Mutex};
use fxhash::FxHashMap;
use log::warn;
use rand::Rng;
@@ -55,12 +56,12 @@ impl<T: Clone> Subscription<T> {
// Simple broadcast (publish-subscribe) class
pub struct Subscriber<T> {
subs: Mutex<FxHashMap<u64, smol::channel::Sender<T>>>,
subs: Mutex<HashMap<u64, smol::channel::Sender<T>>>,
}
impl<T: Clone> Subscriber<T> {
pub fn new() -> Arc<Self> {
Arc::new(Self { subs: Mutex::new(FxHashMap::default()) })
Arc::new(Self { subs: Mutex::new(HashMap::new()) })
}
fn random_id() -> SubscriptionId {

View File

@@ -22,13 +22,10 @@ use std::{
marker::PhantomData,
path::{Path, PathBuf},
str,
time::Duration,
};
use indicatif::{ProgressBar, ProgressStyle};
use serde::{de::DeserializeOwned, Serialize};
use simplelog::ConfigBuilder;
use termion::color;
use crate::{Error, Result};
@@ -206,32 +203,14 @@ macro_rules! async_daemonize {
};
}
pub fn progress_bar(message: &str) -> ProgressBar {
let progress_bar = ProgressBar::new(42);
progress_bar.set_style(
ProgressStyle::default_spinner().template("{spinner:.green} {wide_msg}").unwrap(),
);
progress_bar.enable_steady_tick(Duration::from_millis(100));
progress_bar.set_message(message.to_string());
progress_bar
}
pub fn fg_red(message: &str) -> String {
format!("{}{}{}", color::Fg(color::Red), message, color::Fg(color::Reset))
format!("\x1b[31m{}\x1b[0m", message)
}
pub fn fg_green(message: &str) -> String {
format!("{}{}{}", color::Fg(color::Green), message, color::Fg(color::Reset))
}
pub fn start_fg_red() -> String {
format!("{}", color::Fg(color::Red))
}
pub fn start_fg_green() -> String {
format!("{}", color::Fg(color::Green))
format!("\x1b[32m{}\x1b[0m", message)
}
pub fn fg_reset() -> String {
format!("{}", color::Fg(color::Reset))
format!("\x1b[0m")
}
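
For reference, the escape sequences replacing the termion calls: `\x1b[` opens a control sequence, `31` and `32` select red and green foreground, and `0` resets all attributes. A small stand-alone sketch of the same helpers:

```rust
// \x1b[<code>m sets an SGR attribute; \x1b[0m resets it.
fn color(code: &str, message: &str) -> String {
    format!("\x1b[{}m{}\x1b[0m", code, message)
}

fn main() {
    println!("{} build finished", color("32", "ok:"));
    println!("{} something went wrong", color("31", "error:"));
}
```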

View File

@@ -40,12 +40,3 @@ pub mod path;
/// Time utilities
pub mod time;
// =======================
// TODO: Why is this here?
// =======================
use rand::{distributions::Alphanumeric, thread_rng, Rng};
pub fn gen_id(len: usize) -> String {
thread_rng().sample_iter(&Alphanumeric).take(len).map(char::from).collect()
}
// ======================

View File

@@ -1,658 +0,0 @@
/* This file is part of DarkFi (https://dark.fi)
*
* Copyright (C) 2020-2022 Dyne.org foundation
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::{fs::create_dir_all, path::Path, str::FromStr, time::Duration};
use async_std::sync::Arc;
use darkfi_sdk::crypto::{
constants::MERKLE_DEPTH, Address, Keypair, MerkleNode, Nullifier, PublicKey, SecretKey, TokenId,
};
use darkfi_serial::{deserialize, serialize};
use incrementalmerkletree::bridgetree::BridgeTree;
use log::{debug, error, info, LevelFilter};
use rand::rngs::OsRng;
use sqlx::{
sqlite::{SqliteConnectOptions, SqliteJournalMode},
ConnectOptions, Row, SqlitePool,
};
use crate::{
crypto::{
coin::{Coin, OwnCoin},
note::Note,
},
util::path::expand_path,
Error::{WalletEmptyPassword, WalletTreeExists},
Result,
};
pub type WalletPtr = Arc<WalletDb>;
#[derive(Clone, Debug)]
pub struct Balance {
pub token_id: TokenId,
pub value: u64,
pub nullifier: Nullifier,
}
#[derive(Clone, Debug)]
pub struct Balances {
pub list: Vec<Balance>,
}
pub struct WalletDb {
pub conn: SqlitePool,
}
/// Helper function to initialize `WalletPtr`
pub async fn init_wallet(wallet_path: &str, wallet_pass: &str) -> Result<WalletPtr> {
let expanded = expand_path(wallet_path)?;
let wallet_path = format!("sqlite://{}", expanded.to_str().unwrap());
let wallet = WalletDb::new(&wallet_path, wallet_pass).await?;
Ok(wallet)
}
impl WalletDb {
pub async fn new(path: &str, password: &str) -> Result<WalletPtr> {
if password.trim().is_empty() {
error!("Password is empty. You must set a password to use the wallet.");
return Err(WalletEmptyPassword)
}
if path != "sqlite::memory:" {
let p = Path::new(path.strip_prefix("sqlite://").unwrap());
if let Some(dirname) = p.parent() {
info!("Creating path to database: {}", dirname.display());
create_dir_all(&dirname)?;
}
}
let mut connect_opts = SqliteConnectOptions::from_str(path)?
.pragma("key", password.to_string())
.create_if_missing(true)
.journal_mode(SqliteJournalMode::Off);
connect_opts.log_statements(LevelFilter::Trace);
connect_opts.log_slow_statements(LevelFilter::Trace, Duration::from_micros(10));
let conn = SqlitePool::connect_with(connect_opts).await?;
info!("Opened connection at path {}", path);
Ok(Arc::new(WalletDb { conn }))
}
pub async fn init_db(&self) -> Result<()> {
info!("Initializing wallet database");
let tree = include_str!("../../script/sql/tree.sql");
let keys = include_str!("../../script/sql/keys.sql");
let coins = include_str!("../../script/sql/coins.sql");
let mut conn = self.conn.acquire().await?;
debug!("Initializing merkle tree table");
sqlx::query(tree).execute(&mut conn).await?;
debug!("Initializing keys table");
sqlx::query(keys).execute(&mut conn).await?;
debug!("Initializing coins table");
sqlx::query(coins).execute(&mut conn).await?;
Ok(())
}
pub async fn keygen(&self) -> Result<Keypair> {
debug!("Attempting to generate keypairs");
let keypair = Keypair::random(&mut OsRng);
self.put_keypair(&keypair).await?;
Ok(keypair)
}
pub async fn put_keypair(&self, keypair: &Keypair) -> Result<()> {
debug!("Writing keypair into the wallet database");
let pubkey = serialize(&keypair.public);
let secret = serialize(&keypair.secret);
let is_default = 0;
let mut conn = self.conn.acquire().await?;
sqlx::query("INSERT INTO keys(public, secret, is_default) VALUES (?1, ?2, ?3)")
.bind(pubkey)
.bind(secret)
.bind(is_default)
.execute(&mut conn)
.await?;
Ok(())
}
pub async fn set_default_keypair(&self, public: &PublicKey) -> Result<Keypair> {
debug!("Set default keypair");
let mut conn = self.conn.acquire().await?;
let pubkey = serialize(public);
// unset previous default keypair
sqlx::query("UPDATE keys SET is_default = 0;").execute(&mut conn).await?;
// set new default keypair
sqlx::query("UPDATE keys SET is_default = 1 WHERE public = ?1;")
.bind(pubkey)
.execute(&mut conn)
.await?;
let keypair = self.get_default_keypair().await?;
Ok(keypair)
}
pub async fn get_default_keypair(&self) -> Result<Keypair> {
debug!("Returning default keypair");
let mut conn = self.conn.acquire().await?;
let is_default = 1;
let row = sqlx::query("SELECT * FROM keys WHERE is_default = ?1;")
.bind(is_default)
.fetch_one(&mut conn)
.await?;
let public: PublicKey = deserialize(row.get("public"))?;
let secret: SecretKey = deserialize(row.get("secret"))?;
Ok(Keypair { secret, public })
}
pub async fn get_default_address(&self) -> Result<Address> {
debug!("Returning default address");
let keypair = self.get_default_keypair_or_create_one().await?;
Ok(Address::from(keypair.public))
}
pub async fn get_default_keypair_or_create_one(&self) -> Result<Keypair> {
debug!("Returning default keypair or create one");
let default_keypair = self.get_default_keypair().await;
let keypair = if default_keypair.is_err() {
let keypairs = self.get_keypairs().await?;
let kp = if keypairs.is_empty() { self.keygen().await? } else { keypairs[0] };
self.set_default_keypair(&kp.public).await?;
kp
} else {
default_keypair?
};
Ok(keypair)
}
pub async fn get_keypairs(&self) -> Result<Vec<Keypair>> {
debug!("Returning keypairs");
let mut conn = self.conn.acquire().await?;
let mut keypairs = vec![];
for row in sqlx::query("SELECT * FROM keys").fetch_all(&mut conn).await? {
let public: PublicKey = deserialize(row.get("public"))?;
let secret: SecretKey = deserialize(row.get("secret"))?;
keypairs.push(Keypair { public, secret });
}
Ok(keypairs)
}
pub async fn tree_gen(&self) -> Result<BridgeTree<MerkleNode, MERKLE_DEPTH>> {
debug!("Attempting to generate merkle tree");
let mut conn = self.conn.acquire().await?;
match sqlx::query("SELECT * FROM tree").fetch_one(&mut conn).await {
Ok(_) => {
error!("Merkle tree already exists");
Err(WalletTreeExists)
}
Err(_) => {
let tree = BridgeTree::<MerkleNode, MERKLE_DEPTH>::new(100);
self.put_tree(&tree).await?;
Ok(tree)
}
}
}
pub async fn get_tree(&self) -> Result<BridgeTree<MerkleNode, MERKLE_DEPTH>> {
debug!("Getting merkle tree");
let mut conn = self.conn.acquire().await?;
let row = sqlx::query("SELECT * FROM tree").fetch_one(&mut conn).await?;
let tree = deserialize(row.get("tree"))?;
Ok(tree)
}
pub async fn put_tree(&self, tree: &BridgeTree<MerkleNode, MERKLE_DEPTH>) -> Result<()> {
debug!("put_tree(): Attempting to write merkle tree");
let mut conn = self.conn.acquire().await?;
let tree_bytes = serialize(tree);
debug!("put_tree(): Deleting old row");
sqlx::query("DELETE FROM tree;").execute(&mut conn).await?;
debug!("put_tree(): Inserting new tree");
sqlx::query("INSERT INTO tree (tree) VALUES (?1);")
.bind(tree_bytes)
.execute(&mut conn)
.await?;
Ok(())
}
pub async fn get_own_coins(&self) -> Result<Vec<OwnCoin>> {
debug!("Finding own coins");
let is_spent = 0;
let mut conn = self.conn.acquire().await?;
let rows = sqlx::query("SELECT * FROM coins WHERE is_spent = ?1;")
.bind(is_spent)
.fetch_all(&mut conn)
.await?;
let mut own_coins = vec![];
for row in rows {
let coin = deserialize(row.get("coin"))?;
// Note
let serial = deserialize(row.get("serial"))?;
let coin_blind = deserialize(row.get("coin_blind"))?;
let value_blind = deserialize(row.get("valcom_blind"))?;
let value = deserialize(row.get("value"))?;
let token_id = deserialize(row.get("token_id"))?;
let token_blind = deserialize(row.get("token_blind"))?;
let memo = deserialize(row.get("memo"))?;
let note = Note { serial, value, token_id, coin_blind, value_blind, token_blind, memo };
let secret = deserialize(row.get("secret"))?;
let nullifier = deserialize(row.get("nullifier"))?;
let leaf_position = deserialize(row.get("leaf_position"))?;
let oc = OwnCoin { coin, note, secret, nullifier, leaf_position };
own_coins.push(oc);
}
Ok(own_coins)
}
pub async fn get_coins_valtok(
&self,
value: u64,
token_id: TokenId,
unspent: bool,
) -> Result<Vec<OwnCoin>> {
debug!("Querying for coins with value {} and token_id {}", value, token_id,);
let mut conn = self.conn.acquire().await?;
let rows = match unspent {
true => {
sqlx::query(
"SELECT * FROM coins WHERE is_spent = ?1 AND value = ?2 AND token_id = ?3;",
)
.bind(0)
.bind(serialize(&value))
.bind(serialize(&token_id))
.fetch_all(&mut conn)
.await?
}
false => {
sqlx::query("SELECT * FROM coins WHERE value = ?1 AND token_id = ?2;")
.bind(serialize(&value))
.bind(serialize(&token_id))
.fetch_all(&mut conn)
.await?
}
};
let mut coins = vec![];
for row in rows {
let coin = deserialize(row.get("coin"))?;
// Note
let serial = deserialize(row.get("serial"))?;
let coin_blind = deserialize(row.get("coin_blind"))?;
let value_blind = deserialize(row.get("valcom_blind"))?;
let value = deserialize(row.get("value"))?;
let token_id = deserialize(row.get("token_id"))?;
let token_blind = deserialize(row.get("token_blind"))?;
let memo = deserialize(row.get("memo"))?;
let note = Note { serial, value, token_id, coin_blind, value_blind, token_blind, memo };
let secret = deserialize(row.get("secret"))?;
let nullifier = deserialize(row.get("nullifier"))?;
let leaf_position = deserialize(row.get("leaf_position"))?;
let oc = OwnCoin { coin, note, secret, nullifier, leaf_position };
coins.push(oc);
}
Ok(coins)
}
pub async fn put_own_coin(&self, own_coin: OwnCoin) -> Result<()> {
debug!("Putting own coin into wallet database");
let coin = serialize(&own_coin.coin.to_bytes());
let serial = serialize(&own_coin.note.serial);
let coin_blind = serialize(&own_coin.note.coin_blind);
let value_blind = serialize(&own_coin.note.value_blind);
let token_blind = serialize(&own_coin.note.token_blind);
let value = serialize(&own_coin.note.value);
let token_id = serialize(&own_coin.note.token_id);
let secret = serialize(&own_coin.secret);
let nullifier = serialize(&own_coin.nullifier);
let leaf_position = serialize(&own_coin.leaf_position);
let memo = serialize(&own_coin.note.memo);
let is_spent: u8 = 0;
let mut conn = self.conn.acquire().await?;
sqlx::query(
"INSERT OR REPLACE INTO coins
(coin, serial, coin_blind, valcom_blind, token_blind, value,
token_id, secret, is_spent, nullifier, leaf_position, memo)
VALUES
(?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12);",
)
.bind(coin)
.bind(serial)
.bind(coin_blind)
.bind(value_blind)
.bind(token_blind)
.bind(value)
.bind(token_id)
.bind(secret)
.bind(is_spent)
.bind(nullifier)
.bind(leaf_position)
.bind(memo)
.execute(&mut conn)
.await?;
Ok(())
}
pub async fn remove_own_coins(&self) -> Result<()> {
debug!("Removing own coins from wallet database");
let mut conn = self.conn.acquire().await?;
sqlx::query("DROP TABLE coins;").execute(&mut conn).await?;
Ok(())
}
pub async fn confirm_spend_coin(&self, coin: &Coin) -> Result<()> {
debug!("Confirm spend coin");
let is_spent = 1;
let coin = serialize(coin);
let mut conn = self.conn.acquire().await?;
sqlx::query("UPDATE coins SET is_spent = ?1 WHERE coin = ?2;")
.bind(is_spent)
.bind(coin)
.execute(&mut conn)
.await?;
Ok(())
}
pub async fn revert_spend_coin(&self, coin: &Coin) -> Result<()> {
debug!("Revert spend coin");
let is_spent = 0;
let coin = serialize(coin);
let mut conn = self.conn.acquire().await?;
sqlx::query("UPDATE coins SET is_spent = ?1 WHERE coin = ?2;")
.bind(is_spent)
.bind(coin)
.execute(&mut conn)
.await?;
Ok(())
}
pub async fn get_balance(&self, token_id: TokenId) -> Result<Option<Balance>> {
debug!("Getting balance of token ID");
let is_spent = 0;
let id = serialize(&token_id);
let mut conn = self.conn.acquire().await?;
let row = sqlx::query(
"SELECT value, token_id, nullifier FROM coins WHERE token_id = ?1 AND is_spent = ?2;",
)
.bind(id)
.bind(is_spent)
.fetch_optional(&mut conn)
.await?;
let balance = match row {
Some(b) => {
let value = deserialize(b.get("value"))?;
let token_id = deserialize(b.get("token_id"))?;
let nullifier = deserialize(b.get("nullifier"))?;
Some(Balance { token_id, value, nullifier })
}
None => None,
};
Ok(balance)
}
pub async fn get_balances(&self) -> Result<Balances> {
debug!("Getting tokens and balances");
let is_spent = 0;
let mut conn = self.conn.acquire().await?;
let rows = sqlx::query("SELECT value, token_id, nullifier FROM coins WHERE is_spent = ?1;")
.bind(is_spent)
.fetch_all(&mut conn)
.await?;
debug!("Found {} rows", rows.len());
let mut list = vec![];
for row in rows {
let value = deserialize(row.get("value"))?;
let token_id = deserialize(row.get("token_id"))?;
let nullifier = deserialize(row.get("nullifier"))?;
list.push(Balance { token_id, value, nullifier });
}
Ok(Balances { list })
}
pub async fn get_token_id(&self) -> Result<Vec<TokenId>> {
debug!("Getting token ID");
let is_spent = 0;
let mut conn = self.conn.acquire().await?;
let rows = sqlx::query("SELECT token_id FROM coins WHERE is_spent = ?1;")
.bind(is_spent)
.fetch_all(&mut conn)
.await?;
let mut token_ids = vec![];
for row in rows {
let token_id = deserialize(row.get("token_id"))?;
token_ids.push(token_id);
}
Ok(token_ids)
}
pub async fn token_id_exists(&self, token_id: TokenId) -> Result<bool> {
debug!("Checking if token ID exists");
let is_spent = 0;
let id = serialize(&token_id);
let mut conn = self.conn.acquire().await?;
let id_check = sqlx::query("SELECT * FROM coins WHERE token_id = ?1 AND is_spent = ?2;")
.bind(id)
.bind(is_spent)
.fetch_optional(&mut conn)
.await?;
Ok(id_check.is_some())
}
pub async fn test_wallet(&self) -> Result<()> {
debug!("Testing wallet");
let mut conn = self.conn.acquire().await?;
let _row = sqlx::query("SELECT * FROM keys").fetch_one(&mut conn).await?;
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::crypto::types::{DrkCoinBlind, DrkSerial, DrkValueBlind};
use darkfi_sdk::crypto::{poseidon_hash, MerkleNode};
use incrementalmerkletree::Tree;
use pasta_curves::{group::ff::Field, pallas};
use rand::rngs::OsRng;
const WPASS: &str = "darkfi";
fn dummy_coin(s: &SecretKey, v: u64, t: &TokenId) -> OwnCoin {
let serial = DrkSerial::random(&mut OsRng);
let note = Note {
serial,
value: v,
token_id: *t,
coin_blind: DrkCoinBlind::random(&mut OsRng),
value_blind: DrkValueBlind::random(&mut OsRng),
token_blind: DrkValueBlind::random(&mut OsRng),
memo: vec![],
};
let coin = Coin(pallas::Base::random(&mut OsRng));
let nullifier = Nullifier::from(poseidon_hash::<2>([s.inner(), serial]));
let leaf_position: incrementalmerkletree::Position = 0.into();
OwnCoin { coin, note, secret: *s, nullifier, leaf_position }
}
#[async_std::test]
async fn test_walletdb() -> Result<()> {
let wallet = WalletDb::new("sqlite::memory:", WPASS).await?;
let keypair = Keypair::random(&mut OsRng);
// init_db()
wallet.init_db().await?;
// tree_gen()
let mut tree1 = wallet.tree_gen().await?;
// put_keypair()
wallet.put_keypair(&keypair).await?;
let token_id = TokenId::from(pallas::Base::random(&mut OsRng));
let c0 = dummy_coin(&keypair.secret, 69, &token_id);
let c1 = dummy_coin(&keypair.secret, 420, &token_id);
let c2 = dummy_coin(&keypair.secret, 42, &token_id);
let c3 = dummy_coin(&keypair.secret, 11, &token_id);
// put_own_coin()
wallet.put_own_coin(c0.clone()).await?;
tree1.append(&MerkleNode::from(c0.coin.0));
tree1.witness();
wallet.put_own_coin(c1.clone()).await?;
tree1.append(&MerkleNode::from(c1.coin.0));
tree1.witness();
wallet.put_own_coin(c2.clone()).await?;
tree1.append(&MerkleNode::from(c2.coin.0));
tree1.witness();
wallet.put_own_coin(c3.clone()).await?;
tree1.append(&MerkleNode::from(c3.coin.0));
tree1.witness();
// We'll check this merkle root corresponds to the one we'll retrieve.
let root1 = tree1.root(0).unwrap();
// put_tree()
wallet.put_tree(&tree1).await?;
// get_token_id()
let id = wallet.get_token_id().await?;
assert_eq!(id.len(), 4);
for i in id {
assert_eq!(i, token_id);
assert!(wallet.token_id_exists(i).await?);
}
// get_balance()
let balance = wallet.get_balance(token_id).await?;
assert_eq!(balance.unwrap().value, 69);
// get_balances()
let balances = wallet.get_balances().await?;
assert_eq!(balances.list.len(), 4);
assert_eq!(balances.list[1].value, 420);
assert_eq!(balances.list[2].value, 42);
assert_eq!(balances.list[3].token_id, token_id);
/////////////////
//// keypair ////
/////////////////
let keypair2 = Keypair::random(&mut OsRng);
// add new keypair
wallet.put_keypair(&keypair2).await?;
// get all keypairs
let keypairs = wallet.get_keypairs().await?;
assert_eq!(keypair, keypairs[0]);
assert_eq!(keypair2, keypairs[1]);
// set the keypair at index 1 as the default keypair
wallet.set_default_keypair(&keypair2.public).await?;
// get default keypair
assert_eq!(keypair2, wallet.get_default_keypair_or_create_one().await?);
// get_own_coins()
let own_coins = wallet.get_own_coins().await?;
assert_eq!(own_coins.len(), 4);
assert_eq!(own_coins[0], c0);
assert_eq!(own_coins[1], c1);
assert_eq!(own_coins[2], c2);
assert_eq!(own_coins[3], c3);
// get_tree()
let tree2 = wallet.get_tree().await?;
let root2 = tree2.root(0).unwrap();
assert_eq!(root1, root2);
// Let's try it once more to test sql replacing.
wallet.put_tree(&tree2).await?;
let tree3 = wallet.get_tree().await?;
let root3 = tree3.root(0).unwrap();
assert_eq!(root2, root3);
Ok(())
}
}

View File

@@ -19,11 +19,11 @@
use halo2_proofs::{
arithmetic::FieldExt,
circuit::{AssignedCell, Chip, Layouter},
pasta::pallas,
plonk,
plonk::{Advice, Column, ConstraintSystem, Constraints, Selector},
poly::Rotation,
};
use pasta_curves::pallas;
pub trait ArithInstruction<F: FieldExt>: Chip<F> {
fn add(
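
The ZK modules now take the pasta types from `halo2_proofs`' own re-export rather than importing `pasta_curves` directly, so the curve types stay in lockstep with the version `halo2_proofs` itself is built against. A minimal sketch of the import path:

```rust
// pallas here comes through halo2_proofs' re-export of pasta_curves.
use halo2_proofs::pasta::pallas;

fn forty_two() -> pallas::Base {
    pallas::Base::from(42u64)
}

fn main() {
    println!("{:?}", forty_two());
}
```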

View File

@@ -224,9 +224,9 @@ mod tests {
use halo2_proofs::{
circuit::floor_planner,
dev::{CircuitLayout, MockProver},
pasta::{arithmetic::FieldExt, group::ff::PrimeField},
plonk::Circuit,
};
use pasta_curves::{arithmetic::FieldExt, group::ff::PrimeField};
macro_rules! test_circuit {
($window_size:expr, $num_bits:expr, $num_windows:expr) => {

View File

@@ -18,12 +18,12 @@
use darkfi_serial::{SerialDecodable, SerialEncodable};
use halo2_proofs::{
pasta::{pallas, vesta},
plonk,
plonk::{Circuit, SingleVerifier},
poly::commitment::Params,
transcript::{Blake2bRead, Blake2bWrite},
};
use pasta_curves::{pallas, vesta};
use rand::RngCore;
#[derive(Clone, Debug)]

View File

@@ -19,8 +19,10 @@
//! VM stack type abstractions
use darkfi_sdk::crypto::{constants::OrchardFixedBases, MerkleNode};
use halo2_gadgets::ecc::{chip::EccChip, FixedPoint, FixedPointBaseField, FixedPointShort, Point};
use halo2_proofs::circuit::{AssignedCell, Value};
use pasta_curves::pallas;
use halo2_proofs::{
circuit::{AssignedCell, Value},
pasta::pallas,
};
use crate::zkas::{decoder::ZkBinary, types::VarType};

View File

@@ -480,6 +480,6 @@ impl Analyzer {
let _ = stdout.write(msg).unwrap();
stdout.flush().unwrap();
let _ = stdin().read(&mut [0]).unwrap();
write!(stdout, "{}{}\r", termion::cursor::Up(1), termion::clear::CurrentLine).unwrap();
write!(stdout, "\x1b[1A\r\x1b[K\r").unwrap();
}
}
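
The cursor helpers get the same treatment: `\x1b[1A` moves the cursor up one line, `\r` returns to column 0, and `\x1b[K` erases from the cursor to the end of the line. A small sketch of the erase-previous-line trick used here:

```rust
use std::io::{stdout, Write};

fn main() {
    let mut out = stdout();
    writeln!(out, "press enter to continue...").unwrap();
    // Move up one line, return to the start of it and clear it, as the analyzer does.
    write!(out, "\x1b[1A\r\x1b[K\r").unwrap();
    out.flush().unwrap();
}
```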

View File

@@ -18,8 +18,6 @@
use std::{io, io::Write, process};
use termion::{color, style};
pub(super) struct ErrorEmitter {
namespace: String,
file: String,
@@ -61,27 +59,13 @@ impl ErrorEmitter {
let mut handle = stderr.lock();
match typ {
"error" => write!(
handle,
"{}{}{} error:{} {}",
style::Bold,
color::Fg(color::Red),
self.namespace,
style::Reset,
msg
)
.unwrap(),
"error" => {
write!(handle, "\x1b[31;1m{} error:\x1b[0m {}", self.namespace, msg).unwrap()
}
"warning" => write!(
handle,
"{}{}{} warning:{} {}",
style::Bold,
color::Fg(color::Yellow),
self.namespace,
style::Reset,
msg
)
.unwrap(),
"warning" => {
write!(handle, "\x1b[33;1m{} warning:\x1b[0m {}", self.namespace, msg).unwrap()
}
_ => unreachable!(),
};

View File

@@ -16,7 +16,10 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use darkfi_sdk::crypto::{pedersen::pedersen_commitment_u64, MerkleNode, PublicKey, SecretKey};
use darkfi_sdk::{
crypto::{pedersen::pedersen_commitment_u64, MerkleNode, PublicKey, SecretKey},
incrementalmerkletree::{bridgetree::BridgeTree, Tree},
};
use halo2_gadgets::poseidon::{
primitives as poseidon,
primitives::{ConstantLength, P128Pow5T3},
@@ -26,7 +29,6 @@ use halo2_proofs::{
circuit::Value,
pasta::{group::Curve, pallas},
};
use incrementalmerkletree::{bridgetree::BridgeTree, Tree};
use rand::rngs::OsRng;
use simplelog::{ColorChoice, Config, LevelFilter, TermLogger, TerminalMode};